You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@syncope.apache.org by il...@apache.org on 2015/01/12 17:32:07 UTC
[28/52] [abbrv] [partial] syncope git commit: [SYNCOPE-620] Unit
tests all in
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNode.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNode.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNode.java
new file mode 100644
index 0000000..c1d8f78
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNode.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.content;
+
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+class MultiParentNode<T> {
+
+ private final T object;
+
+ private Set<MultiParentNode<T>> children;
+
+ private int level = 0;
+
+ private boolean exploited = false;
+
+ public MultiParentNode(final T object) {
+ this.object = object;
+ children = new HashSet<MultiParentNode<T>>();
+ }
+
+ public int getLevel() {
+ return level;
+ }
+
+ public void setLevel(int level) {
+ this.level = level;
+ }
+
+ boolean isExploited() {
+ return exploited;
+ }
+
+ void setExploited(boolean exploited) {
+ this.exploited = exploited;
+ }
+
+ public T getObject() {
+ return object;
+ }
+
+ public boolean isParent(final MultiParentNode<T> child) {
+ return children.contains(child);
+ }
+
+ public boolean isChild(final MultiParentNode<T> parent) {
+ return parent.isParent(this);
+ }
+
+ public Set<MultiParentNode<T>> getChildren() {
+ return children;
+ }
+
+ public void addParent(final MultiParentNode<T> parent) {
+ if (parent != null) {
+ parent.children.add(this);
+ }
+ }
+
+ public void removeParent(final MultiParentNode<T> parent) {
+ if (parent != null) {
+ parent.children.remove(this);
+ }
+ }
+
+ public void addChild(final MultiParentNode<T> child) {
+ if (child != null) {
+ children.add(child);
+ }
+ }
+
+ public void removeChild(final MultiParentNode<T> child) {
+ if (child != null) {
+ children.remove(child);
+ }
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ return EqualsBuilder.reflectionEquals(this, obj);
+ }
+
+ @Override
+ public int hashCode() {
+ return HashCodeBuilder.reflectionHashCode(this);
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNodeOp.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNodeOp.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNodeOp.java
new file mode 100644
index 0000000..b55c6af
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/MultiParentNodeOp.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.content;
+
+import java.util.Collection;
+import java.util.Set;
+
+class MultiParentNodeOp {
+
+ private MultiParentNodeOp() {
+ }
+
+ public static <T> void traverseTree(final Set<MultiParentNode<T>> roots, final Collection<T> objects) {
+ for (MultiParentNode<T> root : roots) {
+ traverseTree(root, objects);
+ }
+ }
+
+ public static <T> void traverseTree(final MultiParentNode<T> root, final Collection<T> objects) {
+
+ root.setExploited(true);
+
+ for (MultiParentNode<T> child : root.getChildren()) {
+ if (!child.isExploited()) {
+ traverseTree(child, objects);
+ }
+ }
+
+ if (!objects.contains(root.getObject())) {
+ objects.add(root.getObject());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentExporter.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentExporter.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentExporter.java
new file mode 100644
index 0000000..90bbbee
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentExporter.java
@@ -0,0 +1,387 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.content;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.sql.Blob;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerConfigurationException;
+import javax.xml.transform.sax.SAXTransformerFactory;
+import javax.xml.transform.sax.TransformerHandler;
+import javax.xml.transform.stream.StreamResult;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.syncope.common.lib.SyncopeConstants;
+import org.apache.syncope.server.misc.DataFormat;
+import org.apache.syncope.server.persistence.api.content.ContentExporter;
+import org.apache.syncope.server.persistence.jpa.entity.JPAReportExec;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMDerAttr;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMPlainAttr;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMPlainAttrUniqueValue;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMPlainAttrValue;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMVirAttr;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMembership;
+import org.apache.syncope.server.persistence.jpa.entity.task.JPATaskExec;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUDerAttr;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUPlainAttr;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUPlainAttrUniqueValue;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUPlainAttrValue;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUVirAttr;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUser;
+import org.springframework.jdbc.datasource.DataSourceUtils;
+import org.springframework.security.crypto.codec.Hex;
+import org.springframework.stereotype.Component;
+import org.xml.sax.SAXException;
+import org.xml.sax.helpers.AttributesImpl;
+
/**
 * Export internal storage content as XML: reads all allowed tables via JDBC
 * metadata, orders them by foreign-key dependency and streams each row as an
 * XML element whose attributes are the column values.
 */
@Component
public class XMLContentExporter extends AbstractContentDealer implements ContentExporter {

    /**
     * Tables whose (upper-case) name starts with any of these prefixes are
     * never exported: Quartz internals, logging, executions and all user /
     * membership attribute data.
     * NOTE(review): static but mutable — export() adds the workflow table
     * prefix at runtime, so concurrent exports with different prefixes would
     * interfere; confirm whether single-threaded use is guaranteed.
     */
    protected final static Set<String> TABLE_PREFIXES_TO_BE_EXCLUDED =
            new HashSet<>(Arrays.asList(new String[] {
                "QRTZ_", "LOGGING", JPAReportExec.TABLE, JPATaskExec.TABLE,
                JPAUser.TABLE, JPAUPlainAttr.TABLE, JPAUPlainAttrValue.TABLE, JPAUPlainAttrUniqueValue.TABLE,
                JPAUDerAttr.TABLE, JPAUVirAttr.TABLE,
                JPAMembership.TABLE, JPAMPlainAttr.TABLE, JPAMPlainAttrValue.TABLE, JPAMPlainAttrUniqueValue.TABLE,
                JPAMDerAttr.TABLE, JPAMVirAttr.TABLE
            }));

    /** Extra WHERE clause applied when dumping the given (upper-case) table. */
    protected static final Map<String, String> TABLES_TO_BE_FILTERED =
            Collections.singletonMap("TASK", "DTYPE <> 'PropagationTask'");

    /** Columns omitted from the export for the given table. */
    protected static final Map<String, Set<String>> COLUMNS_TO_BE_NULLIFIED =
            Collections.singletonMap("SYNCOPEROLE", Collections.singleton("USEROWNER_ID"));

    /**
     * @param tableName table name as reported by JDBC metadata
     * @return false when the table matches any excluded prefix, true otherwise
     */
    private boolean isTableAllowed(final String tableName) {
        boolean allowed = true;
        for (String prefix : TABLE_PREFIXES_TO_BE_EXCLUDED) {
            if (tableName.toUpperCase().startsWith(prefix)) {
                allowed = false;
            }
        }
        return allowed;
    }

    /**
     * Sorts the given tables so that referenced (primary-key side) tables
     * come before referencing (foreign-key side) ones, by building a
     * multi-parent dependency tree from the JDBC imported-keys metadata and
     * traversing it.
     *
     * @param conn live JDBC connection, used for metadata only
     * @param tableNames tables to sort
     * @return table names, FK-dependency ordered (parents first)
     * @throws SQLException on metadata access failure
     */
    private List<String> sortByForeignKeys(final Connection conn, final Set<String> tableNames)
            throws SQLException {

        // nodes not (yet) referenced by any other table
        Set<MultiParentNode<String>> roots = new HashSet<>();

        final DatabaseMetaData meta = conn.getMetaData();

        // case-insensitive lookup: JDBC drivers differ in name casing
        final Map<String, MultiParentNode<String>> exploited = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        final Set<String> pkTableNames = new HashSet<>();

        for (String tableName : tableNames) {
            MultiParentNode<String> node = exploited.get(tableName);
            if (node == null) {
                node = new MultiParentNode<>(tableName);
                roots.add(node);
                exploited.put(tableName, node);
            }

            pkTableNames.clear();

            ResultSet rs = null;
            try {
                // all tables referenced by tableName's foreign keys
                rs = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);

                // this is to avoid repetition
                while (rs.next()) {
                    pkTableNames.add(rs.getString("PKTABLE_NAME"));
                }
            } finally {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException e) {
                        LOG.error("While closing tables result set", e);
                    }
                }
            }

            for (String pkTableName : pkTableNames) {
                // self-references do not affect ordering
                if (!tableName.equalsIgnoreCase(pkTableName)) {
                    MultiParentNode<String> pkNode = exploited.get(pkTableName);
                    if (pkNode == null) {
                        pkNode = new MultiParentNode<>(pkTableName);
                        roots.add(pkNode);
                        exploited.put(pkTableName, pkNode);
                    }

                    // NOTE(review): node is mutated (addChild) while possibly
                    // stored in the roots HashSet; with a hashCode() that
                    // includes mutable state this makes roots.remove(node)
                    // unreliable — verify MultiParentNode's hashCode contract.
                    pkNode.addChild(node);

                    // node now has a parent, so it is no longer a root
                    if (roots.contains(node)) {
                        roots.remove(node);
                    }
                }
            }
        }

        final List<String> sortedTableNames = new ArrayList<>(tableNames.size());
        MultiParentNodeOp.traverseTree(roots, sortedTableNames);

        // remove from sortedTableNames any table possibly added during lookup
        // but matching some item in this.tablePrefixesToBeExcluded
        sortedTableNames.retainAll(tableNames);

        LOG.debug("Tables after retainAll {}", sortedTableNames);

        // traversal emits children first; reverse so parents are dumped first
        Collections.reverse(sortedTableNames);

        return sortedTableNames;
    }

    /**
     * Reads the value of the given column from the current row, converting to
     * an export-friendly string: binary / BLOB as hex, boolean as "0"/"1",
     * temporal types via DataFormat, anything else via getString.
     *
     * @param rs result set positioned on the row to read
     * @param columnName column to read
     * @param columnType java.sql.Types constant for the column
     * @return string representation, or null when the value is SQL NULL (or
     * a binary value could not be read)
     * @throws SQLException on JDBC read failure
     */
    private String getValues(final ResultSet rs, final String columnName, final Integer columnType)
            throws SQLException {

        String res = null;

        try {
            switch (columnType) {
                case Types.BINARY:
                case Types.VARBINARY:
                case Types.LONGVARBINARY:
                    final InputStream is = rs.getBinaryStream(columnName);
                    if (is != null) {
                        // hex-encode so arbitrary bytes survive the XML round trip
                        res = new String(Hex.encode(IOUtils.toByteArray(is)));
                    }
                    break;

                case Types.BLOB:
                    final Blob blob = rs.getBlob(columnName);
                    if (blob != null) {
                        res = new String(Hex.encode(IOUtils.toByteArray(blob.getBinaryStream())));
                    }
                    break;

                case Types.BIT:
                case Types.BOOLEAN:
                    // normalize booleans to "1"/"0" regardless of DBMS
                    if (rs.getBoolean(columnName)) {
                        res = "1";
                    } else {
                        res = "0";
                    }
                    break;

                case Types.DATE:
                case Types.TIME:
                case Types.TIMESTAMP:
                    final Timestamp timestamp = rs.getTimestamp(columnName);
                    if (timestamp != null) {
                        res = DataFormat.format(new Date(timestamp.getTime()));
                    }
                    break;

                default:
                    res = rs.getString(columnName);
            }
        } catch (IOException e) {
            // binary read failed: export the column as absent rather than abort
            LOG.error("Error retrieving hexadecimal string", e);
        }

        return res;
    }

    /**
     * Dumps all rows of a single table as XML elements named after the table,
     * one attribute per non-null, non-nullified column, ordered by primary
     * key for reproducible output.
     *
     * @param handler SAX handler receiving the elements
     * @param conn live JDBC connection
     * @param tableName table to dump
     * @param whereClause optional filter (may be null/blank)
     * @throws SQLException on JDBC failure
     * @throws SAXException on XML emission failure
     */
    private void doExportTable(final TransformerHandler handler, final Connection conn, final String tableName,
            final String whereClause) throws SQLException, SAXException {

        LOG.debug("Export table {}", tableName);

        AttributesImpl attrs = new AttributesImpl();

        PreparedStatement stmt = null;
        ResultSet rs = null;
        ResultSet pkeyRS = null;
        try {
            // ------------------------------------
            // retrieve primary keys to perform an ordered select

            final DatabaseMetaData meta = conn.getMetaData();
            pkeyRS = meta.getPrimaryKeys(null, null, tableName);

            final StringBuilder orderBy = new StringBuilder();

            while (pkeyRS.next()) {
                final String columnName = pkeyRS.getString("COLUMN_NAME");
                if (columnName != null) {
                    if (orderBy.length() > 0) {
                        orderBy.append(",");
                    }

                    orderBy.append(columnName);
                }
            }

            // ------------------------------------
            // NOTE(review): tableName and whereClause are concatenated into
            // the SQL; both originate from JDBC metadata / static maps here,
            // not from user input — confirm no external caller can feed them.
            StringBuilder query = new StringBuilder();
            query.append("SELECT * FROM ").append(tableName).append(" a");
            if (StringUtils.isNotBlank(whereClause)) {
                query.append(" WHERE ").append(whereClause);
            }
            if (orderBy.length() > 0) {
                query.append(" ORDER BY ").append(orderBy);
            }
            stmt = conn.prepareStatement(query.toString());

            rs = stmt.executeQuery();
            while (rs.next()) {
                // attrs is reused across rows, so reset it each iteration
                attrs.clear();

                final ResultSetMetaData rsMeta = rs.getMetaData();
                for (int i = 0; i < rsMeta.getColumnCount(); i++) {
                    final String columnName = rsMeta.getColumnName(i + 1);
                    final Integer columnType = rsMeta.getColumnType(i + 1);

                    // Retrieve value taking care of binary values.
                    String value = getValues(rs, columnName, columnType);
                    if (value != null && (!COLUMNS_TO_BE_NULLIFIED.containsKey(tableName)
                            || !COLUMNS_TO_BE_NULLIFIED.get(tableName).contains(columnName))) {

                        attrs.addAttribute("", "", columnName, "CDATA", value);
                    }
                }

                // one empty element per row, all data carried as attributes
                handler.startElement("", "", tableName, attrs);
                handler.endElement("", "", tableName);

                LOG.debug("Add record {}", attrs);
            }
        } finally {
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }
    }

    /**
     * Exports the whole allowed database content to the given stream as an
     * indented XML document rooted at ROOT_ELEMENT.
     *
     * @param os destination stream (not closed by this method)
     * @param wfTablePrefix extra table prefix to exclude (workflow tables),
     * ignored when blank; note this mutates the static exclusion set
     * @throws SAXException on XML emission failure
     * @throws TransformerConfigurationException if no SAX transformer is available
     */
    public void export(final OutputStream os, final String wfTablePrefix)
            throws SAXException, TransformerConfigurationException {

        if (StringUtils.isNotBlank(wfTablePrefix)) {
            TABLE_PREFIXES_TO_BE_EXCLUDED.add(wfTablePrefix);
        }

        StreamResult streamResult = new StreamResult(os);
        final SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();

        TransformerHandler handler = transformerFactory.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");
        handler.setResult(streamResult);
        handler.startDocument();
        handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());

        Connection conn = null;
        ResultSet rs = null;
        try {
            conn = DataSourceUtils.getConnection(dataSource);
            final DatabaseMetaData meta = conn.getMetaData();

            rs = meta.getTables(null, StringUtils.isBlank(dbSchema) ? null : dbSchema, null, new String[] { "TABLE" });

            final Set<String> tableNames = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);

            while (rs.next()) {
                String tableName = rs.getString("TABLE_NAME");
                LOG.debug("Found table {}", tableName);
                if (isTableAllowed(tableName)) {
                    tableNames.add(tableName);
                }
            }

            LOG.debug("Tables to be exported {}", tableNames);

            // then sort tables based on foreign keys and dump
            for (String tableName : sortByForeignKeys(conn, tableNames)) {
                try {
                    // per-table failures are logged but do not abort the export
                    doExportTable(handler, conn, tableName, TABLES_TO_BE_FILTERED.get(tableName.toUpperCase()));
                } catch (Exception e) {
                    LOG.error("Failure exporting table {}", tableName, e);
                }
            }
        } catch (SQLException e) {
            LOG.error("While exporting database content", e);
        } finally {
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    LOG.error("While closing tables result set", e);
                }
            }

            // NOTE(review): releaseConnection already closes / returns the
            // connection to the pool; the explicit close below looks
            // redundant — confirm against Spring DataSourceUtils semantics.
            DataSourceUtils.releaseConnection(conn, dataSource);
            if (conn != null) {
                try {
                    if (!conn.isClosed()) {
                        conn.close();
                    }
                } catch (SQLException e) {
                    LOG.error("While releasing connection", e);
                }
            }
        }

        handler.endElement("", "", ROOT_ELEMENT);
        handler.endDocument();
    }
}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentLoader.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentLoader.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentLoader.java
new file mode 100644
index 0000000..ac24f8c
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/content/XMLContentLoader.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.content;
+
+import java.io.IOException;
+import java.io.InputStream;
+import javax.annotation.Resource;
+import javax.xml.parsers.SAXParser;
+import javax.xml.parsers.SAXParserFactory;
+import org.apache.commons.io.IOUtils;
+import org.apache.syncope.server.persistence.api.content.ContentLoader;
+import org.apache.syncope.server.persistence.jpa.entity.conf.JPAConf;
+import org.apache.syncope.server.misc.spring.ResourceWithFallbackLoader;
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
+
+/**
+ * Initialize Database with default content if no data is present already.
+ */
+@Component
+public class XMLContentLoader extends AbstractContentDealer implements ContentLoader {
+
+ @Resource(name = "contentXML")
+ private ResourceWithFallbackLoader contentXML;
+
+ @Transactional
+ @Override
+ public void load() {
+ JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
+
+ boolean existingData;
+ try {
+ existingData = jdbcTemplate.queryForObject("SELECT COUNT(0) FROM " + JPAConf.TABLE, Integer.class) > 0;
+ } catch (DataAccessException e) {
+ LOG.error("Could not access to table " + JPAConf.TABLE, e);
+ existingData = true;
+ }
+
+ if (existingData) {
+ LOG.info("Data found in the database, leaving untouched");
+ } else {
+ LOG.info("Empty database found, loading default content");
+
+ try {
+ loadDefaultContent();
+ } catch (Exception e) {
+ LOG.error("While loading default content", e);
+ }
+ try {
+ createIndexes();
+ createViews();
+ } catch (IOException e) {
+ LOG.error("While creating indexes and views", e);
+ }
+ }
+ }
+
+ private void loadDefaultContent() throws Exception {
+ SAXParserFactory factory = SAXParserFactory.newInstance();
+ InputStream in = null;
+ try {
+ in = contentXML.getResource().getInputStream();
+
+ SAXParser parser = factory.newSAXParser();
+ parser.parse(in, new ContentLoaderHandler(dataSource, ROOT_ELEMENT));
+ LOG.debug("Default content successfully loaded");
+ } finally {
+ IOUtils.closeQuietly(in);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractDAO.java
new file mode 100644
index 0000000..645cd70
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractDAO.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.List;
+import javax.persistence.CacheRetrieveMode;
+import javax.persistence.CacheStoreMode;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.syncope.server.persistence.api.dao.DAO;
+import org.apache.syncope.server.persistence.api.dao.search.OrderByClause;
+import org.apache.syncope.server.persistence.api.entity.Entity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Configurable;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.util.ReflectionUtils;
+
+@Configurable
+public abstract class AbstractDAO<E extends Entity<KEY>, KEY> implements DAO<E, KEY> {
+
+ /**
+ * Logger.
+ */
+ protected static final Logger LOG = LoggerFactory.getLogger(DAO.class);
+
+ private static final String CACHE_STORE_MODE = "javax.persistence.cache.storeMode";
+
+ private static final String CACHE_RETRIEVE_MODE = "javax.persistence.cache.retrieveMode";
+
+ @Value("#{entityManager}")
+ @PersistenceContext(type = PersistenceContextType.TRANSACTION)
+ protected EntityManager entityManager;
+
+ protected CacheRetrieveMode getCacheRetrieveMode() {
+ return entityManager.getProperties().containsKey(CACHE_RETRIEVE_MODE)
+ ? (CacheRetrieveMode) entityManager.getProperties().get(CACHE_RETRIEVE_MODE)
+ : CacheRetrieveMode.BYPASS;
+ }
+
+ protected void setCacheRetrieveMode(final CacheRetrieveMode retrieveMode) {
+ if (retrieveMode != null) {
+ entityManager.getProperties().put(CACHE_RETRIEVE_MODE, retrieveMode);
+ }
+ }
+
+ protected CacheStoreMode getCacheStoreMode() {
+ return entityManager.getProperties().containsKey(CACHE_STORE_MODE)
+ ? (CacheStoreMode) entityManager.getProperties().get(CACHE_STORE_MODE)
+ : CacheStoreMode.BYPASS;
+ }
+
+ protected void setCacheStoreMode(final CacheStoreMode storeMode) {
+ if (storeMode != null) {
+ entityManager.getProperties().put(CACHE_STORE_MODE, storeMode);
+ }
+ }
+
+ protected String toOrderByStatement(final Class<? extends Entity<KEY>> beanClass, final String prefix,
+ final List<OrderByClause> orderByClauses) {
+
+ StringBuilder statement = new StringBuilder();
+
+ for (OrderByClause clause : orderByClauses) {
+ String field = clause.getField().trim();
+ if (ReflectionUtils.findField(beanClass, field) != null) {
+ if (StringUtils.isNotBlank(prefix)) {
+ statement.append(prefix).append('.');
+ }
+ statement.append(field).append(' ').append(clause.getDirection().name());
+ }
+ }
+
+ if (statement.length() > 0) {
+ statement.insert(0, "ORDER BY ");
+ }
+ return statement.toString();
+ }
+
+ @Override
+ public void refresh(final E entity) {
+ entityManager.refresh(entity);
+ }
+
+ @Override
+ public void detach(final E entity) {
+ entityManager.detach(entity);
+ }
+
+ @Override
+ public void flush() {
+ entityManager.flush();
+ }
+
+ @Override
+ public void clear() {
+ entityManager.clear();
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractSubjectDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractSubjectDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractSubjectDAO.java
new file mode 100644
index 0000000..ba4ce69
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/AbstractSubjectDAO.java
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.regex.Pattern;
+import javax.persistence.Query;
+import javax.persistence.TemporalType;
+import org.apache.commons.jexl2.parser.Parser;
+import org.apache.commons.jexl2.parser.ParserConstants;
+import org.apache.commons.jexl2.parser.Token;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.syncope.server.persistence.api.dao.DerSchemaDAO;
+import org.apache.syncope.server.persistence.api.dao.PlainSchemaDAO;
+import org.apache.syncope.server.persistence.api.dao.SubjectDAO;
+import org.apache.syncope.server.persistence.api.entity.AttributableUtil;
+import org.apache.syncope.server.persistence.api.entity.DerAttr;
+import org.apache.syncope.server.persistence.api.entity.DerSchema;
+import org.apache.syncope.server.persistence.api.entity.ExternalResource;
+import org.apache.syncope.server.persistence.api.entity.PlainAttr;
+import org.apache.syncope.server.persistence.api.entity.PlainAttrValue;
+import org.apache.syncope.server.persistence.api.entity.PlainSchema;
+import org.apache.syncope.server.persistence.api.entity.Subject;
+import org.apache.syncope.server.persistence.api.entity.VirAttr;
+import org.apache.syncope.server.persistence.jpa.entity.AbstractPlainAttrValue;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Base DAO for subjects (users / roles), providing lookup by plain attribute value,
+ * unique attribute value, derived attribute value and assigned external resource.
+ */
+abstract class AbstractSubjectDAO<P extends PlainAttr, D extends DerAttr, V extends VirAttr>
+ extends AbstractDAO<Subject<P, D, V>, Long> implements SubjectDAO<P, D, V> {
+
+ @Autowired
+ protected PlainSchemaDAO plainSchemaDAO;
+
+ @Autowired
+ protected DerSchemaDAO derSchemaDAO;
+
+ /**
+ * Split an attribute value recursing on the provided literals/tokens.
+ *
+ * @param attrValue value to be split
+ * @param literals literals/tokens
+ * @return split value
+ */
+ private List<String> split(final String attrValue, final List<String> literals) {
+ final List<String> attrValues = new ArrayList<>();
+
+ if (literals.isEmpty()) {
+ attrValues.add(attrValue);
+ } else {
+ for (String token : attrValue.split(Pattern.quote(literals.get(0)))) {
+ attrValues.addAll(split(token, literals.subList(1, literals.size())));
+ }
+ }
+
+ return attrValues;
+ }
+
+ /**
+ * Generate one where clause for each different attribute schema into the derived schema expression provided.
+ *
+ * @param expression derived schema expression
+ * @param value derived attribute value
+ * @param attrUtil USER / ROLE
+ * @return where clauses to use to build the query
+ */
+ private Set<String> getWhereClause(final String expression, final String value, final AttributableUtil attrUtil) {
+ final Parser parser = new Parser(new StringReader(expression));
+
+ // Schema names
+ final List<String> identifiers = new ArrayList<>();
+
+ // Literals
+ final List<String> literals = new ArrayList<>();
+
+ // Get schema names and literals
+ Token token;
+ while ((token = parser.getNextToken()) != null && StringUtils.isNotBlank(token.toString())) {
+ if (token.kind == ParserConstants.STRING_LITERAL) {
+ // strip the surrounding quotes from the JEXL string literal
+ literals.add(token.toString().substring(1, token.toString().length() - 1));
+ }
+
+ if (token.kind == ParserConstants.IDENTIFIER) {
+ identifiers.add(token.toString());
+ }
+ }
+
+ // Sort literals in order to process later literals included into others:
+ // longest first, so that a literal contained within a longer one does not split too early
+ Collections.sort(literals, new Comparator<String>() {
+
+ @Override
+ public int compare(final String t, final String t1) {
+ if (t == null && t1 == null) {
+ return 0;
+ } else if (t != null && t1 == null) {
+ return -1;
+ } else if (t == null && t1 != null) {
+ return 1;
+ } else {
+ // descending by length; equal lengths compare as equal
+ return Integer.compare(t1.length(), t.length());
+ }
+ }
+ });
+
+ // Split value on provided literals
+ final List<String> attrValues = split(value, literals);
+
+ if (attrValues.size() != identifiers.size()) {
+ LOG.error("Ambiguous JEXL expression resolution.");
+ throw new IllegalArgumentException("literals and values have different size");
+ }
+
+ // clauses to be used with INTERSECTed queries
+ final Set<String> clauses = new HashSet<>();
+
+ // builder to build the clauses
+ final StringBuilder bld = new StringBuilder();
+
+ // Contains used identifiers in order to avoid replications
+ final Set<String> used = new HashSet<>();
+
+ // Create several clauses: one for each identifier
+ for (int i = 0; i < identifiers.size(); i++) {
+ if (!used.contains(identifiers.get(i))) {
+
+ // verify schema existence and get schema type
+ PlainSchema schema = plainSchemaDAO.find(identifiers.get(i), attrUtil.plainSchemaClass());
+ if (schema == null) {
+ LOG.error("Invalid schema name '{}'", identifiers.get(i));
+ throw new IllegalArgumentException("Invalid schema name " + identifiers.get(i));
+ }
+
+ // clear builder
+ bld.delete(0, bld.length());
+
+ bld.append("(");
+
+ // set schema name
+ bld.append("s.name = '").append(identifiers.get(i)).append("'");
+
+ bld.append(" AND ");
+
+ bld.append("s.name = a.schema_name").append(" AND ");
+
+ bld.append("a.id = v.attribute_id");
+
+ bld.append(" AND ");
+
+ // use a value clause different for each schema type
+ // NOTE(review): attribute values are concatenated into native SQL here; this is safe
+ // only as long as values were validated against the schema type upstream - consider
+ // switching to positional parameters
+ switch (schema.getType()) {
+ case Boolean:
+ bld.append("v.booleanValue = '").append(attrValues.get(i)).append("'");
+ break;
+ case Long:
+ bld.append("v.longValue = ").append(attrValues.get(i));
+ break;
+ case Double:
+ bld.append("v.doubleValue = ").append(attrValues.get(i));
+ break;
+ case Date:
+ bld.append("v.dateValue = '").append(attrValues.get(i)).append("'");
+ break;
+ default:
+ bld.append("v.stringValue = '").append(attrValues.get(i)).append("'");
+ }
+
+ bld.append(")");
+
+ used.add(identifiers.get(i));
+
+ clauses.add(bld.toString());
+ }
+ }
+
+ LOG.debug("Generated where clauses {}", clauses);
+
+ return clauses;
+ }
+
+ /**
+ * Looks up the subject with the given key.
+ *
+ * @param key subject key
+ * @return matching subject, or null when not found
+ */
+ protected abstract Subject<P, D, V> findInternal(Long key);
+
+ /**
+ * Builds the JPQL query matching attribute values of the given schema.
+ * The typed value disjunction is parenthesized so that the schema name condition applies
+ * to every branch: without the extra parentheses, JPQL operator precedence (AND binds
+ * tighter than OR) would restrict :schemaName to the stringValue branch only, matching
+ * boolean / date / long / double values regardless of schema.
+ */
+ private Query findByAttrValueQuery(final String entityName) {
+ return entityManager.createQuery("SELECT e FROM " + entityName + " e"
+ + " WHERE e.attribute.schema.name = :schemaName AND ((e.stringValue IS NOT NULL"
+ + " AND e.stringValue = :stringValue)"
+ + " OR (e.booleanValue IS NOT NULL AND e.booleanValue = :booleanValue)"
+ + " OR (e.dateValue IS NOT NULL AND e.dateValue = :dateValue)"
+ + " OR (e.longValue IS NOT NULL AND e.longValue = :longValue)"
+ + " OR (e.doubleValue IS NOT NULL AND e.doubleValue = :doubleValue))");
+ }
+
+ /**
+ * Finds subjects owning a plain attribute of the given schema with the given value.
+ *
+ * @param schemaName plain schema name
+ * @param attrValue value to match (each typed slot is bound, nulls included)
+ * @param attrUtil USER / ROLE
+ * @return matching subjects, without duplicates; empty list when the schema is unknown
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public List<? extends Subject<P, D, V>> findByAttrValue(
+ final String schemaName, final PlainAttrValue attrValue, final AttributableUtil attrUtil) {
+
+ PlainSchema schema = plainSchemaDAO.find(schemaName, attrUtil.plainSchemaClass());
+ if (schema == null) {
+ LOG.error("Invalid schema name '{}'", schemaName);
+ return Collections.<Subject<P, D, V>>emptyList();
+ }
+
+ // unique-constrained schemas store their values in a dedicated entity
+ final String entityName = schema.isUniqueConstraint()
+ ? attrUtil.plainAttrUniqueValueClass().getName()
+ : attrUtil.plainAttrValueClass().getName();
+
+ Query query = findByAttrValueQuery(entityName);
+
+ query.setParameter("schemaName", schemaName);
+ query.setParameter("stringValue", attrValue.getStringValue());
+ // booleans are persisted as integers, see AbstractPlainAttrValue
+ query.setParameter("booleanValue", attrValue.getBooleanValue() == null
+ ? null
+ : ((AbstractPlainAttrValue) attrValue).getBooleanAsInteger(attrValue.getBooleanValue()));
+ if (attrValue.getDateValue() == null) {
+ query.setParameter("dateValue", null);
+ } else {
+ query.setParameter("dateValue", attrValue.getDateValue(), TemporalType.TIMESTAMP);
+ }
+ query.setParameter("longValue", attrValue.getLongValue());
+ query.setParameter("doubleValue", attrValue.getDoubleValue());
+
+ // collect owners, de-duplicating subjects owning several matching values
+ List<Subject<P, D, V>> result = new ArrayList<>();
+ for (PlainAttrValue value : (List<PlainAttrValue>) query.getResultList()) {
+ Subject<P, D, V> subject = (Subject<P, D, V>) value.getAttr().getOwner();
+ if (!result.contains(subject)) {
+ result.add(subject);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Finds the single subject owning the given unique attribute value.
+ *
+ * @param schemaName plain schema name (must declare a unique constraint)
+ * @param attrUniqueValue unique value to match
+ * @param attrUtil USER / ROLE
+ * @return matching subject, or null when the schema is unknown, not unique, or no match exists
+ */
+ @Override
+ public Subject<P, D, V> findByAttrUniqueValue(
+ final String schemaName, final PlainAttrValue attrUniqueValue, final AttributableUtil attrUtil) {
+
+ PlainSchema schema = plainSchemaDAO.find(schemaName, attrUtil.plainSchemaClass());
+ if (schema == null) {
+ LOG.error("Invalid schema name '{}'", schemaName);
+ return null;
+ }
+ if (!schema.isUniqueConstraint()) {
+ LOG.error("This schema has not unique constraint: '{}'", schemaName);
+ return null;
+ }
+
+ List<? extends Subject<P, D, V>> result = findByAttrValue(schemaName, attrUniqueValue, attrUtil);
+ return result.isEmpty()
+ ? null
+ : result.iterator().next();
+ }
+
+ /**
+ * Find users / roles by derived attribute value. This method could fail if one or more string literals contained
+ * into the derived attribute value provided derive from identifier (schema name) replacement. When you are going to
+ * specify a derived attribute expression you must be quite sure that string literals used to build the expression
+ * cannot be found into the attribute values used to replace attribute schema names used as identifiers.
+ *
+ * @param schemaName derived schema name
+ * @param value derived attribute value
+ * @param attrUtil AttributableUtil
+ * @return list of users / roles
+ */
+ @Override
+ public List<? extends Subject<P, D, V>> findByDerAttrValue(
+ final String schemaName, final String value, final AttributableUtil attrUtil) {
+
+ DerSchema schema = derSchemaDAO.find(schemaName, attrUtil.derSchemaClass());
+ if (schema == null) {
+ LOG.error("Invalid schema name '{}'", schemaName);
+ return Collections.<Subject<P, D, V>>emptyList();
+ }
+
+ // query string
+ final StringBuilder querystring = new StringBuilder();
+
+ // each clause after the first is AND-ed in as an owner_id subquery (poor man's INTERSECT)
+ boolean subquery = false;
+ for (String clause : getWhereClause(schema.getExpression(), value, attrUtil)) {
+ if (querystring.length() > 0) {
+ subquery = true;
+ querystring.append(" AND a.owner_id IN ( ");
+ }
+
+ // table names are derived from the JPA entity simple names, stripping the "JPA" prefix
+ querystring.append("SELECT a.owner_id ").
+ append("FROM ").append(attrUtil.plainAttrClass().getSimpleName().substring(3)).append(" a, ").
+ append(attrUtil.plainAttrValueClass().getSimpleName().substring(3)).append(" v, ").
+ append(attrUtil.plainSchemaClass().getSimpleName().substring(3)).append(" s ").
+ append("WHERE ").append(clause);
+
+ if (subquery) {
+ querystring.append(')');
+ }
+ }
+
+ LOG.debug("Execute query {}", querystring);
+
+ final Query query = entityManager.createNativeQuery(querystring.toString());
+
+ // de-duplicate subjects matching more than once
+ final List<Subject<P, D, V>> result = new ArrayList<>();
+ for (Object userId : query.getResultList()) {
+ Subject<P, D, V> subject = findInternal(Long.parseLong(userId.toString()));
+ if (!result.contains(subject)) {
+ result.add(subject);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Finds subjects with the given external resource assigned.
+ *
+ * @param resource external resource
+ * @param attrUtil USER / ROLE
+ * @return matching subjects
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public List<? extends Subject<P, D, V>> findByResource(
+ final ExternalResource resource, final AttributableUtil attrUtil) {
+
+ Query query = entityManager.createQuery(
+ "SELECT e FROM " + attrUtil.attributableClass().getSimpleName() + " e "
+ + "WHERE :resource MEMBER OF e.resources");
+ query.setParameter("resource", resource);
+
+ return query.getResultList();
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAAttrTemplateDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAAttrTemplateDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAAttrTemplateDAO.java
new file mode 100644
index 0000000..9e5cfa7
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAAttrTemplateDAO.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.Collections;
+import java.util.List;
+import javax.persistence.Query;
+import org.apache.syncope.server.persistence.api.dao.AttrTemplateDAO;
+import org.apache.syncope.server.persistence.api.entity.AttrTemplate;
+import org.apache.syncope.server.persistence.api.entity.Schema;
+import org.apache.syncope.server.persistence.api.entity.membership.MDerAttrTemplate;
+import org.apache.syncope.server.persistence.api.entity.membership.MPlainAttrTemplate;
+import org.apache.syncope.server.persistence.api.entity.membership.MVirAttrTemplate;
+import org.apache.syncope.server.persistence.api.entity.role.RDerAttrTemplate;
+import org.apache.syncope.server.persistence.api.entity.role.RPlainAttrTemplate;
+import org.apache.syncope.server.persistence.api.entity.role.RVirAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.AbstractAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMDerAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMPlainAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMVirAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.role.JPARDerAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.role.JPARPlainAttrTemplate;
+import org.apache.syncope.server.persistence.jpa.entity.role.JPARVirAttrTemplate;
+import org.springframework.stereotype.Repository;
+import org.springframework.util.ReflectionUtils;
+
+@Repository
+public class JPAAttrTemplateDAO<S extends Schema>
+ extends AbstractDAO<AttrTemplate<S>, Long> implements AttrTemplateDAO<S> {
+
+ /**
+ * Maps an API-level attribute template interface onto its concrete JPA entity class.
+ *
+ * @param reference API-level template interface (membership / role, plain / derived / virtual)
+ * @return matching JPA entity class, or null when the reference is none of the six known
+ * template types. NOTE(review): a null return would make the entityManager.find() and
+ * reflection calls below fail - confirm callers only pass known template types.
+ */
+ private <T extends AttrTemplate<S>> Class<? extends AbstractAttrTemplate<? extends Schema>> getJPAEntityReference(
+ final Class<T> reference) {
+
+ return MPlainAttrTemplate.class.isAssignableFrom(reference)
+ ? JPAMPlainAttrTemplate.class
+ : MDerAttrTemplate.class.isAssignableFrom(reference)
+ ? JPAMDerAttrTemplate.class
+ : MVirAttrTemplate.class.isAssignableFrom(reference)
+ ? JPAMVirAttrTemplate.class
+ : RPlainAttrTemplate.class.isAssignableFrom(reference)
+ ? JPARPlainAttrTemplate.class
+ : RDerAttrTemplate.class.isAssignableFrom(reference)
+ ? JPARDerAttrTemplate.class
+ : RVirAttrTemplate.class.isAssignableFrom(reference)
+ ? JPARVirAttrTemplate.class
+ : null;
+ }
+
+ /**
+ * Finds the attribute template with the given key.
+ *
+ * @param key template key
+ * @param reference API-level template interface
+ * @return matching template, or null when not found
+ */
+ @Override
+ public <T extends AttrTemplate<S>> T find(final Long key, final Class<T> reference) {
+ return reference.cast(entityManager.find(getJPAEntityReference(reference), key));
+ }
+
+ /**
+ * Finds keys of all templates referring to the given schema, via native query on the
+ * template table (the table name is read reflectively from the entity's TABLE constant).
+ *
+ * @param schemaName schema name to match
+ * @param reference API-level template interface
+ * @return template keys; empty list when query building fails (failure is logged, not rethrown)
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T extends AttrTemplate<S>> List<Number> findBySchemaName(
+ final String schemaName, final Class<T> reference) {
+
+ Query query = null;
+ try {
+ query = entityManager.createNativeQuery("SELECT id FROM "
+ + ReflectionUtils.findField(getJPAEntityReference(reference), "TABLE").get(null).toString()
+ + " WHERE schema_name=?1");
+ query.setParameter(1, schemaName);
+ } catch (Exception e) {
+ // reflective access to TABLE may fail; degrade to an empty result
+ LOG.error("Unexpected exception", e);
+ }
+
+ return query == null ? Collections.<Number>emptyList() : query.getResultList();
+ }
+
+ /**
+ * Deletes the attribute template with the given key, if it exists.
+ *
+ * @param key template key
+ * @param reference API-level template interface
+ */
+ @Override
+ public <T extends AttrTemplate<S>> void delete(final Long key, final Class<T> reference) {
+ T attrTemplate = find(key, reference);
+ if (attrTemplate == null) {
+ return;
+ }
+
+ delete(attrTemplate);
+ }
+
+ /**
+ * Deletes the given attribute template, first detaching it from its owner's
+ * template collection so the in-memory state stays consistent.
+ *
+ * @param attrTemplate template to delete
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T extends AttrTemplate<S>> void delete(final T attrTemplate) {
+ if (attrTemplate.getOwner() != null) {
+ attrTemplate.getOwner().getAttrTemplates(attrTemplate.getClass()).remove(attrTemplate);
+ }
+
+ entityManager.remove(attrTemplate);
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConfDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConfDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConfDAO.java
new file mode 100644
index 0000000..bd83e6a
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConfDAO.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import org.apache.syncope.common.lib.types.AttributableType;
+import org.apache.syncope.server.persistence.api.dao.ConfDAO;
+import org.apache.syncope.server.persistence.api.dao.PlainAttrDAO;
+import org.apache.syncope.server.persistence.api.dao.PlainSchemaDAO;
+import org.apache.syncope.server.persistence.api.entity.AttributableUtilFactory;
+import org.apache.syncope.server.persistence.api.entity.conf.CPlainAttr;
+import org.apache.syncope.server.persistence.api.entity.conf.CPlainSchema;
+import org.apache.syncope.server.persistence.api.entity.conf.Conf;
+import org.apache.syncope.server.persistence.jpa.entity.conf.JPACPlainAttr;
+import org.apache.syncope.server.persistence.jpa.entity.conf.JPAConf;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+import org.springframework.transaction.annotation.Transactional;
+
+@Repository
+public class JPAConfDAO extends AbstractDAO<Conf, Long> implements ConfDAO {
+
+ @Autowired
+ private PlainSchemaDAO schemaDAO;
+
+ @Autowired
+ private PlainAttrDAO attrDAO;
+
+ @Autowired
+ private AttributableUtilFactory attrUtilFactory;
+
+ /**
+ * Returns the single configuration entity (fixed key 1), creating and
+ * persisting an empty one on first access.
+ *
+ * @return the singleton configuration entity
+ */
+ @Override
+ public Conf get() {
+ Conf instance = entityManager.find(JPAConf.class, 1L);
+ if (instance == null) {
+ // first access: lazily create the singleton configuration row
+ instance = new JPAConf();
+ instance.setKey(1L);
+
+ instance = entityManager.merge(instance);
+ }
+
+ return instance;
+ }
+
+ /**
+ * Returns the configuration attribute for the given schema key.
+ *
+ * @param key schema key
+ * @return matching attribute, or null when not set
+ */
+ @Transactional(readOnly = true)
+ @Override
+ public CPlainAttr find(final String key) {
+ return get().getPlainAttr(key);
+ }
+
+ /**
+ * Returns the configuration attribute for the given schema key; when not set, returns a
+ * transient (NOT persisted) attribute carrying the provided default value.
+ * NOTE(review): when the schema key is unknown, the returned transient attribute has a
+ * null schema - confirm callers tolerate this.
+ *
+ * @param key schema key
+ * @param defaultValue value for the transient fallback attribute
+ * @return matching attribute, or a transient attribute holding the default value
+ */
+ @Transactional(readOnly = true)
+ @Override
+ public CPlainAttr find(final String key, final String defaultValue) {
+ CPlainAttr result = find(key);
+ if (result == null) {
+ result = new JPACPlainAttr();
+ result.setSchema(schemaDAO.find(key, CPlainSchema.class));
+
+ result.addValue(defaultValue, attrUtilFactory.getInstance(AttributableType.CONFIGURATION));
+ }
+
+ return result;
+ }
+
+ /**
+ * Stores the given attribute into the configuration, replacing any existing attribute for
+ * the same schema unless it is unique-constrained and already carries the same value.
+ * NOTE(review): assumes getUniqueValue() is non-null when the schema declares a unique
+ * constraint - confirm upstream validation guarantees this.
+ *
+ * @param attr attribute to store
+ * @return merged configuration entity
+ */
+ @Override
+ public Conf save(final CPlainAttr attr) {
+ Conf instance = get();
+
+ CPlainAttr old = instance.getPlainAttr(attr.getSchema().getKey());
+ if (old != null && (!attr.getSchema().isUniqueConstraint()
+ || (!attr.getUniqueValue().getStringValue().equals(old.getUniqueValue().getStringValue())))) {
+
+ instance.removePlainAttr(old);
+ attrDAO.delete(old.getKey(), CPlainAttr.class);
+ }
+
+ instance.addPlainAttr(attr);
+ attr.setOwner(instance);
+
+ return entityManager.merge(instance);
+ }
+
+ /**
+ * Removes the configuration attribute for the given schema key, if present.
+ *
+ * @param key schema key
+ * @return (possibly updated) configuration entity
+ */
+ @Override
+ public Conf delete(final String key) {
+ Conf instance = get();
+ CPlainAttr attr = instance.getPlainAttr(key);
+ if (attr != null) {
+ instance.removePlainAttr(attr);
+ instance = entityManager.merge(instance);
+ }
+
+ return instance;
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConnInstanceDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConnInstanceDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConnInstanceDAO.java
new file mode 100644
index 0000000..e080233
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAConnInstanceDAO.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import javax.persistence.TypedQuery;
+import org.apache.syncope.server.persistence.api.dao.ConnInstanceDAO;
+import org.apache.syncope.server.persistence.api.dao.ExternalResourceDAO;
+import org.apache.syncope.server.persistence.api.dao.NotFoundException;
+import org.apache.syncope.server.persistence.api.entity.ConnInstance;
+import org.apache.syncope.server.persistence.api.entity.ExternalResource;
+import org.apache.syncope.server.persistence.jpa.entity.JPAConnInstance;
+import org.apache.syncope.server.provisioning.api.ConnectorRegistry;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class JPAConnInstanceDAO extends AbstractDAO<ConnInstance, Long> implements ConnInstanceDAO {
+
+ @Autowired
+ private ExternalResourceDAO resourceDAO;
+
+ @Autowired
+ private ConnectorRegistry connRegistry;
+
+ /**
+ * Finds the connector instance with the given key.
+ *
+ * @param key connector instance key
+ * @return matching connector instance, or null when not found
+ */
+ @Override
+ public ConnInstance find(final Long key) {
+ return entityManager.find(JPAConnInstance.class, key);
+ }
+
+ /**
+ * Returns all connector instances.
+ *
+ * @return all connector instances
+ */
+ @Override
+ public List<ConnInstance> findAll() {
+ TypedQuery<ConnInstance> query = entityManager.createQuery(
+ "SELECT e FROM " + JPAConnInstance.class.getSimpleName() + " e", ConnInstance.class);
+ return query.getResultList();
+ }
+
+ /**
+ * Saves the given connector instance, then (re)registers a connector for each
+ * resource using it; registration failures are logged and skipped, not rethrown.
+ *
+ * @param connector connector instance to save
+ * @return merged connector instance
+ */
+ @Override
+ public ConnInstance save(final ConnInstance connector) {
+ final ConnInstance merged = entityManager.merge(connector);
+
+ for (ExternalResource resource : merged.getResources()) {
+ try {
+ connRegistry.registerConnector(resource);
+ } catch (NotFoundException e) {
+ LOG.error("While registering connector for resource", e);
+ }
+ }
+
+ return merged;
+ }
+
+ /**
+ * Deletes the connector instance with the given key, first deleting every resource
+ * using it, then unregistering the connector.
+ *
+ * @param key connector instance key (no-op when not found)
+ */
+ @Override
+ public void delete(final Long key) {
+ ConnInstance connInstance = find(key);
+ if (connInstance == null) {
+ return;
+ }
+
+ // snapshot resource names first: deleting resources presumably mutates the
+ // underlying collection, so iterating it directly could fail - TODO confirm
+ Set<String> resourceNames = new HashSet<>(connInstance.getResources().size());
+ for (ExternalResource resource : connInstance.getResources()) {
+ resourceNames.add(resource.getKey());
+ }
+ for (String resourceName : resourceNames) {
+ resourceDAO.delete(resourceName);
+ }
+
+ entityManager.remove(connInstance);
+
+ connRegistry.unregisterConnector(key.toString());
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerAttrDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerAttrDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerAttrDAO.java
new file mode 100644
index 0000000..d44de33
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerAttrDAO.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.List;
+import javax.persistence.TypedQuery;
+import org.apache.syncope.server.persistence.api.dao.DerAttrDAO;
+import org.apache.syncope.server.persistence.api.entity.Attributable;
+import org.apache.syncope.server.persistence.api.entity.DerAttr;
+import org.apache.syncope.server.persistence.api.entity.membership.MDerAttr;
+import org.apache.syncope.server.persistence.api.entity.role.RDerAttr;
+import org.apache.syncope.server.persistence.api.entity.user.UDerAttr;
+import org.apache.syncope.server.persistence.jpa.entity.AbstractDerAttr;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMDerAttr;
+import org.apache.syncope.server.persistence.jpa.entity.role.JPARDerAttr;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUDerAttr;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class JPADerAttrDAO extends AbstractDAO<DerAttr, Long> implements DerAttrDAO {
+
+ /**
+ * Maps an API-level derived attribute interface onto its concrete JPA entity class.
+ *
+ * @param reference API-level derived attribute interface (role / membership / user)
+ * @return matching JPA entity class, or null for unknown references. NOTE(review): a null
+ * return would make entityManager.find() below fail - confirm callers only pass the
+ * three known subtypes.
+ */
+ public <T extends DerAttr> Class<? extends AbstractDerAttr> getJPAEntityReference(
+ final Class<T> reference) {
+
+ return RDerAttr.class.isAssignableFrom(reference)
+ ? JPARDerAttr.class
+ : MDerAttr.class.isAssignableFrom(reference)
+ ? JPAMDerAttr.class
+ : UDerAttr.class.isAssignableFrom(reference)
+ ? JPAUDerAttr.class
+ : null;
+ }
+
+ /**
+ * Finds the derived attribute with the given key.
+ *
+ * @param key derived attribute key
+ * @param reference API-level derived attribute interface
+ * @return matching derived attribute, or null when not found
+ */
+ @Override
+ public <T extends DerAttr> T find(final Long key, final Class<T> reference) {
+ return reference.cast(entityManager.find(getJPAEntityReference(reference), key));
+ }
+
+ /**
+ * Returns all derived attributes of the given type.
+ *
+ * @param reference API-level derived attribute interface
+ * @return all matching derived attributes
+ */
+ @Override
+ public <T extends DerAttr> List<T> findAll(final Class<T> reference) {
+ TypedQuery<T> query = entityManager.createQuery(
+ "SELECT e FROM " + getJPAEntityReference(reference).getSimpleName() + " e", reference);
+ return query.getResultList();
+ }
+
+ /**
+ * Saves the given derived attribute.
+ *
+ * @param derAttr derived attribute to save
+ * @return merged derived attribute
+ */
+ @Override
+ public <T extends DerAttr> T save(final T derAttr) {
+ return entityManager.merge(derAttr);
+ }
+
+ /**
+ * Deletes the derived attribute with the given key, if it exists.
+ *
+ * @param key derived attribute key
+ * @param reference API-level derived attribute interface
+ */
+ @Override
+ public <T extends DerAttr> void delete(final Long key, final Class<T> reference) {
+ T derAttr = find(key, reference);
+ if (derAttr == null) {
+ return;
+ }
+
+ delete(derAttr);
+ }
+
+ /**
+ * Deletes the given derived attribute, first detaching it from its owner so the
+ * in-memory state stays consistent.
+ *
+ * @param derAttr derived attribute to delete
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T extends DerAttr> void delete(final T derAttr) {
+ if (derAttr.getOwner() != null) {
+ ((Attributable<?, T, ?>) derAttr.getOwner()).removeDerAttr(derAttr);
+ }
+
+ entityManager.remove(derAttr);
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerSchemaDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerSchemaDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerSchemaDAO.java
new file mode 100644
index 0000000..78271e9
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPADerSchemaDAO.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import javax.persistence.TypedQuery;
+import org.apache.syncope.common.lib.types.AttributableType;
+import org.apache.syncope.server.persistence.api.dao.AttrTemplateDAO;
+import org.apache.syncope.server.persistence.api.dao.DerAttrDAO;
+import org.apache.syncope.server.persistence.api.dao.DerSchemaDAO;
+import org.apache.syncope.server.persistence.api.dao.ExternalResourceDAO;
+import org.apache.syncope.server.persistence.api.entity.AttributableUtil;
+import org.apache.syncope.server.persistence.api.entity.DerAttr;
+import org.apache.syncope.server.persistence.api.entity.DerSchema;
+import org.apache.syncope.server.persistence.api.entity.membership.MDerSchema;
+import org.apache.syncope.server.persistence.api.entity.role.RDerSchema;
+import org.apache.syncope.server.persistence.api.entity.user.UDerAttr;
+import org.apache.syncope.server.persistence.api.entity.user.UDerSchema;
+import org.apache.syncope.server.persistence.api.entity.user.UMappingItem;
+import org.apache.syncope.server.persistence.jpa.entity.AbstractDerSchema;
+import org.apache.syncope.server.persistence.jpa.entity.membership.JPAMDerSchema;
+import org.apache.syncope.server.persistence.jpa.entity.role.JPARDerSchema;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUDerSchema;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class JPADerSchemaDAO extends AbstractDAO<DerSchema, String> implements DerSchemaDAO {
+
+ @Autowired
+ private DerAttrDAO derAttrDAO;
+
+ @Autowired
+ private AttrTemplateDAO<DerSchema> attrTemplateDAO;
+
+ @Autowired
+ private ExternalResourceDAO resourceDAO;
+
+ private <T extends DerSchema> Class<? extends AbstractDerSchema> getJPAEntityReference(final Class<T> reference) {
+ return RDerSchema.class.isAssignableFrom(reference)
+ ? JPARDerSchema.class
+ : MDerSchema.class.isAssignableFrom(reference)
+ ? JPAMDerSchema.class
+ : UDerSchema.class.isAssignableFrom(reference)
+ ? JPAUDerSchema.class
+ : null;
+ }
+
+ @Override
+ public <T extends DerSchema> T find(final String key, final Class<T> reference) {
+ return reference.cast(entityManager.find(getJPAEntityReference(reference), key));
+ }
+
+ @Override
+ public <T extends DerSchema> List<T> findAll(final Class<T> reference) {
+ TypedQuery<T> query = entityManager.createQuery(
+ "SELECT e FROM " + getJPAEntityReference(reference).getSimpleName() + " e", reference);
+ return query.getResultList();
+ }
+
+ @Override
+ public <T extends DerAttr> List<T> findAttrs(final DerSchema schema, final Class<T> reference) {
+ final StringBuilder queryString = new StringBuilder("SELECT e FROM ").
+ append(((JPADerAttrDAO) derAttrDAO).getJPAEntityReference(reference).getSimpleName()).
+ append(" e WHERE e.");
+ if (UDerAttr.class.isAssignableFrom(reference)) {
+ queryString.append("derSchema");
+ } else {
+ queryString.append("template.schema");
+ }
+ queryString.append("=:schema");
+
+ TypedQuery<T> query = entityManager.createQuery(queryString.toString(), reference);
+ query.setParameter("schema", schema);
+
+ return query.getResultList();
+ }
+
+ @Override
+ public <T extends DerSchema> T save(final T derSchema) {
+ return entityManager.merge(derSchema);
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public void delete(final String key, final AttributableUtil attributableUtil) {
+ final DerSchema schema = find(key, attributableUtil.derSchemaClass());
+ if (schema == null) {
+ return;
+ }
+
+ final Set<Long> attrIds = new HashSet<>();
+ for (DerAttr attr : findAttrs(schema, attributableUtil.derAttrClass())) {
+ attrIds.add(attr.getKey());
+ }
+ for (Long attrId : attrIds) {
+ derAttrDAO.delete(attrId, attributableUtil.derAttrClass());
+ }
+
+ if (attributableUtil.getType() != AttributableType.USER) {
+ for (Iterator<Number> it = attrTemplateDAO.
+ findBySchemaName(schema.getKey(), attributableUtil.derAttrTemplateClass()).iterator();
+ it.hasNext();) {
+
+ attrTemplateDAO.delete(it.next().longValue(), attributableUtil.derAttrTemplateClass());
+ }
+ }
+
+ resourceDAO.deleteMapping(key, attributableUtil.derIntMappingType(), UMappingItem.class);
+
+ entityManager.remove(schema);
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAEntitlementDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAEntitlementDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAEntitlementDAO.java
new file mode 100644
index 0000000..08c020f
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAEntitlementDAO.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.List;
+import javax.persistence.TypedQuery;
+import org.apache.syncope.server.persistence.api.RoleEntitlementUtil;
+import org.apache.syncope.server.persistence.api.dao.EntitlementDAO;
+import org.apache.syncope.server.persistence.api.dao.RoleDAO;
+import org.apache.syncope.server.persistence.api.entity.Entitlement;
+import org.apache.syncope.server.persistence.api.entity.role.Role;
+import org.apache.syncope.server.persistence.jpa.entity.JPAEntitlement;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class JPAEntitlementDAO extends AbstractDAO<Entitlement, String> implements EntitlementDAO {
+
+ @Autowired
+ private RoleDAO roleDAO;
+
+ @Override
+ public Entitlement find(final String name) {
+ return entityManager.find(JPAEntitlement.class, name);
+ }
+
+ @Override
+ public List<Entitlement> findAll() {
+ TypedQuery<Entitlement> query = entityManager.createQuery(
+ "SELECT e FROM " + JPAEntitlement.class.getSimpleName() + " e", Entitlement.class);
+
+ return query.getResultList();
+ }
+
+ @Override
+ public Entitlement save(final Entitlement entitlement) {
+ return entityManager.merge(entitlement);
+ }
+
+ @Override
+ public Entitlement saveRoleEntitlement(final Role role) {
+ Entitlement roleEnt = new JPAEntitlement();
+ roleEnt.setKey(RoleEntitlementUtil.getEntitlementNameFromRoleKey(role.getKey()));
+ roleEnt.setDescription("Entitlement for managing role " + role.getKey());
+
+ return save(roleEnt);
+ }
+
+ @Override
+ public void delete(final String name) {
+ Entitlement entitlement = find(name);
+ if (entitlement == null) {
+ return;
+ }
+
+ delete(entitlement);
+ }
+
+ @Override
+ public void delete(final Entitlement entitlement) {
+ if (entitlement == null) {
+ return;
+ }
+
+ for (Role role : roleDAO.findByEntitlement(entitlement)) {
+ role.removeEntitlement(entitlement);
+ roleDAO.save(role);
+ }
+
+ entityManager.remove(entitlement);
+ }
+}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAExternalResourceDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAExternalResourceDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAExternalResourceDAO.java
new file mode 100644
index 0000000..e061f1f
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPAExternalResourceDAO.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import javax.persistence.NoResultException;
+import javax.persistence.TypedQuery;
+import org.apache.syncope.common.lib.types.IntMappingType;
+import org.apache.syncope.common.lib.types.PolicyType;
+import org.apache.syncope.common.lib.types.TaskType;
+import org.apache.syncope.server.persistence.api.dao.ExternalResourceDAO;
+import org.apache.syncope.server.persistence.api.dao.NotFoundException;
+import org.apache.syncope.server.persistence.api.dao.PolicyDAO;
+import org.apache.syncope.server.persistence.api.dao.RoleDAO;
+import org.apache.syncope.server.persistence.api.dao.TaskDAO;
+import org.apache.syncope.server.persistence.api.dao.UserDAO;
+import org.apache.syncope.server.persistence.api.entity.AccountPolicy;
+import org.apache.syncope.server.persistence.api.entity.ExternalResource;
+import org.apache.syncope.server.persistence.api.entity.Mapping;
+import org.apache.syncope.server.persistence.api.entity.MappingItem;
+import org.apache.syncope.server.persistence.api.entity.Policy;
+import org.apache.syncope.server.persistence.api.entity.role.Role;
+import org.apache.syncope.server.persistence.api.entity.task.PropagationTask;
+import org.apache.syncope.server.persistence.api.entity.task.PushTask;
+import org.apache.syncope.server.persistence.api.entity.task.SyncTask;
+import org.apache.syncope.server.persistence.api.entity.user.UMappingItem;
+import org.apache.syncope.server.persistence.api.entity.user.User;
+import org.apache.syncope.server.persistence.jpa.entity.AbstractMappingItem;
+import org.apache.syncope.server.persistence.jpa.entity.JPAExternalResource;
+import org.apache.syncope.server.persistence.jpa.entity.role.JPARMappingItem;
+import org.apache.syncope.server.persistence.jpa.entity.user.JPAUMappingItem;
+import org.apache.syncope.server.provisioning.api.ConnectorRegistry;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+import org.springframework.transaction.annotation.Transactional;
+
// JPA-based implementation of ExternalResourceDAO: CRUD on external resources plus
// cascade cleanup (tasks, user/role/policy references, connector link, mappings) on
// delete. The statement order in delete()/deleteMapping() is deliberate — do not
// reorder without re-checking the cascade semantics.
@Repository
public class JPAExternalResourceDAO extends AbstractDAO<ExternalResource, String> implements ExternalResourceDAO {

    @Autowired
    private TaskDAO taskDAO;

    @Autowired
    private UserDAO userDAO;

    @Autowired
    private RoleDAO roleDAO;

    @Autowired
    private PolicyDAO policyDAO;

    @Autowired
    private ConnectorRegistry connRegistry;

    /**
     * Finds a resource by name; returns {@code null} (rather than propagating
     * {@link NoResultException}) when no match exists.
     */
    @Override
    public ExternalResource find(final String name) {
        TypedQuery<ExternalResource> query = entityManager.createQuery("SELECT e FROM "
                + JPAExternalResource.class.getSimpleName() + " e WHERE e.name = :name", ExternalResource.class);
        query.setParameter("name", name);

        ExternalResource result = null;
        try {
            result = query.getSingleResult();
        } catch (NoResultException e) {
            // absence is a normal outcome, not an error
            LOG.debug("No resource found with name {}", name, e);
        }

        return result;
    }

    /**
     * Builds the shared "SELECT e ... WHERE e.&lt;policyField&gt;" JPQL prefix for the
     * given policy type; callers append the comparison (" = :policy" or " IS NULL").
     * For any type not listed below the field name is left empty, producing a
     * malformed query — callers are expected to pass only the handled types.
     */
    private StringBuilder getByPolicyQuery(final PolicyType type) {
        StringBuilder query = new StringBuilder("SELECT e FROM ").
                append(JPAExternalResource.class.getSimpleName()).
                append(" e WHERE e.");
        switch (type) {
            case ACCOUNT:
            case GLOBAL_ACCOUNT:
                query.append("accountPolicy");
                break;

            case PASSWORD:
            case GLOBAL_PASSWORD:
                query.append("passwordPolicy");
                break;

            case SYNC:
            case GLOBAL_SYNC:
                query.append("syncPolicy");
                break;

            default:
                break;
        }
        return query;
    }

    /**
     * Returns all resources referencing the given policy.
     */
    @Override
    public List<ExternalResource> findByPolicy(final Policy policy) {
        TypedQuery<ExternalResource> query = entityManager.createQuery(
                getByPolicyQuery(policy.getType()).append(" = :policy").toString(), ExternalResource.class);
        query.setParameter("policy", policy);
        return query.getResultList();
    }

    /**
     * Returns all resources with no policy of the given type set.
     */
    @Override
    public List<ExternalResource> findWithoutPolicy(final PolicyType type) {
        TypedQuery<ExternalResource> query = entityManager.createQuery(
                getByPolicyQuery(type).append(" IS NULL").toString(), ExternalResource.class);
        return query.getResultList();
    }

    @Override
    public List<ExternalResource> findAll() {
        TypedQuery<ExternalResource> query = entityManager.createQuery(
                "SELECT e FROM " + JPAExternalResource.class.getSimpleName() + " e", ExternalResource.class);
        return query.getResultList();
    }

    /**
     * Returns all resources ordered by their propagation priority.
     */
    @Override
    public List<ExternalResource> findAllByPriority() {
        TypedQuery<ExternalResource> query = entityManager.createQuery(
                "SELECT e FROM " + JPAExternalResource.class.getSimpleName() + " e ORDER BY e.propagationPriority",
                ExternalResource.class);
        return query.getResultList();
    }

    /**
     * This method has an explicit Transactional annotation because it is called by SyncJob.
     *
     * @see org.apache.syncope.core.sync.impl.SyncJob
     *
     * @param resource entity to be merged
     * @return the same entity, updated
     */
    @Override
    @Transactional(rollbackFor = { Throwable.class })
    public ExternalResource save(final ExternalResource resource) {
        ExternalResource merged = entityManager.merge(resource);
        try {
            // keep the connector registry aligned with the persisted resource;
            // registration failure is logged but does not roll back the save
            connRegistry.registerConnector(merged);
        } catch (NotFoundException e) {
            LOG.error("While registering connector for resource", e);
        }
        return merged;
    }

    /**
     * Removes every mapping item matching the given internal attribute name and
     * mapping type, then evicts the affected entities from the second-level cache.
     * Embedded mapping types are left untouched.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <T extends MappingItem> void deleteMapping(
            final String intAttrName, final IntMappingType intMappingType, final Class<T> reference) {

        if (IntMappingType.getEmbedded().contains(intMappingType)) {
            return;
        }

        // user mapping items live in their own table; everything else is treated as role
        Class<? extends AbstractMappingItem> jpaRef = reference.equals(UMappingItem.class)
                ? JPAUMappingItem.class
                : JPARMappingItem.class;

        TypedQuery<T> query = entityManager.createQuery("SELECT m FROM " + jpaRef.getSimpleName()
                + " m WHERE m.intAttrName=:intAttrName AND m.intMappingType=:intMappingType", reference);
        query.setParameter("intAttrName", intAttrName);
        query.setParameter("intMappingType", intMappingType);

        // two-phase removal: collect keys first, then re-find and delete each item,
        // so the query result list is not mutated while being iterated
        Set<Long> itemIds = new HashSet<>();
        for (T item : query.getResultList()) {
            itemIds.add(item.getKey());
        }
        // records the concrete Mapping class of the last removed item, for cache eviction below
        Class<?> mappingRef = null;
        for (Long itemId : itemIds) {
            T item = (T) entityManager.find(jpaRef, itemId);
            if (item != null) {
                mappingRef = item.getMapping().getClass();

                // detach from the owning mapping before removal
                ((Mapping<T>) item.getMapping()).removeItem(item);
                item.setMapping(null);

                entityManager.remove(item);
            }
        }

        // Make empty query cache for *MappingItem and related *Mapping
        entityManager.getEntityManagerFactory().getCache().evict(jpaRef);
        if (mappingRef != null) {
            entityManager.getEntityManagerFactory().getCache().evict(mappingRef);
        }
    }

    /**
     * Deletes the named resource, cascading to its tasks and detaching it from all
     * users, roles, account policies, its connector and its own mappings first.
     * A missing name is a no-op.
     */
    @Override
    public void delete(final String name) {
        ExternalResource resource = find(name);
        if (resource == null) {
            return;
        }

        // drop every task targeting this resource, for all task types
        taskDAO.deleteAll(resource, TaskType.PROPAGATION);
        taskDAO.deleteAll(resource, TaskType.SYNCHRONIZATION);
        taskDAO.deleteAll(resource, TaskType.PUSH);

        // detach the resource from every entity referencing it
        for (User user : userDAO.findByResource(resource)) {
            user.removeResource(resource);
        }
        for (Role role : roleDAO.findByResource(resource)) {
            role.removeResource(resource);
        }
        for (AccountPolicy policy : policyDAO.findByResource(resource)) {
            policy.removeResource(resource);
        }

        // unlink from the connector on both sides of the association
        if (resource.getConnector() != null && resource.getConnector().getResources() != null
                && !resource.getConnector().getResources().isEmpty()) {

            resource.getConnector().getResources().remove(resource);
        }
        resource.setConnector(null);

        // tear down user and role mappings: items first, then the mapping itself
        if (resource.getUmapping() != null) {
            for (MappingItem item : resource.getUmapping().getItems()) {
                item.setMapping(null);
            }
            resource.getUmapping().getItems().clear();
            resource.getUmapping().setResource(null);
            resource.setUmapping(null);
        }
        if (resource.getRmapping() != null) {
            for (MappingItem item : resource.getRmapping().getItems()) {
                item.setMapping(null);
            }
            resource.getRmapping().getItems().clear();
            resource.getRmapping().setResource(null);
            resource.setRmapping(null);
        }

        entityManager.remove(resource);
    }
}
http://git-wip-us.apache.org/repos/asf/syncope/blob/235f60fa/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPALoggerDAO.java
----------------------------------------------------------------------
diff --git a/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPALoggerDAO.java b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPALoggerDAO.java
new file mode 100644
index 0000000..1b2e385
--- /dev/null
+++ b/syncope620/server/persistence-jpa/src/main/java/org/apache/syncope/server/persistence/jpa/dao/JPALoggerDAO.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.server.persistence.jpa.dao;
+
+import java.util.List;
+import javax.persistence.TypedQuery;
+import org.apache.syncope.common.lib.types.LoggerLevel;
+import org.apache.syncope.common.lib.types.LoggerType;
+import org.apache.syncope.server.persistence.api.dao.LoggerDAO;
+import org.apache.syncope.server.persistence.api.entity.Logger;
+import org.apache.syncope.server.persistence.jpa.entity.JPALogger;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class JPALoggerDAO extends AbstractDAO<Logger, String> implements LoggerDAO {
+
+ @Override
+ public Logger find(final String key) {
+ return entityManager.find(JPALogger.class, key);
+ }
+
+ @Override
+ public List<Logger> findAll(final LoggerType type) {
+ TypedQuery<Logger> query = entityManager.createQuery(
+ "SELECT e FROM " + JPALogger.class.getSimpleName() + " e WHERE e.type=:type", Logger.class);
+ query.setParameter("type", type);
+ return query.getResultList();
+ }
+
+ @Override
+ public Logger save(final Logger logger) {
+ // Audit loggers must be either OFF or DEBUG, no more options
+ if (LoggerType.AUDIT == logger.getType() && LoggerLevel.OFF != logger.getLevel()) {
+ logger.setLevel(LoggerLevel.DEBUG);
+ }
+ return entityManager.merge(logger);
+ }
+
+ @Override
+ public void delete(final Logger logger) {
+ entityManager.remove(logger);
+ }
+
+ @Override
+ public void delete(final String key) {
+ Logger logger = find(key);
+ if (logger == null) {
+ return;
+ }
+
+ delete(logger);
+ }
+}