Posted to commits@syncope.apache.org by il...@apache.org on 2018/03/02 11:47:24 UTC

[05/11] syncope git commit: [SYNCOPE-1279] Now providing runtime status updates from running Tasks and Reports

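For context, the pattern this commit introduces is a single AtomicReference<String> held by
each job delegate: the delegate (and the reportlets it invokes) update it as work progresses,
and a currentStatus() accessor exposes the latest value at runtime. The following is a
condensed, illustrative sketch of that pattern only; apart from currentStatus() and
AtomicReference, the names below do not come from this commit.

    import java.util.concurrent.atomic.AtomicReference;

    // Minimal sketch of the status-reporting pattern added by SYNCOPE-1279.
    // StatusReportingDelegate is a hypothetical name, not a Syncope class.
    class StatusReportingDelegate {

        // single mutable holder, safely readable from another thread
        private final AtomicReference<String> status = new AtomicReference<>("Not started");

        // polled by the job management layer to report runtime progress
        public String currentStatus() {
            return status.get();
        }

        public void execute() {
            status.set("Starting");
            for (int page = 1; page <= 10; page++) {
                status.set("Processing page " + page + " of 10");
                // ... actual work for this page ...
            }
            status.set("Completed");
        }
    }
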
http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/notification/NotificationJobDelegate.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/notification/NotificationJobDelegate.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/notification/NotificationJobDelegate.java
deleted file mode 100644
index c019639..0000000
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/notification/NotificationJobDelegate.java
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.syncope.core.provisioning.java.job.notification;
-
-import java.io.PrintStream;
-import java.util.Date;
-import java.util.Enumeration;
-import java.util.Properties;
-import javax.mail.Session;
-import javax.mail.internet.MimeMessage;
-import org.apache.commons.lang3.BooleanUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.syncope.common.lib.LogOutputStream;
-import org.apache.syncope.common.lib.PropertyUtils;
-import org.apache.syncope.common.lib.types.AuditElements;
-import org.apache.syncope.common.lib.types.TaskType;
-import org.apache.syncope.common.lib.types.TraceLevel;
-import org.apache.syncope.core.provisioning.api.utils.ExceptionUtils2;
-import org.apache.syncope.core.persistence.api.dao.TaskDAO;
-import org.apache.syncope.core.persistence.api.entity.EntityFactory;
-import org.apache.syncope.core.persistence.api.entity.task.NotificationTask;
-import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
-import org.apache.syncope.core.provisioning.api.AuditManager;
-import org.apache.syncope.core.provisioning.api.notification.NotificationManager;
-import org.apache.syncope.core.spring.security.Encryptor;
-import org.quartz.JobExecutionException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.InitializingBean;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.mail.javamail.JavaMailSender;
-import org.springframework.mail.javamail.JavaMailSenderImpl;
-import org.springframework.mail.javamail.MimeMessageHelper;
-import org.springframework.stereotype.Component;
-import org.springframework.transaction.annotation.Transactional;
-
-@Component
-public class NotificationJobDelegate implements InitializingBean {
-
-    private static final Logger LOG = LoggerFactory.getLogger(NotificationJobDelegate.class);
-
-    @Autowired
-    private TaskDAO taskDAO;
-
-    @Autowired
-    private JavaMailSender mailSender;
-
-    @Autowired
-    private EntityFactory entityFactory;
-
-    @Autowired
-    private AuditManager auditManager;
-
-    @Autowired
-    private NotificationManager notificationManager;
-
-    @Override
-    public void afterPropertiesSet() throws Exception {
-        if (mailSender instanceof JavaMailSenderImpl) {
-            JavaMailSenderImpl javaMailSender = (JavaMailSenderImpl) mailSender;
-
-            Properties javaMailProperties = javaMailSender.getJavaMailProperties();
-
-            Properties props = PropertyUtils.read(Encryptor.class, "mail.properties", "conf.directory").getLeft();
-            for (Enumeration<?> e = props.propertyNames(); e.hasMoreElements();) {
-                String prop = (String) e.nextElement();
-                if (prop.startsWith("mail.smtp.")) {
-                    javaMailProperties.setProperty(prop, props.getProperty(prop));
-                }
-            }
-
-            if (StringUtils.isNotBlank(javaMailSender.getUsername())) {
-                javaMailProperties.setProperty("mail.smtp.auth", "true");
-            }
-
-            javaMailSender.setJavaMailProperties(javaMailProperties);
-
-            String mailDebug = props.getProperty("mail.debug", "false");
-            if (BooleanUtils.toBoolean(mailDebug)) {
-                Session session = javaMailSender.getSession();
-                session.setDebug(true);
-                session.setDebugOut(new PrintStream(new LogOutputStream(LOG)));
-            }
-        }
-    }
-
-    @Transactional
-    public TaskExec executeSingle(final NotificationTask task) {
-        TaskExec execution = entityFactory.newEntity(TaskExec.class);
-        execution.setTask(task);
-        execution.setStart(new Date());
-
-        boolean retryPossible = true;
-
-        if (StringUtils.isBlank(task.getSubject()) || task.getRecipients().isEmpty()
-                || StringUtils.isBlank(task.getHtmlBody()) || StringUtils.isBlank(task.getTextBody())) {
-
-            String message = "Could not fetch all required information for sending e-mails:\n"
-                    + task.getRecipients() + "\n"
-                    + task.getSender() + "\n"
-                    + task.getSubject() + "\n"
-                    + task.getHtmlBody() + "\n"
-                    + task.getTextBody();
-            LOG.error(message);
-
-            execution.setStatus(NotificationJob.Status.NOT_SENT.name());
-            retryPossible = false;
-
-            if (task.getTraceLevel().ordinal() >= TraceLevel.FAILURES.ordinal()) {
-                execution.setMessage(message);
-            }
-        } else {
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("About to send e-mails:\n"
-                        + task.getRecipients() + "\n"
-                        + task.getSender() + "\n"
-                        + task.getSubject() + "\n"
-                        + task.getHtmlBody() + "\n"
-                        + task.getTextBody() + "\n");
-            }
-
-            for (String to : task.getRecipients()) {
-                try {
-                    MimeMessage message = mailSender.createMimeMessage();
-                    MimeMessageHelper helper = new MimeMessageHelper(message, true);
-                    helper.setTo(to);
-                    helper.setFrom(task.getSender());
-                    helper.setSubject(task.getSubject());
-                    helper.setText(task.getTextBody(), task.getHtmlBody());
-
-                    mailSender.send(message);
-
-                    execution.setStatus(NotificationJob.Status.SENT.name());
-
-                    StringBuilder report = new StringBuilder();
-                    switch (task.getTraceLevel()) {
-                        case ALL:
-                            report.append("FROM: ").append(task.getSender()).append('\n').
-                                    append("TO: ").append(to).append('\n').
-                                    append("SUBJECT: ").append(task.getSubject()).append('\n').append('\n').
-                                    append(task.getTextBody()).append('\n').append('\n').
-                                    append(task.getHtmlBody()).append('\n');
-                            break;
-
-                        case SUMMARY:
-                            report.append("E-mail sent to ").append(to).append('\n');
-                            break;
-
-                        case FAILURES:
-                        case NONE:
-                        default:
-                    }
-                    if (report.length() > 0) {
-                        execution.setMessage(report.toString());
-                    }
-
-                    notificationManager.createTasks(
-                            AuditElements.EventCategoryType.TASK,
-                            "notification",
-                            null,
-                            "send",
-                            AuditElements.Result.SUCCESS,
-                            null,
-                            null,
-                            task,
-                            "Successfully sent notification to " + to);
-                } catch (Exception e) {
-                    LOG.error("Could not send e-mail", e);
-
-                    execution.setStatus(NotificationJob.Status.NOT_SENT.name());
-                    if (task.getTraceLevel().ordinal() >= TraceLevel.FAILURES.ordinal()) {
-                        execution.setMessage(ExceptionUtils2.getFullStackTrace(e));
-                    }
-
-                    notificationManager.createTasks(
-                            AuditElements.EventCategoryType.TASK,
-                            "notification",
-                            null,
-                            "send",
-                            AuditElements.Result.FAILURE,
-                            null,
-                            null,
-                            task,
-                            "Could not send notification to " + to, e);
-                }
-
-                execution.setEnd(new Date());
-            }
-        }
-
-        if (hasToBeRegistered(execution)) {
-            execution = notificationManager.storeExec(execution);
-            if (retryPossible
-                    && (NotificationJob.Status.valueOf(execution.getStatus()) == NotificationJob.Status.NOT_SENT)) {
-
-                handleRetries(execution);
-            }
-        } else {
-            notificationManager.setTaskExecuted(execution.getTask().getKey(), true);
-        }
-
-        return execution;
-    }
-
-    @Transactional
-    public void execute() throws JobExecutionException {
-        for (NotificationTask task : taskDAO.<NotificationTask>findToExec(TaskType.NOTIFICATION)) {
-            LOG.debug("Found notification task {} to be executed: starting...", task);
-            executeSingle(task);
-            LOG.debug("Notification task {} executed", task);
-        }
-    }
-
-    private boolean hasToBeRegistered(final TaskExec execution) {
-        NotificationTask task = (NotificationTask) execution.getTask();
-
-        // True if either failed and failures have to be registered, or if ALL
-        // has to be registered.
-        return (NotificationJob.Status.valueOf(execution.getStatus()) == NotificationJob.Status.NOT_SENT
-                && task.getTraceLevel().ordinal() >= TraceLevel.FAILURES.ordinal())
-                || task.getTraceLevel() == TraceLevel.ALL;
-    }
-
-    private void handleRetries(final TaskExec execution) {
-        if (notificationManager.getMaxRetries() <= 0) {
-            return;
-        }
-
-        long failedExecutionsCount = notificationManager.countExecutionsWithStatus(
-                execution.getTask().getKey(), NotificationJob.Status.NOT_SENT.name());
-
-        if (failedExecutionsCount <= notificationManager.getMaxRetries()) {
-            LOG.debug("Execution of notification task {} will be retried [{}/{}]",
-                    execution.getTask(), failedExecutionsCount, notificationManager.getMaxRetries());
-            notificationManager.setTaskExecuted(execution.getTask().getKey(), false);
-
-            auditManager.audit(
-                    AuditElements.EventCategoryType.TASK,
-                    "notification",
-                    null,
-                    "retry",
-                    AuditElements.Result.SUCCESS,
-                    null,
-                    null,
-                    execution,
-                    "Notification task " + execution.getTask().getKey() + " will be retried");
-        } else {
-            LOG.error("Maximum number of retries reached for task {} - giving up", execution.getTask());
-
-            auditManager.audit(
-                    AuditElements.EventCategoryType.TASK,
-                    "notification",
-                    null,
-                    "retry",
-                    AuditElements.Result.FAILURE,
-                    null,
-                    null,
-                    execution,
-                    "Giving up retries on notification task " + execution.getTask().getKey());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AbstractReportlet.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AbstractReportlet.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AbstractReportlet.java
index 2c85b20..81a108c 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AbstractReportlet.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AbstractReportlet.java
@@ -18,6 +18,7 @@
  */
 package org.apache.syncope.core.provisioning.java.job.report;
 
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.syncope.core.persistence.api.dao.Reportlet;
 import org.apache.syncope.common.lib.report.ReportletConf;
 import org.slf4j.Logger;
@@ -31,11 +32,17 @@ public abstract class AbstractReportlet implements Reportlet {
 
     protected static final Logger LOG = LoggerFactory.getLogger(AbstractReportlet.class);
 
-    protected abstract void doExtract(ReportletConf conf, ContentHandler handler) throws SAXException;
+    protected abstract void doExtract(ReportletConf conf, ContentHandler handler, AtomicReference<String> status)
+            throws SAXException;
 
     @Override
     @Transactional(readOnly = true)
-    public void extract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
+    public void extract(
+            final ReportletConf conf,
+            final ContentHandler handler,
+            final AtomicReference<String> status)
+            throws SAXException {
+
         if (conf == null) {
             throw new ReportException(new IllegalArgumentException("No configuration provided"));
         }
@@ -45,7 +52,7 @@ public abstract class AbstractReportlet implements Reportlet {
         atts.addAttribute("", "", ReportXMLConst.ATTR_CLASS, ReportXMLConst.XSD_STRING, getClass().getName());
         handler.startElement("", "", ReportXMLConst.ELEMENT_REPORTLET, atts);
 
-        doExtract(conf, handler);
+        doExtract(conf, handler, status);
 
         handler.endElement("", "", ReportXMLConst.ELEMENT_REPORTLET);
     }

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AuditReportlet.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AuditReportlet.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AuditReportlet.java
index d6f0ec5..6ba9a53 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AuditReportlet.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/AuditReportlet.java
@@ -20,6 +20,7 @@ package org.apache.syncope.core.provisioning.java.job.report;
 
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
 import javax.sql.DataSource;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.builder.ToStringBuilder;
@@ -47,7 +48,9 @@ public class AuditReportlet extends AbstractReportlet {
 
     private DataSource datasource;
 
-    private void doExtractConf(final ContentHandler handler) throws SAXException {
+    private void doExtractConf(final ContentHandler handler, final AtomicReference<String> status) throws SAXException {
+        status.set("Fetching " + conf.getSize() + " rows from the SYNCOPEAUDIT table");
+
         JdbcTemplate jdbcTemplate = new JdbcTemplate(datasource);
         jdbcTemplate.setMaxRows(conf.getSize());
         List<Map<String, Object>> rows = jdbcTemplate.
@@ -120,10 +123,17 @@ public class AuditReportlet extends AbstractReportlet {
             handler.endElement("", "", "event");
         }
         handler.endElement("", "", "events");
+
+        status.set("Fetched " + conf.getSize() + " rows from the SYNCOPEAUDIT table");
     }
 
     @Override
-    protected void doExtract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
+    protected void doExtract(
+            final ReportletConf conf,
+            final ContentHandler handler,
+            final AtomicReference<String> status)
+            throws SAXException {
+
         if (conf instanceof AuditReportletConf) {
             this.conf = AuditReportletConf.class.cast(conf);
         } else {
@@ -135,7 +145,7 @@ public class AuditReportlet extends AbstractReportlet {
             throw new ReportException(new IllegalArgumentException("Could not get to DataSource"));
         }
 
-        doExtractConf(handler);
+        doExtractConf(handler, status);
     }
 
 }

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/DefaultReportJobDelegate.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/DefaultReportJobDelegate.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/DefaultReportJobDelegate.java
new file mode 100644
index 0000000..a9aeab0
--- /dev/null
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/DefaultReportJobDelegate.java
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.core.provisioning.java.job.report;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Date;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.zip.Deflater;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.sax.SAXTransformerFactory;
+import javax.xml.transform.sax.TransformerHandler;
+import javax.xml.transform.stream.StreamResult;
+import org.apache.syncope.common.lib.report.ReportletConf;
+import org.apache.syncope.common.lib.types.ReportExecStatus;
+import org.apache.syncope.core.provisioning.api.utils.ExceptionUtils2;
+import org.apache.syncope.core.spring.ApplicationContextProvider;
+import org.apache.syncope.core.persistence.api.ImplementationLookup;
+import org.apache.syncope.core.persistence.api.dao.ReportDAO;
+import org.apache.syncope.core.persistence.api.dao.ReportExecDAO;
+import org.apache.syncope.core.persistence.api.dao.Reportlet;
+import org.apache.syncope.core.persistence.api.entity.EntityFactory;
+import org.apache.syncope.core.persistence.api.entity.Report;
+import org.apache.syncope.core.persistence.api.entity.ReportExec;
+import org.apache.syncope.core.provisioning.api.job.report.ReportJobDelegate;
+import org.quartz.JobExecutionException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.support.AbstractBeanDefinition;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
+import org.xml.sax.helpers.AttributesImpl;
+
+@Component
+public class DefaultReportJobDelegate implements ReportJobDelegate {
+
+    private static final Logger LOG = LoggerFactory.getLogger(ReportJobDelegate.class);
+
+    /**
+     * Report DAO.
+     */
+    @Autowired
+    private ReportDAO reportDAO;
+
+    /**
+     * Report execution DAO.
+     */
+    @Autowired
+    private ReportExecDAO reportExecDAO;
+
+    @Autowired
+    private EntityFactory entityFactory;
+
+    @Autowired
+    private ImplementationLookup implementationLookup;
+
+    private final AtomicReference<String> status = new AtomicReference<>();
+
+    @Override
+    public String currentStatus() {
+        return status.get();
+    }
+
+    @Transactional
+    @Override
+    public void execute(final String reportKey) throws JobExecutionException {
+        Report report = reportDAO.find(reportKey);
+        if (report == null) {
+            throw new JobExecutionException("Report " + reportKey + " not found");
+        }
+
+        if (!report.isActive()) {
+            LOG.info("Report {} not active, aborting...", reportKey);
+            return;
+        }
+
+        // 1. create execution
+        ReportExec execution = entityFactory.newEntity(ReportExec.class);
+        execution.setStatus(ReportExecStatus.STARTED);
+        execution.setStart(new Date());
+        execution.setReport(report);
+        execution = reportExecDAO.save(execution);
+
+        report.add(execution);
+        report = reportDAO.save(report);
+
+        // 2. define a SAX handler for generating result as XML
+        TransformerHandler handler;
+
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ZipOutputStream zos = new ZipOutputStream(baos);
+        zos.setLevel(Deflater.BEST_COMPRESSION);
+        try {
+            SAXTransformerFactory tFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
+            tFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
+            handler = tFactory.newTransformerHandler();
+            Transformer serializer = handler.getTransformer();
+            serializer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name());
+            serializer.setOutputProperty(OutputKeys.INDENT, "yes");
+
+            // a single ZipEntry in the ZipOutputStream
+            zos.putNextEntry(new ZipEntry(report.getName()));
+
+            // streaming SAX handler in a compressed byte array stream
+            handler.setResult(new StreamResult(zos));
+        } catch (Exception e) {
+            throw new JobExecutionException("While configuring for SAX generation", e, true);
+        }
+
+        execution.setStatus(ReportExecStatus.RUNNING);
+        execution = reportExecDAO.save(execution);
+
+        status.set("Starting");
+
+        // 3. actual report execution
+        StringBuilder reportExecutionMessage = new StringBuilder();
+        try {
+            // report header
+            handler.startDocument();
+            AttributesImpl atts = new AttributesImpl();
+            atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
+            handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);
+
+            status.set("Generating report header");
+
+            // iterate over reportlet instances defined for this report
+            for (ReportletConf reportletConf : report.getReportletConfs()) {
+                Class<? extends Reportlet> reportletClass =
+                        implementationLookup.getReportletClass(reportletConf.getClass());
+                if (reportletClass == null) {
+                    LOG.warn("Could not find matching reportlet for {}", reportletConf.getClass());
+                } else {
+                    // fetch (or create) reportlet
+                    Reportlet reportlet;
+                    if (ApplicationContextProvider.getBeanFactory().containsSingleton(reportletClass.getName())) {
+                        reportlet = (Reportlet) ApplicationContextProvider.getBeanFactory().
+                                getSingleton(reportletClass.getName());
+                    } else {
+                        reportlet = (Reportlet) ApplicationContextProvider.getBeanFactory().
+                                createBean(reportletClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
+                        ApplicationContextProvider.getBeanFactory().
+                                registerSingleton(reportletClass.getName(), reportlet);
+                    }
+
+                    // invoke reportlet
+                    try {
+                        status.set("Invoking reportlet " + reportletClass.getName());
+                        reportlet.extract(reportletConf, handler, status);
+                    } catch (Throwable t) {
+                        LOG.error("While executing reportlet {} for report {}", reportlet, reportKey, t);
+
+                        execution.setStatus(ReportExecStatus.FAILURE);
+
+                        Throwable effective = t instanceof ReportException
+                                ? t.getCause()
+                                : t;
+                        reportExecutionMessage.
+                                append(ExceptionUtils2.getFullStackTrace(effective)).
+                                append("\n==================\n");
+                    }
+                }
+            }
+
+            // report footer
+            status.set("Generating report footer");
+
+            handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
+            handler.endDocument();
+
+            if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
+                execution.setStatus(ReportExecStatus.SUCCESS);
+            }
+        } catch (Exception e) {
+            execution.setStatus(ReportExecStatus.FAILURE);
+            reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(e));
+
+            throw new JobExecutionException(e, true);
+        } finally {
+            status.set("Completed");
+
+            try {
+                zos.closeEntry();
+                zos.close();
+                baos.close();
+            } catch (IOException e) {
+                LOG.error("While closing StreamResult's backend", e);
+            }
+
+            execution.setExecResult(baos.toByteArray());
+            execution.setMessage(reportExecutionMessage.toString());
+            execution.setEnd(new Date());
+            reportExecDAO.save(execution);
+        }
+    }
+}

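Usage note (not part of this commit): since the new DefaultReportJobDelegate exposes
currentStatus(), a separate thread can poll it while execute() runs. A minimal sketch under
that assumption follows; ReportStatusMonitor, "delegate" and "reportKey" are illustrative
names, and error handling is reduced to a stack trace.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import org.apache.syncope.core.provisioning.api.job.report.ReportJobDelegate;

    class ReportStatusMonitor {

        void monitor(final ReportJobDelegate delegate, final String reportKey) throws InterruptedException {
            ExecutorService pool = Executors.newSingleThreadExecutor();

            // run the report on a worker thread
            Future<?> run = pool.submit(() -> {
                try {
                    delegate.execute(reportKey);
                } catch (Exception e) {
                    // the real delegate throws JobExecutionException
                    e.printStackTrace();
                }
            });

            // poll the runtime status until the execution completes
            while (!run.isDone()) {
                System.out.println("Report " + reportKey + ": " + delegate.currentStatus());
                Thread.sleep(1000);
            }
            pool.shutdown();
        }
    }
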
http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/GroupReportlet.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/GroupReportlet.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/GroupReportlet.java
index 44a4dfa..b34e271 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/GroupReportlet.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/GroupReportlet.java
@@ -22,6 +22,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.syncope.common.lib.EntityTOUtils;
 import org.apache.syncope.common.lib.SyncopeConstants;
@@ -288,7 +289,12 @@ public class GroupReportlet extends AbstractReportlet {
     }
 
     @Override
-    protected void doExtract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
+    protected void doExtract(
+            final ReportletConf conf,
+            final ContentHandler handler,
+            final AtomicReference<String> status)
+            throws SAXException {
+
         if (conf instanceof GroupReportletConf) {
             this.conf = GroupReportletConf.class.cast(conf);
         } else {
@@ -297,7 +303,14 @@ public class GroupReportlet extends AbstractReportlet {
 
         doExtractConf(handler);
 
-        for (int page = 1; page <= (count() / AnyDAO.DEFAULT_PAGE_SIZE) + 1; page++) {
+        int total = count();
+        int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+        status.set("Processing " + total + " groups in " + pages + " pages");
+
+        for (int page = 1; page <= pages; page++) {
+            status.set("Processing " + total + " groups: page " + page + " of " + pages);
+
             List<Group> groups;
             if (StringUtils.isBlank(this.conf.getMatchingCond())) {
                 groups = groupDAO.findAll(page, AnyDAO.DEFAULT_PAGE_SIZE);
@@ -312,6 +325,8 @@ public class GroupReportlet extends AbstractReportlet {
             }
 
             doExtract(handler, groups);
+
+            status.set("Processed " + total + " groups: page " + page + " of " + pages);
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReconciliationReportlet.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReconciliationReportlet.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReconciliationReportlet.java
index 0328e62..aac1888 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReconciliationReportlet.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReconciliationReportlet.java
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.collections4.Closure;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.collections4.IterableUtils;
@@ -50,7 +51,6 @@ import org.apache.syncope.core.persistence.api.entity.Any;
 import org.apache.syncope.core.persistence.api.entity.AnyType;
 import org.apache.syncope.core.persistence.api.entity.AnyUtils;
 import org.apache.syncope.core.persistence.api.entity.AnyUtilsFactory;
-import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject;
 import org.apache.syncope.core.persistence.api.entity.group.Group;
 import org.apache.syncope.core.persistence.api.entity.resource.ExternalResource;
 import org.apache.syncope.core.persistence.api.entity.resource.MappingItem;
@@ -373,25 +373,13 @@ public class ReconciliationReportlet extends AbstractReportlet {
         }
     }
 
-    private void doExtract(
-            final ContentHandler handler, final int count, final SearchCond cond, final AnyTypeKind anyTypeKind)
+    @Override
+    protected void doExtract(
+            final ReportletConf conf,
+            final ContentHandler handler,
+            final AtomicReference<String> status)
             throws SAXException {
 
-        for (int page = 1; page <= (count / PAGE_SIZE) + 1; page++) {
-            List<AnyObject> anys = searchDAO.search(
-                    SyncopeConstants.FULL_ADMIN_REALMS,
-                    cond,
-                    page,
-                    PAGE_SIZE,
-                    Collections.<OrderByClause>emptyList(),
-                    anyTypeKind);
-
-            doExtract(handler, anys);
-        }
-    }
-
-    @Override
-    protected void doExtract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
         if (conf instanceof ReconciliationReportletConf) {
             this.conf = ReconciliationReportletConf.class.cast(conf);
         } else {
@@ -401,39 +389,81 @@ public class ReconciliationReportlet extends AbstractReportlet {
         AttributesImpl atts = new AttributesImpl();
 
         if (StringUtils.isBlank(this.conf.getUserMatchingCond())) {
-            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(userDAO.count()));
+            int total = userDAO.count();
+            int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+            status.set("Processing " + total + " users in " + pages + " pages");
+
+            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(total));
             handler.startElement("", "", getAnyElementName(AnyTypeKind.USER) + "s", atts);
 
-            for (int page = 1; page <= (userDAO.count() / AnyDAO.DEFAULT_PAGE_SIZE) + 1; page++) {
+            for (int page = 1; page <= pages; page++) {
+                status.set("Processing " + total + " users: page " + page + " of " + pages);
+
                 doExtract(handler, userDAO.findAll(page, AnyDAO.DEFAULT_PAGE_SIZE));
             }
         } else {
             SearchCond cond = SearchCondConverter.convert(this.conf.getUserMatchingCond());
 
-            int count = searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.USER);
-            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(count));
+            int total = searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.USER);
+            int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+            status.set("Processing " + total + " users in " + pages + " pages");
+
+            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(total));
             handler.startElement("", "", getAnyElementName(AnyTypeKind.USER) + "s", atts);
 
-            doExtract(handler, count, cond, AnyTypeKind.USER);
+            for (int page = 1; page <= pages; page++) {
+                status.set("Processing " + total + " users: page " + page + " of " + pages);
+
+                doExtract(handler, searchDAO.search(
+                        SyncopeConstants.FULL_ADMIN_REALMS,
+                        cond,
+                        page,
+                        PAGE_SIZE,
+                        Collections.<OrderByClause>emptyList(),
+                        AnyTypeKind.USER));
+            }
         }
         handler.endElement("", "", getAnyElementName(AnyTypeKind.USER) + "s");
 
         atts.clear();
         if (StringUtils.isBlank(this.conf.getGroupMatchingCond())) {
-            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(groupDAO.count()));
+            int total = groupDAO.count();
+            int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+            status.set("Processing " + total + " groups in " + pages + " pages");
+
+            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(total));
             handler.startElement("", "", getAnyElementName(AnyTypeKind.GROUP) + "s", atts);
 
-            for (int page = 1; page <= (groupDAO.count() / AnyDAO.DEFAULT_PAGE_SIZE) + 1; page++) {
+            for (int page = 1; page <= pages; page++) {
+                status.set("Processing " + total + " groups: page " + page + " of " + pages);
+
                 doExtract(handler, groupDAO.findAll(page, AnyDAO.DEFAULT_PAGE_SIZE));
             }
         } else {
             SearchCond cond = SearchCondConverter.convert(this.conf.getUserMatchingCond());
 
-            int count = searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.GROUP);
-            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(count));
+            int total = searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.GROUP);
+            int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+            status.set("Processing " + total + " groups in " + pages + " pages");
+
+            atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(total));
             handler.startElement("", "", getAnyElementName(AnyTypeKind.GROUP) + "s", atts);
 
-            doExtract(handler, count, cond, AnyTypeKind.GROUP);
+            for (int page = 1; page <= pages; page++) {
+                status.set("Processing " + total + " groups: page " + page + " of " + pages);
+
+                doExtract(handler, searchDAO.search(
+                        SyncopeConstants.FULL_ADMIN_REALMS,
+                        cond,
+                        page,
+                        PAGE_SIZE,
+                        Collections.<OrderByClause>emptyList(),
+                        AnyTypeKind.GROUP));
+            }
         }
         handler.endElement("", "", getAnyElementName(AnyTypeKind.GROUP) + "s");
 
@@ -447,14 +477,28 @@ public class ReconciliationReportlet extends AbstractReportlet {
                                 SearchCond.getLeafCond(anyTypeCond),
                                 SearchCondConverter.convert(this.conf.getAnyObjectMatchingCond()));
 
-                int count = searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.ANY_OBJECT);
+                int total = searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.ANY_OBJECT);
+                int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+                status.set("Processing " + total + " any objects " + anyType.getKey() + " in " + pages + " pages");
 
                 atts.clear();
                 atts.addAttribute("", "", "type", ReportXMLConst.XSD_STRING, anyType.getKey());
-                atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(count));
+                atts.addAttribute("", "", "total", ReportXMLConst.XSD_INT, String.valueOf(total));
                 handler.startElement("", "", getAnyElementName(AnyTypeKind.ANY_OBJECT) + "s", atts);
 
-                doExtract(handler, count, cond, AnyTypeKind.ANY_OBJECT);
+                for (int page = 1; page <= pages; page++) {
+                    status.set("Processing " + total + " any objects " + anyType.getKey()
+                            + ": page " + page + " of " + pages);
+
+                    doExtract(handler, searchDAO.search(
+                            SyncopeConstants.FULL_ADMIN_REALMS,
+                            cond,
+                            page,
+                            PAGE_SIZE,
+                            Collections.<OrderByClause>emptyList(),
+                            AnyTypeKind.ANY_OBJECT));
+                }
 
                 handler.endElement("", "", getAnyElementName(AnyTypeKind.ANY_OBJECT) + "s");
             }

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJob.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJob.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJob.java
index 78183d5..5ef6785 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJob.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJob.java
@@ -18,12 +18,14 @@
  */
 package org.apache.syncope.core.provisioning.java.job.report;
 
+import org.apache.syncope.core.provisioning.api.job.JobDelegate;
 import org.apache.syncope.core.spring.security.AuthContextUtils;
 import org.apache.syncope.core.provisioning.java.job.AbstractInterruptableJob;
 import org.quartz.JobExecutionContext;
 import org.quartz.JobExecutionException;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.apache.syncope.core.provisioning.api.job.JobManager;
+import org.apache.syncope.core.provisioning.api.job.report.ReportJobDelegate;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,6 +54,11 @@ public class ReportJob extends AbstractInterruptableJob {
     }
 
     @Override
+    public JobDelegate getDelegate() {
+        return delegate;
+    }
+
+    @Override
     public void execute(final JobExecutionContext context) throws JobExecutionException {
         super.execute(context);
 

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJobDelegate.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJobDelegate.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJobDelegate.java
deleted file mode 100644
index 40d9250..0000000
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/ReportJobDelegate.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.syncope.core.provisioning.java.job.report;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.Date;
-import java.util.zip.Deflater;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.sax.SAXTransformerFactory;
-import javax.xml.transform.sax.TransformerHandler;
-import javax.xml.transform.stream.StreamResult;
-import org.apache.syncope.common.lib.report.ReportletConf;
-import org.apache.syncope.common.lib.types.ReportExecStatus;
-import org.apache.syncope.core.provisioning.api.utils.ExceptionUtils2;
-import org.apache.syncope.core.spring.ApplicationContextProvider;
-import org.apache.syncope.core.persistence.api.ImplementationLookup;
-import org.apache.syncope.core.persistence.api.dao.ReportDAO;
-import org.apache.syncope.core.persistence.api.dao.ReportExecDAO;
-import org.apache.syncope.core.persistence.api.dao.Reportlet;
-import org.apache.syncope.core.persistence.api.entity.EntityFactory;
-import org.apache.syncope.core.persistence.api.entity.Report;
-import org.apache.syncope.core.persistence.api.entity.ReportExec;
-import org.quartz.JobExecutionException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.support.AbstractBeanDefinition;
-import org.springframework.stereotype.Component;
-import org.springframework.transaction.annotation.Transactional;
-import org.xml.sax.helpers.AttributesImpl;
-
-@Component
-public class ReportJobDelegate {
-
-    private static final Logger LOG = LoggerFactory.getLogger(ReportJobDelegate.class);
-
-    /**
-     * Report DAO.
-     */
-    @Autowired
-    private ReportDAO reportDAO;
-
-    /**
-     * Report execution DAO.
-     */
-    @Autowired
-    private ReportExecDAO reportExecDAO;
-
-    @Autowired
-    private EntityFactory entityFactory;
-
-    @Autowired
-    private ImplementationLookup implementationLookup;
-
-    @Transactional
-    public void execute(final String reportKey) throws JobExecutionException {
-        Report report = reportDAO.find(reportKey);
-        if (report == null) {
-            throw new JobExecutionException("Report " + reportKey + " not found");
-        }
-
-        if (!report.isActive()) {
-            LOG.info("Report {} not active, aborting...", reportKey);
-            return;
-        }
-
-        // 1. create execution
-        ReportExec execution = entityFactory.newEntity(ReportExec.class);
-        execution.setStatus(ReportExecStatus.STARTED);
-        execution.setStart(new Date());
-        execution.setReport(report);
-        execution = reportExecDAO.save(execution);
-
-        report.add(execution);
-        report = reportDAO.save(report);
-
-        // 2. define a SAX handler for generating result as XML
-        TransformerHandler handler;
-
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        ZipOutputStream zos = new ZipOutputStream(baos);
-        zos.setLevel(Deflater.BEST_COMPRESSION);
-        try {
-            SAXTransformerFactory tFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
-            tFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
-            handler = tFactory.newTransformerHandler();
-            Transformer serializer = handler.getTransformer();
-            serializer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name());
-            serializer.setOutputProperty(OutputKeys.INDENT, "yes");
-
-            // a single ZipEntry in the ZipOutputStream
-            zos.putNextEntry(new ZipEntry(report.getName()));
-
-            // streaming SAX handler in a compressed byte array stream
-            handler.setResult(new StreamResult(zos));
-        } catch (Exception e) {
-            throw new JobExecutionException("While configuring for SAX generation", e, true);
-        }
-
-        execution.setStatus(ReportExecStatus.RUNNING);
-        execution = reportExecDAO.save(execution);
-
-        // 3. actual report execution
-        StringBuilder reportExecutionMessage = new StringBuilder();
-        try {
-            // report header
-            handler.startDocument();
-            AttributesImpl atts = new AttributesImpl();
-            atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
-            handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);
-
-            // iterate over reportlet instances defined for this report
-            for (ReportletConf reportletConf : report.getReportletConfs()) {
-                Class<? extends Reportlet> reportletClass =
-                        implementationLookup.getReportletClass(reportletConf.getClass());
-                if (reportletClass == null) {
-                    LOG.warn("Could not find matching reportlet for {}", reportletConf.getClass());
-                } else {
-                    // fetch (or create) reportlet
-                    Reportlet reportlet;
-                    if (ApplicationContextProvider.getBeanFactory().containsSingleton(reportletClass.getName())) {
-                        reportlet = (Reportlet) ApplicationContextProvider.getBeanFactory().
-                                getSingleton(reportletClass.getName());
-                    } else {
-                        reportlet = (Reportlet) ApplicationContextProvider.getBeanFactory().
-                                createBean(reportletClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false);
-                        ApplicationContextProvider.getBeanFactory().
-                                registerSingleton(reportletClass.getName(), reportlet);
-                    }
-
-                    // invoke reportlet
-                    try {
-                        reportlet.extract(reportletConf, handler);
-                    } catch (Throwable t) {
-                        LOG.error("While executing reportlet {} for report {}", reportlet, reportKey, t);
-
-                        execution.setStatus(ReportExecStatus.FAILURE);
-
-                        Throwable effective = t instanceof ReportException
-                                ? t.getCause()
-                                : t;
-                        reportExecutionMessage.
-                                append(ExceptionUtils2.getFullStackTrace(effective)).
-                                append("\n==================\n");
-                    }
-                }
-            }
-
-            // report footer
-            handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
-            handler.endDocument();
-
-            if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
-                execution.setStatus(ReportExecStatus.SUCCESS);
-            }
-        } catch (Exception e) {
-            execution.setStatus(ReportExecStatus.FAILURE);
-            reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(e));
-
-            throw new JobExecutionException(e, true);
-        } finally {
-            try {
-                zos.closeEntry();
-                zos.close();
-                baos.close();
-            } catch (IOException e) {
-                LOG.error("While closing StreamResult's backend", e);
-            }
-
-            execution.setExecResult(baos.toByteArray());
-            execution.setMessage(reportExecutionMessage.toString());
-            execution.setEnd(new Date());
-            reportExecDAO.save(execution);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/StaticReportlet.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/StaticReportlet.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/StaticReportlet.java
index 1156d25..49e5e2a 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/StaticReportlet.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/StaticReportlet.java
@@ -18,6 +18,7 @@
  */
 package org.apache.syncope.core.provisioning.java.job.report;
 
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.syncope.core.persistence.api.dao.ReportletConfClass;
 import org.apache.syncope.common.lib.report.ReportletConf;
 import org.apache.syncope.common.lib.report.StaticReportletConf;
@@ -70,7 +71,12 @@ public class StaticReportlet extends AbstractReportlet {
     }
 
     @Override
-    protected void doExtract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
+    protected void doExtract(
+            final ReportletConf conf,
+            final ContentHandler handler,
+            final AtomicReference<String> status)
+            throws SAXException {
+
         if (conf instanceof StaticReportletConf) {
             this.conf = StaticReportletConf.class.cast(conf);
         } else {

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/UserReportlet.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/UserReportlet.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/UserReportlet.java
index 5ab0e78..0c13b27 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/UserReportlet.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/report/UserReportlet.java
@@ -22,6 +22,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.syncope.common.lib.EntityTOUtils;
 import org.apache.syncope.common.lib.SyncopeConstants;
@@ -355,7 +356,12 @@ public class UserReportlet extends AbstractReportlet {
     }
 
     @Override
-    protected void doExtract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
+    protected void doExtract(
+            final ReportletConf conf,
+            final ContentHandler handler,
+            final AtomicReference<String> status)
+            throws SAXException {
+
         if (conf instanceof UserReportletConf) {
             this.conf = UserReportletConf.class.cast(conf);
         } else {
@@ -364,7 +370,14 @@ public class UserReportlet extends AbstractReportlet {
 
         doExtractConf(handler);
 
-        for (int page = 1; page <= (count() / AnyDAO.DEFAULT_PAGE_SIZE) + 1; page++) {
+        int total = count();
+        int pages = (total / AnyDAO.DEFAULT_PAGE_SIZE) + 1;
+
+        status.set("Processing " + total + " users in " + pages + " pages");
+
+        for (int page = 1; page <= pages; page++) {
+            status.set("Processing " + total + " users: page " + page + " of " + pages);
+
             List<User> users;
             if (StringUtils.isBlank(this.conf.getMatchingCond())) {
                 users = userDAO.findAll(page, AnyDAO.DEFAULT_PAGE_SIZE);

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/propagation/AbstractPropagationTaskExecutor.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/propagation/AbstractPropagationTaskExecutor.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/propagation/AbstractPropagationTaskExecutor.java
index 66e864f..3357335 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/propagation/AbstractPropagationTaskExecutor.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/propagation/AbstractPropagationTaskExecutor.java
@@ -28,6 +28,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.collections4.IteratorUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.syncope.common.lib.to.ExecTO;
@@ -209,7 +210,7 @@ public abstract class AbstractPropagationTaskExecutor implements PropagationTask
             final PropagationTask task,
             final ConnectorObject beforeObj,
             final Connector connector,
-            final Boolean[] propagationAttempted) {
+            final AtomicReference<Boolean> propagationAttempted) {
 
         // set of attributes to be propagated
         Set<Attribute> attributes = new HashSet<>(task.getAttributes());
@@ -299,7 +300,7 @@ public abstract class AbstractPropagationTaskExecutor implements PropagationTask
             final PropagationTask task,
             final ConnectorObject beforeObj,
             final Connector connector,
-            final Boolean[] propagationAttempted) {
+            final AtomicReference<Boolean> propagationAttempted) {
 
         Uid result;
         if (beforeObj == null) {
@@ -392,7 +393,7 @@ public abstract class AbstractPropagationTaskExecutor implements PropagationTask
         String failureReason = null;
 
         // Flag to state whether any propagation has been attempted
-        Boolean[] propagationAttempted = new Boolean[] { false };
+        AtomicReference<Boolean> propagationAttempted = new AtomicReference<>(false);
 
         ConnectorObject beforeObj = null;
         ConnectorObject afterObj = null;
@@ -431,7 +432,7 @@ public abstract class AbstractPropagationTaskExecutor implements PropagationTask
                 default:
             }
 
-            execution.setStatus(propagationAttempted[0]
+            execution.setStatus(propagationAttempted.get()
                     ? PropagationTaskExecStatus.SUCCESS.name()
                     : PropagationTaskExecStatus.NOT_ATTEMPTED.name());
 
@@ -463,7 +464,7 @@ public abstract class AbstractPropagationTaskExecutor implements PropagationTask
                 LOG.error("While executing KO action on {}", execution, wft);
             }
 
-            propagationAttempted[0] = true;
+            propagationAttempted.set(true);
 
             for (PropagationActions action : actions) {
                 action.onError(task, execution, e);
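
Replacing the single-element Boolean[] with AtomicReference<Boolean> keeps the same "mutable flag shared with helper methods" idiom, but makes the intent explicit and stays safe if the flag is ever read from another thread. A tiny sketch of the idiom, unrelated to Syncope code:

    import java.util.concurrent.atomic.AtomicReference;

    public class FlagSketch {

        // a helper can flip the caller's flag without having to return it
        static void attempt(final AtomicReference<Boolean> attempted) {
            attempted.set(true);
        }

        public static void main(final String[] args) {
            AtomicReference<Boolean> attempted = new AtomicReference<>(false);
            attempt(attempted);
            System.out.println(attempted.get() ? "SUCCESS" : "NOT_ATTEMPTED");
        }
    }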

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/AbstractPullResultHandler.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/AbstractPullResultHandler.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/AbstractPullResultHandler.java
index d1853c2..ef32fc9 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/AbstractPullResultHandler.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/AbstractPullResultHandler.java
@@ -130,6 +130,7 @@ public abstract class AbstractPullResultHandler extends AbstractSyncopeResultHan
             }
 
             doHandle(delta, provision);
+            executor.reportHandled(delta.getObjectClass(), delta.getObject().getName());
 
             LOG.debug("Successfully handled {}", delta);
 
@@ -159,6 +160,7 @@ public abstract class AbstractPullResultHandler extends AbstractSyncopeResultHan
             LOG.warn("Ignoring during pull", e);
 
             executor.setLatestSyncToken(delta.getObjectClass(), delta.getToken());
+            executor.reportHandled(delta.getObjectClass(), delta.getObject().getName());
 
             return true;
         } catch (JobExecutionException e) {
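
The new executor.reportHandled(...) calls here (and the identical ones in DefaultRealmPullResultHandler below) feed the per-object-class counters kept by PullJobDelegate. The SyncopePullExecutor declaration itself is not part of this excerpt; inferred from these call sites and the implementation further down, the added operation would look roughly like this:

    import org.identityconnectors.framework.common.objects.Name;
    import org.identityconnectors.framework.common.objects.ObjectClass;

    // inferred signature, shown only for orientation; not the actual interface source
    public interface PullExecutorStatusSketch {

        /** Record that one more remote object of the given class has been handled. */
        void reportHandled(ObjectClass objectClass, Name name);
    }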

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/DefaultRealmPullResultHandler.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/DefaultRealmPullResultHandler.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/DefaultRealmPullResultHandler.java
index c274dc6..6ff95fc 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/DefaultRealmPullResultHandler.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/DefaultRealmPullResultHandler.java
@@ -89,6 +89,7 @@ public class DefaultRealmPullResultHandler
             }
 
             doHandle(delta, orgUnit);
+            executor.reportHandled(delta.getObjectClass(), delta.getObject().getName());
 
             LOG.debug("Successfully handled {}", delta);
 
@@ -117,6 +118,7 @@ public class DefaultRealmPullResultHandler
             LOG.warn("Ignoring during pull", e);
 
             executor.setLatestSyncToken(delta.getObjectClass(), delta.getToken());
+            executor.reportHandled(delta.getObjectClass(), delta.getObject().getName());
 
             return true;
         } catch (JobExecutionException e) {

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PullJobDelegate.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PullJobDelegate.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PullJobDelegate.java
index b8c1fa2..dfb526e 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PullJobDelegate.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PullJobDelegate.java
@@ -27,6 +27,7 @@ import java.util.Map;
 import java.util.Set;
 import org.apache.commons.collections4.IteratorUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.MutablePair;
 import org.apache.syncope.common.lib.policy.PullPolicySpec;
 import org.apache.syncope.core.spring.ApplicationContextProvider;
 import org.apache.syncope.core.persistence.api.dao.GroupDAO;
@@ -54,6 +55,7 @@ import org.apache.syncope.core.provisioning.api.pushpull.SyncopePullExecutor;
 import org.apache.syncope.core.provisioning.api.pushpull.SyncopePullResultHandler;
 import org.apache.syncope.core.provisioning.api.pushpull.UserPullResultHandler;
 import org.apache.syncope.core.provisioning.java.utils.MappingUtils;
+import org.identityconnectors.framework.common.objects.Name;
 import org.identityconnectors.framework.common.objects.ObjectClass;
 import org.identityconnectors.framework.common.objects.OperationOptions;
 import org.identityconnectors.framework.common.objects.SyncToken;
@@ -74,6 +76,8 @@ public class PullJobDelegate extends AbstractProvisioningJobDelegate<PullTask> i
 
     protected final Map<ObjectClass, SyncToken> latestSyncTokens = new HashMap<>();
 
+    protected final Map<ObjectClass, MutablePair<Integer, String>> handled = new HashMap<>();
+
     protected ProvisioningProfile<PullTask, PullActions> profile;
 
     protected RealmPullResultHandler rhandler;
@@ -89,6 +93,34 @@ public class PullJobDelegate extends AbstractProvisioningJobDelegate<PullTask> i
         latestSyncTokens.put(objectClass, latestSyncToken);
     }
 
+    @Override
+    public void reportHandled(final ObjectClass objectClass, final Name name) {
+        MutablePair<Integer, String> pair = handled.get(objectClass);
+        if (pair == null) {
+            pair = MutablePair.of(0, null);
+            handled.put(objectClass, pair);
+        }
+        pair.setLeft(pair.getLeft() + 1);
+        pair.setRight(name.getNameValue());
+    }
+
+    @Override
+    public String currentStatus() {
+        synchronized (status) {
+            if (!handled.isEmpty()) {
+                StringBuilder builder = new StringBuilder("Processed:\n");
+                for (Map.Entry<ObjectClass, MutablePair<Integer, String>> entry : handled.entrySet()) {
+                    builder.append(' ').append(entry.getValue().getLeft()).append('\t').
+                            append(entry.getKey().getObjectClassValue()).
+                            append("\t/ latest: ").append(entry.getValue().getRight()).
+                            append('\n');
+                }
+                status.set(builder.toString());
+            }
+        }
+        return status.get();
+    }
+
     protected void setGroupOwners(final GroupPullResultHandler ghandler) {
         for (Map.Entry<String, String> entry : ghandler.getGroupOwnerMap().entrySet()) {
             Group group = groupDAO.find(entry.getKey());
@@ -195,8 +227,12 @@ public class PullJobDelegate extends AbstractProvisioningJobDelegate<PullTask> i
             }
         }
 
+        status.set("Initialization completed");
+
         // First realms...
         if (pullTask.getResource().getOrgUnit() != null) {
+            status.set("Pulling " + pullTask.getResource().getOrgUnit().getObjectClass().getObjectClassValue());
+
             OrgUnit orgUnit = pullTask.getResource().getOrgUnit();
             OperationOptions options = MappingUtils.buildOperationOptions(
                     MappingUtils.getPullItems(orgUnit.getItems()).iterator());
@@ -252,6 +288,8 @@ public class PullJobDelegate extends AbstractProvisioningJobDelegate<PullTask> i
 
         for (Provision provision : pullTask.getResource().getProvisions()) {
             if (provision.getMapping() != null) {
+                status.set("Pulling " + provision.getObjectClass().getObjectClassValue());
+
                 SyncopePullResultHandler handler;
                 switch (provision.getAnyType().getKind()) {
                     case USER:
@@ -331,6 +369,8 @@ public class PullJobDelegate extends AbstractProvisioningJobDelegate<PullTask> i
             }
         }
 
+        status.set("Pull done");
+
         String result = createReport(profile.getResults(), pullTask.getResource(), dryRun);
         LOG.debug("Pull result: {}", result);
         return result;
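
currentStatus() above renders the handled map into a per-object-class summary. A standalone sketch of that aggregation, using plain Strings instead of ConnId ObjectClass so it runs with nothing but commons-lang3 on the classpath; the object class and user names are invented:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.lang3.tuple.MutablePair;

    public class PullStatusSketch {

        private static final Map<String, MutablePair<Integer, String>> HANDLED = new HashMap<>();

        static void reportHandled(final String objectClass, final String name) {
            MutablePair<Integer, String> pair = HANDLED.get(objectClass);
            if (pair == null) {
                pair = MutablePair.of(0, null);
                HANDLED.put(objectClass, pair);
            }
            pair.setLeft(pair.getLeft() + 1);  // running count per object class
            pair.setRight(name);               // most recently handled object
        }

        public static void main(final String[] args) {
            reportHandled("__ACCOUNT__", "jdoe");
            reportHandled("__ACCOUNT__", "asmith");
            reportHandled("__GROUP__", "employees");

            StringBuilder builder = new StringBuilder("Processed:\n");
            for (Map.Entry<String, MutablePair<Integer, String>> entry : HANDLED.entrySet()) {
                builder.append(' ').append(entry.getValue().getLeft()).append('\t').
                        append(entry.getKey()).
                        append("\t/ latest: ").append(entry.getValue().getRight()).append('\n');
            }
            System.out.println(builder);
        }
    }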

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PushJobDelegate.java
----------------------------------------------------------------------
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PushJobDelegate.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PushJobDelegate.java
index 5c8e788..abbb765 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PushJobDelegate.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/pushpull/PushJobDelegate.java
@@ -20,8 +20,12 @@ package org.apache.syncope.core.provisioning.java.pushpull;
 
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.MutablePair;
+import org.apache.syncope.common.lib.SyncopeConstants;
 import org.apache.syncope.common.lib.types.AnyTypeKind;
 import org.apache.syncope.core.persistence.api.search.SearchCondConverter;
 import org.apache.syncope.core.spring.ApplicationContextProvider;
@@ -35,9 +39,12 @@ import org.apache.syncope.core.persistence.api.dao.search.OrderByClause;
 import org.apache.syncope.core.persistence.api.dao.search.SearchCond;
 import org.apache.syncope.core.persistence.api.entity.Any;
 import org.apache.syncope.core.persistence.api.entity.Realm;
+import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject;
+import org.apache.syncope.core.persistence.api.entity.group.Group;
 import org.apache.syncope.core.persistence.api.entity.resource.ExternalResource;
 import org.apache.syncope.core.persistence.api.entity.resource.Provision;
 import org.apache.syncope.core.persistence.api.entity.task.PushTask;
+import org.apache.syncope.core.persistence.api.entity.user.User;
 import org.apache.syncope.core.provisioning.api.Connector;
 import org.apache.syncope.core.provisioning.api.pushpull.AnyObjectPushResultHandler;
 import org.apache.syncope.core.provisioning.api.pushpull.GroupPushResultHandler;
@@ -78,6 +85,8 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
 
     protected ProvisioningProfile<PushTask, PushActions> profile;
 
+    protected final Map<String, MutablePair<Integer, String>> handled = new HashMap<>();
+
     protected RealmPushResultHandler rhandler;
 
     protected AnyObjectPushResultHandler ahandler;
@@ -86,6 +95,33 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
 
     protected GroupPushResultHandler ghandler;
 
+    protected void reportHandled(final String anyType, final String key) {
+        MutablePair<Integer, String> pair = handled.get(anyType);
+        if (pair == null) {
+            pair = MutablePair.of(0, null);
+            handled.put(anyType, pair);
+        }
+        pair.setLeft(pair.getLeft() + 1);
+        pair.setRight(key);
+    }
+
+    @Override
+    public String currentStatus() {
+        synchronized (status) {
+            if (!handled.isEmpty()) {
+                StringBuilder builder = new StringBuilder("Processed:\n");
+                for (Map.Entry<String, MutablePair<Integer, String>> entry : handled.entrySet()) {
+                    builder.append(' ').append(entry.getValue().getLeft()).append('\t').
+                            append(entry.getKey()).
+                            append("\t/ latest: ").append(entry.getValue().getRight()).
+                            append('\n');
+                }
+                status.set(builder.toString());
+            }
+        }
+        return status.get();
+    }
+
     protected AnyDAO<?> getAnyDAO(final AnyTypeKind anyTypeKind) {
         AnyDAO<?> result;
         switch (anyTypeKind) {
@@ -114,6 +150,13 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
         for (Any<?> any : anys) {
             try {
                 handler.handle(any.getKey());
+                reportHandled(
+                        any.getType().getKey(),
+                        (any instanceof User
+                                ? ((User) any).getUsername()
+                                : any instanceof Group
+                                        ? ((Group) any).getName()
+                                        : ((AnyObject) any).getName()));
             } catch (Exception e) {
                 LOG.warn("Failure pushing '{}' on '{}'", any, resource, e);
                 throw new JobExecutionException("While pushing " + any + " on " + resource, e);
@@ -185,8 +228,12 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
             }
         }
 
+        status.set("Initialization completed");
+
         // First realms...
         if (pushTask.getResource().getOrgUnit() != null) {
+            status.set("Pushing realms");
+
             rhandler = buildRealmHandler();
 
             for (Realm realm : realmDAO.findDescendants(profile.getTask().getSourceRealm())) {
@@ -194,6 +241,7 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
                 if (realm.getParent() != null) {
                     try {
                         rhandler.handle(realm.getKey());
+                        reportHandled(SyncopeConstants.REALM_ANYTYPE, realm.getName());
                     } catch (Exception e) {
                         LOG.warn("Failure pushing '{}' on '{}'", realm, pushTask.getResource(), e);
                         throw new JobExecutionException("While pushing " + realm + " on " + pushTask.getResource(), e);
@@ -209,6 +257,8 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
 
         for (Provision provision : pushTask.getResource().getProvisions()) {
             if (provision.getMapping() != null) {
+                status.set("Pushing " + provision.getAnyType().getKey());
+
                 AnyDAO<?> anyDAO = getAnyDAO(provision.getAnyType().getKind());
 
                 SyncopePushResultHandler handler;
@@ -255,6 +305,8 @@ public class PushJobDelegate extends AbstractProvisioningJobDelegate<PushTask> {
             }
         }
 
+        status.set("Push done");
+
         String result = createReport(profile.getResults(), pushTask.getResource(), dryRun);
         LOG.debug("Push result: {}", result);
         return result;
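
The reportHandled() call added inside the push loop above resolves a display name with a nested ternary over User/Group/AnyObject. Purely as an illustration of the same dispatch in a more readable form (the committed code keeps the inline ternary), a small helper built on the Syncope persistence-api types this patch already imports:

    import org.apache.syncope.core.persistence.api.entity.Any;
    import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject;
    import org.apache.syncope.core.persistence.api.entity.group.Group;
    import org.apache.syncope.core.persistence.api.entity.user.User;

    public final class DisplayNameSketch {

        private DisplayNameSketch() {
            // utility class, no instances
        }

        public static String displayName(final Any<?> any) {
            if (any instanceof User) {
                return ((User) any).getUsername();
            }
            if (any instanceof Group) {
                return ((Group) any).getName();
            }
            return ((AnyObject) any).getName();
        }
    }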

http://git-wip-us.apache.org/repos/asf/syncope/blob/799f079f/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/AbstractExecutableService.java
----------------------------------------------------------------------
diff --git a/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/AbstractExecutableService.java b/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/AbstractExecutableService.java
index aadbaf3..81ef47c 100644
--- a/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/AbstractExecutableService.java
+++ b/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/AbstractExecutableService.java
@@ -71,6 +71,11 @@ public abstract class AbstractExecutableService extends AbstractServiceImpl impl
     }
 
     @Override
+    public JobTO getJob(final String key) {
+        return getExecutableLogic().getJob(key);
+    }
+
+    @Override
     public List<JobTO> listJobs() {
         return getExecutableLogic().listJobs();
     }
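
With getJob(key) exposed here, the runtime status collected by the delegates above becomes reachable over REST. A hedged client-side sketch of polling it: the address, credentials and task key are placeholders, TaskService is assumed to be one of the services inheriting the new operation, and JobTO.getStatus() is assumed to be the accessor backing the status introduced by this change.

    import org.apache.syncope.client.lib.SyncopeClient;
    import org.apache.syncope.client.lib.SyncopeClientFactoryBean;
    import org.apache.syncope.common.lib.to.JobTO;
    import org.apache.syncope.common.rest.api.service.TaskService;

    public class JobStatusPoller {

        public static void main(final String[] args) {
            // address and credentials are placeholders for an actual deployment
            SyncopeClient client = new SyncopeClientFactoryBean().
                    setAddress("http://localhost:9080/syncope/rest/").
                    create("admin", "password");

            TaskService taskService = client.getService(TaskService.class);

            // getJob(key) is the operation added to the executable services in this patch;
            // getStatus() is assumed to expose the runtime status string
            JobTO job = taskService.getJob("task-key-placeholder");
            System.out.println(job.getStatus());
        }
    }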