Posted to commits@syncope.apache.org by gi...@apache.org on 2015/05/22 09:47:42 UTC

[1/4] syncope git commit: [SYNCOPE-660] Asynchronous jobs delegated to Quartz, such as Tasks and Reports, can now be checked and interrupted during execution

Repository: syncope
Updated Branches:
  refs/heads/master d19133ef5 -> d489e8c59


[SYNCOPE-660] Asynchronous jobs delegated to Quartz, such as Tasks and Reports, can now be checked and interrupted during execution


Project: http://git-wip-us.apache.org/repos/asf/syncope/repo
Commit: http://git-wip-us.apache.org/repos/asf/syncope/commit/358aef72
Tree: http://git-wip-us.apache.org/repos/asf/syncope/tree/358aef72
Diff: http://git-wip-us.apache.org/repos/asf/syncope/diff/358aef72

Branch: refs/heads/master
Commit: 358aef72dca5672361cf1a9a3ac2b8c0c57517a3
Parents: 1d6451b
Author: giacomolm <gi...@hotmail.it>
Authored: Tue Apr 14 16:33:45 2015 +0200
Committer: giacomolm <gi...@hotmail.it>
Committed: Thu May 21 17:00:47 2015 +0200

----------------------------------------------------------------------
 .../syncope/common/services/ReportService.java  |  24 +++
 .../syncope/common/services/TaskService.java    |  23 +++
 .../apache/syncope/common/types/JobAction.java  |  29 ++++
 .../syncope/common/types/JobStatusType.java     |  30 ++++
 core/pom.xml                                    |   1 +
 .../syncope/core/quartz/AbstractTaskJob.java    |  25 ++-
 .../org/apache/syncope/core/quartz/TaskJob.java |   4 +-
 .../rest/controller/AbstractJobController.java  | 155 +++++++++++++++++++
 .../core/rest/controller/ReportController.java  |  30 +++-
 .../core/rest/controller/TaskController.java    |  26 +++-
 .../core/services/ReportServiceImpl.java        |  42 +++--
 .../syncope/core/services/TaskServiceImpl.java  |  12 ++
 .../syncope/core/quartz/TestSampleJob.java      |  63 ++++++++
 .../syncope/core/rest/TaskTestITCase.java       |  60 +++++++
 14 files changed, 500 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/common/src/main/java/org/apache/syncope/common/services/ReportService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/syncope/common/services/ReportService.java b/common/src/main/java/org/apache/syncope/common/services/ReportService.java
index 4bf9c27..47a75ac 100644
--- a/common/src/main/java/org/apache/syncope/common/services/ReportService.java
+++ b/common/src/main/java/org/apache/syncope/common/services/ReportService.java
@@ -25,6 +25,7 @@ import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
+import javax.ws.rs.MatrixParam;
 import javax.ws.rs.POST;
 import javax.ws.rs.PUT;
 import javax.ws.rs.Path;
@@ -39,6 +40,8 @@ import org.apache.cxf.jaxrs.model.wadl.DocTarget;
 import org.apache.syncope.common.reqres.PagedResult;
 import org.apache.syncope.common.to.ReportExecTO;
 import org.apache.syncope.common.to.ReportTO;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.common.types.ReportExecExportFormat;
 import org.apache.syncope.common.wrap.ReportletConfClass;
 
@@ -192,4 +195,25 @@ public interface ReportService extends JAXRSService {
     @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
     Response exportExecutionResult(@NotNull @PathParam("executionId") Long executionId,
             @QueryParam("format") ReportExecExportFormat fmt);
+
+    /**
+     * Lists report jobs of the given type.
+     *
+     * @param type type of report jobs to list
+     * @return list of report executions for jobs of the given type
+     */
+    @GET
+    @Path("jobs")
+    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
+    List<ReportExecTO> list(@MatrixParam("type") JobStatusType type);
+
+    /**
+     * Executes the given control action on the job of an existing report.
+     *
+     * @param action control action to perform (start or stop)
+     * @param reportId id of report whose job is affected
+     */
+    @POST
+    @Path("{reportId}")
+    void process(@QueryParam("action") JobAction action, @PathParam("reportId") Long reportId);
 }
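
For reference, a minimal client-side sketch of the two operations added above (assuming an already configured ReportService JAX-RS proxy named reportService; the report id 1 is illustrative):

    // list every report job known to Quartz, then only the currently running ones
    List<ReportExecTO> all = reportService.list(JobStatusType.ALL);
    List<ReportExecTO> running = reportService.list(JobStatusType.RUNNING);

    // trigger the job of report 1, then interrupt it while it runs
    reportService.process(JobAction.START, 1L);
    reportService.process(JobAction.STOP, 1L);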

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/common/src/main/java/org/apache/syncope/common/services/TaskService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/syncope/common/services/TaskService.java b/common/src/main/java/org/apache/syncope/common/services/TaskService.java
index 8efba34..70929db 100644
--- a/common/src/main/java/org/apache/syncope/common/services/TaskService.java
+++ b/common/src/main/java/org/apache/syncope/common/services/TaskService.java
@@ -44,6 +44,8 @@ import org.apache.syncope.common.to.ReportExecTO;
 import org.apache.syncope.common.to.TaskExecTO;
 import org.apache.syncope.common.to.AbstractTaskTO;
 import org.apache.syncope.common.to.SchedTaskTO;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.common.types.TaskType;
 import org.apache.syncope.common.wrap.JobClass;
 import org.apache.syncope.common.wrap.PushActionClass;
@@ -242,4 +244,25 @@ public interface TaskService extends JAXRSService {
     @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
     @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
     BulkActionResult bulk(@NotNull BulkAction bulkAction);
+
+    /**
+     * Lists task jobs of the given type.
+     *
+     * @param type type of task jobs to list
+     * @return list of task executions for jobs of the given type
+     */
+    @GET
+    @Path("jobs")
+    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
+    List<TaskExecTO> list(@MatrixParam("type") JobStatusType type);
+
+    /**
+     * Executes the given control action on the job of an existing task.
+     *
+     * @param action control action to perform (start or stop)
+     * @param taskId id of task whose job is affected
+     */
+    @POST
+    @Path("{taskId}")
+    void process(@QueryParam("action") JobAction action, @PathParam("taskId") Long taskId);
 }
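
The task-side counterpart is used the same way; since the job type is a matrix parameter and the control action a query parameter, the two calls below map to GET /tasks/jobs;type=RUNNING and POST /tasks/123?action=STOP (assuming the interface is mounted under /tasks, as the rest of TaskService; the task id 123 is illustrative):

    List<TaskExecTO> running = taskService.list(JobStatusType.RUNNING);
    taskService.process(JobAction.STOP, 123L);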

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/common/src/main/java/org/apache/syncope/common/types/JobAction.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/syncope/common/types/JobAction.java b/common/src/main/java/org/apache/syncope/common/types/JobAction.java
new file mode 100644
index 0000000..cbabd2a
--- /dev/null
+++ b/common/src/main/java/org/apache/syncope/common/types/JobAction.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.common.types;
+
+import javax.xml.bind.annotation.XmlEnum;
+
+@XmlEnum
+public enum JobAction {
+
+    START,
+    STOP;
+
+}

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/common/src/main/java/org/apache/syncope/common/types/JobStatusType.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/syncope/common/types/JobStatusType.java b/common/src/main/java/org/apache/syncope/common/types/JobStatusType.java
new file mode 100644
index 0000000..ed5f914
--- /dev/null
+++ b/common/src/main/java/org/apache/syncope/common/types/JobStatusType.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.common.types;
+
+import javax.xml.bind.annotation.XmlEnum;
+
+@XmlEnum
+public enum JobStatusType {
+
+    ALL,
+    RUNNING,
+    SCHEDULED;
+
+}

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index b613270..79c2cfe 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -466,6 +466,7 @@ under the License.
                 <copy file="${project.build.directory}/test-classes/org/apache/syncope/core/sync/TestSyncActions.class" todir="${cargo.run.dir}/WEB-INF/classes/org/apache/syncope/core/sync" />
                 <copy file="${project.build.directory}/test-classes/org/apache/syncope/core/sync/TestSyncRule.class" todir="${cargo.run.dir}/WEB-INF/classes/org/apache/syncope/core/sync" />
                 <copy file="${project.build.directory}/test-classes/org/apache/syncope/core/rest/data/DoubleValueAttributableTransformer.class" todir="${cargo.run.dir}/WEB-INF/classes/org/apache/syncope/core/rest/data" />
+                <copy file="${project.build.directory}/test-classes/org/apache/syncope/core/quartz/TestSampleJob.class" todir="${cargo.run.dir}/WEB-INF/classes/org/apache/syncope/core/quartz" />
                 <copy file="${project.build.directory}/test-classes/db.jsp" todir="${cargo.run.dir}" />
               </target>
             </configuration>

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/quartz/AbstractTaskJob.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/quartz/AbstractTaskJob.java b/core/src/main/java/org/apache/syncope/core/quartz/AbstractTaskJob.java
index d0b590c..9538311 100644
--- a/core/src/main/java/org/apache/syncope/core/quartz/AbstractTaskJob.java
+++ b/core/src/main/java/org/apache/syncope/core/quartz/AbstractTaskJob.java
@@ -18,12 +18,15 @@
  */
 package org.apache.syncope.core.quartz;
 
+import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
+import java.util.concurrent.atomic.AtomicReference;
+import org.apache.syncope.common.SyncopeConstants;
 import org.apache.syncope.common.types.AuditElements;
 import org.apache.syncope.common.types.AuditElements.Result;
 import org.apache.syncope.core.audit.AuditManager;
 import org.apache.syncope.core.notification.NotificationManager;
-
 import org.apache.syncope.core.persistence.beans.Task;
 import org.apache.syncope.core.persistence.beans.TaskExec;
 import org.apache.syncope.core.persistence.dao.TaskDAO;
@@ -32,6 +35,7 @@ import org.apache.syncope.core.util.ExceptionUtil;
 import org.quartz.DisallowConcurrentExecution;
 import org.quartz.JobExecutionContext;
 import org.quartz.JobExecutionException;
+import org.quartz.UnableToInterruptJobException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -99,6 +103,11 @@ public abstract class AbstractTaskJob implements TaskJob {
     protected Task task;
 
     /**
+     * The current running thread containing the task to be executed.
+     */
+    protected AtomicReference<Thread> runningThread = new AtomicReference<Thread>();
+
+    /**
      * Task id setter.
      *
      * @param taskId to be set
@@ -110,6 +119,7 @@ public abstract class AbstractTaskJob implements TaskJob {
 
     @Override
     public void execute(final JobExecutionContext context) throws JobExecutionException {
+        this.runningThread.set(Thread.currentThread());
         task = taskDAO.find(taskId);
         if (task == null) {
             throw new JobExecutionException("Task " + taskId + " not found");
@@ -176,4 +186,17 @@ public abstract class AbstractTaskJob implements TaskJob {
     protected boolean hasToBeRegistered(final TaskExec execution) {
         return false;
     }
+
+    @Override
+    public void interrupt() throws UnableToInterruptJobException {
+        Thread thread = this.runningThread.getAndSet(null);
+        if (thread != null) {
+            LOG.info("Interrupting job time {} ", (new SimpleDateFormat(SyncopeConstants.DEFAULT_DATE_PATTERN, Locale.
+                    getDefault())).format(new Date()));
+            thread.interrupt();
+        } else {
+            LOG.warn("Unable to retrieve the right thread related to the current job execution");
+        }
+    }
 }
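
Note that interrupt() above only interrupts the thread captured at the beginning of execute(); a concrete job still has to cooperate. A minimal sketch of a cooperative loop inside a doExecute() implementation (rounds is illustrative; taskId is the field inherited from AbstractTaskJob):

    for (int i = 0; i < rounds; i++) {
        if (Thread.currentThread().isInterrupted()) {
            throw new JobExecutionException("Task " + taskId + " interrupted");
        }
        try {
            // any blocking call that throws InterruptedException reacts the same way
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            throw new JobExecutionException("Task " + taskId + " interrupted", e);
        }
        // ... perform one unit of work ...
    }

TestSampleJob, added below for the integration tests, relies on the InterruptedException branch of this pattern.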

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/quartz/TaskJob.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/quartz/TaskJob.java b/core/src/main/java/org/apache/syncope/core/quartz/TaskJob.java
index 7423690..d1dd83e 100644
--- a/core/src/main/java/org/apache/syncope/core/quartz/TaskJob.java
+++ b/core/src/main/java/org/apache/syncope/core/quartz/TaskJob.java
@@ -18,12 +18,12 @@
  */
 package org.apache.syncope.core.quartz;
 
-import org.quartz.Job;
+import org.quartz.InterruptableJob;
 
 /**
  * Interface for Quartz jobs bound to a given Task.
  */
-public interface TaskJob extends Job {
+public interface TaskJob extends InterruptableJob {
 
     void setTaskId(Long taskId);
 }
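
For context, the Quartz contract being adopted here: org.quartz.InterruptableJob extends Job and adds a single method, which the Scheduler invokes when interrupt(JobKey) is called while an instance of the job is executing:

    public interface InterruptableJob extends Job {

        // implementations are expected to stop their work as soon as practical
        void interrupt() throws UnableToInterruptJobException;
    }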

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/rest/controller/AbstractJobController.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/rest/controller/AbstractJobController.java b/core/src/main/java/org/apache/syncope/core/rest/controller/AbstractJobController.java
new file mode 100644
index 0000000..4726330
--- /dev/null
+++ b/core/src/main/java/org/apache/syncope/core/rest/controller/AbstractJobController.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.core.rest.controller;
+
+import static org.apache.syncope.core.rest.controller.AbstractController.LOG;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.syncope.common.AbstractBaseBean;
+import org.apache.syncope.common.to.AbstractExecTO;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
+import org.quartz.JobExecutionContext;
+import org.quartz.JobKey;
+import org.quartz.Scheduler;
+import org.quartz.SchedulerException;
+import org.quartz.Trigger;
+import org.quartz.impl.matchers.GroupMatcher;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.scheduling.quartz.SchedulerFactoryBean;
+
+abstract class AbstractJobController<T extends AbstractBaseBean> extends AbstractTransactionalController<T> {
+
+    @Autowired
+    protected SchedulerFactoryBean scheduler;
+
+    protected abstract Long getIdFromJobName(JobKey jobKey);
+
+    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
+        List<E> jobExecTOs = new ArrayList<E>();
+
+        switch (type) {
+            case ALL:
+                try {
+                    for (String groupName : scheduler.getScheduler().getJobGroupNames()) {
+                        for (JobKey jobKey : scheduler.getScheduler().getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
+
+                            Long jobId = getIdFromJobName(jobKey);
+                            if (jobId != null) {
+                                List<? extends Trigger> jobTriggers = scheduler.getScheduler().getTriggersOfJob(jobKey);
+                                if (jobTriggers.size() > 0) {
+                                    for (Trigger t : jobTriggers) {
+                                        E jobExecTO = reference.newInstance();
+                                        jobExecTO.setId(jobId);
+                                        jobExecTO.
+                                                setStatus(scheduler.getScheduler().getTriggerState(t.getKey()).name());
+                                        jobExecTO.setStartDate(t.getStartTime());
+                                        jobExecTOs.add(jobExecTO);
+                                    }
+                                } else {
+                                    E jobExecTO = reference.newInstance();
+                                    jobExecTO.setId(jobId);
+                                    jobExecTO.setStatus("Not Scheduled");
+                                    jobExecTOs.add(jobExecTO);
+                                }
+                            }
+                        }
+                    }
+                } catch (SchedulerException ex) {
+                    LOG.debug("Problems during retrieving all scheduled jobs {}", ex);
+                } catch (InstantiationException ex) {
+                    LOG.debug("Problems during instantiating {}  {}", reference, ex);
+                } catch (IllegalAccessException ex) {
+                    LOG.debug("Problems during accessing {}  {}", reference, ex);
+                }
+                break;
+            case RUNNING:
+                try {
+                    for (JobExecutionContext jec : scheduler.getScheduler().getCurrentlyExecutingJobs()) {
+                        Long jobId = getIdFromJobName(jec.getJobDetail().getKey());
+                        if (jobId != null) {
+                            E jobExecTO = reference.newInstance();
+                            jobExecTO.setId(jobId);
+                            jobExecTO.setStatus(scheduler.getScheduler().getTriggerState(jec.getTrigger().getKey()).
+                                    name());
+                            jobExecTO.setStartDate(jec.getFireTime());
+                            jobExecTOs.add(jobExecTO);
+                        }
+                    }
+                } catch (SchedulerException ex) {
+                    LOG.debug("Problems during retrieving all currently executing jobs {}", ex);
+                } catch (InstantiationException ex) {
+                    LOG.debug("Problems during instantiating {}  {}", reference, ex);
+                } catch (IllegalAccessException ex) {
+                    LOG.debug("Problems during accessing {}  {}", reference, ex);
+                }
+                break;
+            case SCHEDULED:
+                try {
+                    for (String groupName : scheduler.getScheduler().getJobGroupNames()) {
+                        for (JobKey jobKey : scheduler.getScheduler().getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
+                            Long jobId = getIdFromJobName(jobKey);
+                            if (jobId != null) {
+                                List<? extends Trigger> jobTriggers = scheduler.getScheduler().getTriggersOfJob(jobKey);
+                                for (Trigger t : jobTriggers) {
+                                    E jobExecTO = reference.newInstance();
+                                    jobExecTO.setId(jobId);
+                                    jobExecTO.setStatus(scheduler.getScheduler().getTriggerState(t.getKey()).name());
+                                    jobExecTO.setStartDate(t.getStartTime());
+                                    jobExecTOs.add(jobExecTO);
+                                }
+                            }
+                        }
+                    }
+                } catch (SchedulerException ex) {
+                    LOG.debug("Problems during retrieving all scheduled jobs {}", ex);
+                } catch (InstantiationException ex) {
+                    LOG.debug("Problems during instantiating {}  {}", reference, ex);
+                } catch (IllegalAccessException ex) {
+                    LOG.debug("Problems during accessing {}  {}", reference, ex);
+                }
+                break;
+            default:
+        }
+        return jobExecTOs;
+    }
+
+    protected void process(JobAction action, String jobName) {
+
+        if (jobName != null) {
+            JobKey jobKey = new JobKey(jobName, Scheduler.DEFAULT_GROUP);
+            try {
+                if (scheduler.getScheduler().checkExists(jobKey)) {
+                    switch (action) {
+                        case START:
+                            scheduler.getScheduler().triggerJob(jobKey);
+                            break;
+                        case STOP:
+                            scheduler.getScheduler().interrupt(jobKey);
+                            break;
+                        default:
+                    }
+                }
+            } catch (SchedulerException ex) {
+                LOG.debug("Problems during {} operation on job with id {}", action.toString(), ex);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java b/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java
index 8d5e3f7..0f8d4bf 100644
--- a/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java
+++ b/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java
@@ -45,6 +45,9 @@ import org.apache.syncope.common.types.ReportExecExportFormat;
 import org.apache.syncope.common.types.ReportExecStatus;
 import org.apache.syncope.common.types.ClientExceptionType;
 import org.apache.syncope.common.SyncopeClientException;
+import org.apache.syncope.common.to.AbstractExecTO;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.core.init.JobInstanceLoader;
 import org.apache.syncope.core.persistence.beans.Report;
 import org.apache.syncope.core.persistence.beans.ReportExec;
@@ -59,13 +62,12 @@ import org.apache.xmlgraphics.util.MimeConstants;
 import org.quartz.JobKey;
 import org.quartz.Scheduler;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 import org.springframework.transaction.annotation.Transactional;
 
 @Component
-public class ReportController extends AbstractTransactionalController<ReportTO> {
+public class ReportController extends AbstractJobController<ReportTO> {
 
     @Autowired
     private ReportDAO reportDAO;
@@ -77,9 +79,6 @@ public class ReportController extends AbstractTransactionalController<ReportTO>
     private JobInstanceLoader jobInstanceLoader;
 
     @Autowired
-    private SchedulerFactoryBean scheduler;
-
-    @Autowired
     private ReportDataBinder binder;
 
     @PreAuthorize("hasRole('REPORT_CREATE')")
@@ -341,4 +340,25 @@ public class ReportController extends AbstractTransactionalController<ReportTO>
 
         throw new UnresolvedReferenceException();
     }
+
+    @Override
+    @PreAuthorize("hasRole('REPORT_LIST')")
+    public <E extends AbstractExecTO> List<E> list(JobStatusType type, Class<E> reference) {
+        return super.list(type, reference);
+    }
+
+    @PreAuthorize("hasRole('REPORT_EXECUTE')")
+    public void process(JobAction action, Long reportId) {
+        Report report = reportDAO.find(reportId);
+        if (report == null) {
+            throw new NotFoundException("Report " + reportId);
+        }
+        String jobName = JobInstanceLoader.getJobName(report);
+        process(action, jobName);
+    }
+
+    @Override
+    protected Long getIdFromJobName(JobKey jobKey) {
+        return JobInstanceLoader.getReportIdFromJobName(jobKey.getName());
+    }
 }

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/rest/controller/TaskController.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/rest/controller/TaskController.java b/core/src/main/java/org/apache/syncope/core/rest/controller/TaskController.java
index 431f6be..b493069 100644
--- a/core/src/main/java/org/apache/syncope/core/rest/controller/TaskController.java
+++ b/core/src/main/java/org/apache/syncope/core/rest/controller/TaskController.java
@@ -35,6 +35,9 @@ import org.apache.syncope.common.types.PropagationTaskExecStatus;
 import org.apache.syncope.common.types.ClientExceptionType;
 import org.apache.syncope.common.types.TaskType;
 import org.apache.syncope.common.SyncopeClientException;
+import org.apache.syncope.common.to.AbstractExecTO;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.core.init.ImplementationClassNamesLoader;
 import org.apache.syncope.core.init.JobInstanceLoader;
 import org.apache.syncope.core.notification.NotificationJob;
@@ -60,7 +63,7 @@ import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 
 @Component
-public class TaskController extends AbstractTransactionalController<AbstractTaskTO> {
+public class TaskController extends AbstractJobController<AbstractTaskTO> {
 
     @Autowired
     private TaskDAO taskDAO;
@@ -401,4 +404,25 @@ public class TaskController extends AbstractTransactionalController<AbstractTask
 
         throw new UnresolvedReferenceException();
     }
+
+    @Override
+    @PreAuthorize("hasRole('TASK_LIST')")
+    public <E extends AbstractExecTO> List<E> list(JobStatusType type, Class<E> reference) {
+        return super.list(type, reference);
+    }
+
+    @PreAuthorize("hasRole('TASK_EXECUTE')")
+    public void process(JobAction action, Long taskId) {
+        Task task = taskDAO.find(taskId);
+        if (task == null) {
+            throw new NotFoundException("Task " + taskId);
+        }
+        String jobName = JobInstanceLoader.getJobName(task);
+        process(action, jobName);
+    }
+
+    @Override
+    protected Long getIdFromJobName(JobKey jobKey) {
+        return JobInstanceLoader.getTaskIdFromJobName(jobKey.getName());
+    }
 }

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/services/ReportServiceImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/services/ReportServiceImpl.java b/core/src/main/java/org/apache/syncope/core/services/ReportServiceImpl.java
index 05ec25b..a4176fb 100644
--- a/core/src/main/java/org/apache/syncope/core/services/ReportServiceImpl.java
+++ b/core/src/main/java/org/apache/syncope/core/services/ReportServiceImpl.java
@@ -29,6 +29,8 @@ import org.apache.syncope.common.services.ReportService;
 import org.apache.syncope.common.reqres.PagedResult;
 import org.apache.syncope.common.to.ReportExecTO;
 import org.apache.syncope.common.to.ReportTO;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.common.types.RESTHeaders;
 import org.apache.syncope.common.types.ReportExecExportFormat;
 import org.apache.syncope.common.util.CollectionWrapper;
@@ -41,10 +43,10 @@ import org.springframework.stereotype.Service;
 
 @Service
 public class ReportServiceImpl extends AbstractServiceImpl implements ReportService {
-
+    
     @Autowired
     private ReportController controller;
-
+    
     @Override
     public Response create(final ReportTO reportTO) {
         ReportTO createdReportTO = controller.create(reportTO);
@@ -53,55 +55,55 @@ public class ReportServiceImpl extends AbstractServiceImpl implements ReportServ
                 header(RESTHeaders.RESOURCE_ID.toString(), createdReportTO.getId()).
                 build();
     }
-
+    
     @Override
     public void update(final Long reportId, final ReportTO reportTO) {
         reportTO.setId(reportId);
         controller.update(reportTO);
     }
-
+    
     @Override
     public PagedResult<ReportTO> list() {
         return list(DEFAULT_PARAM_PAGE_VALUE, DEFAULT_PARAM_SIZE_VALUE, null);
     }
-
+    
     @Override
     public PagedResult<ReportTO> list(final String orderBy) {
         return list(DEFAULT_PARAM_PAGE_VALUE, DEFAULT_PARAM_SIZE_VALUE, orderBy);
     }
-
+    
     @Override
     public PagedResult<ReportTO> list(final Integer page, final Integer size) {
         return list(page, size, null);
     }
-
+    
     @Override
     public PagedResult<ReportTO> list(final Integer page, final Integer size, final String orderBy) {
         List<OrderByClause> orderByClauses = getOrderByClauses(orderBy);
         return buildPagedResult(controller.list(page, size, orderByClauses), page, size, controller.count());
     }
-
+    
     @Override
     public List<ReportletConfClass> getReportletConfClasses() {
         return CollectionWrapper.wrap(controller.getReportletConfClasses(), ReportletConfClass.class);
     }
-
+    
     @Override
     public ReportTO read(final Long reportId) {
         return controller.read(reportId);
     }
-
+    
     @Override
     public ReportExecTO readExecution(final Long executionId) {
         return controller.readExecution(executionId);
     }
-
+    
     @Override
     public Response exportExecutionResult(final Long executionId, final ReportExecExportFormat fmt) {
         final ReportExecExportFormat format = (fmt == null) ? ReportExecExportFormat.XML : fmt;
         final ReportExec reportExec = controller.getAndCheckReportExec(executionId);
         StreamingOutput sout = new StreamingOutput() {
-
+            
             @Override
             public void write(final OutputStream os) throws IOException {
                 controller.exportExecutionResult(os, reportExec, format);
@@ -113,19 +115,29 @@ public class ReportServiceImpl extends AbstractServiceImpl implements ReportServ
                 header(HttpHeaders.CONTENT_DISPOSITION, disposition).
                 build();
     }
-
+    
     @Override
     public ReportExecTO execute(final Long reportId) {
         return controller.execute(reportId);
     }
-
+    
     @Override
     public void delete(final Long reportId) {
         controller.delete(reportId);
     }
-
+    
     @Override
     public void deleteExecution(final Long executionId) {
         controller.deleteExecution(executionId);
     }
+    
+    @Override
+    public List<ReportExecTO> list(JobStatusType type) {
+        return controller.list(type, ReportExecTO.class);
+    }
+    
+    @Override
+    public void process(JobAction action, Long reportId) {
+        controller.process(action, reportId);
+    }
 }

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/main/java/org/apache/syncope/core/services/TaskServiceImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/syncope/core/services/TaskServiceImpl.java b/core/src/main/java/org/apache/syncope/core/services/TaskServiceImpl.java
index 82a4d43..51b140a 100644
--- a/core/src/main/java/org/apache/syncope/core/services/TaskServiceImpl.java
+++ b/core/src/main/java/org/apache/syncope/core/services/TaskServiceImpl.java
@@ -34,8 +34,10 @@ import org.apache.syncope.common.to.TaskExecTO;
 import org.apache.syncope.common.to.AbstractTaskTO;
 import org.apache.syncope.common.reqres.PagedResult;
 import org.apache.syncope.common.to.PushTaskTO;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.common.types.RESTHeaders;
 import org.apache.syncope.common.types.PropagationTaskExecStatus;
+import org.apache.syncope.common.types.JobAction;
 import org.apache.syncope.common.types.TaskType;
 import org.apache.syncope.common.util.CollectionWrapper;
 import org.apache.syncope.common.wrap.PushActionClass;
@@ -155,4 +157,14 @@ public class TaskServiceImpl extends AbstractServiceImpl implements TaskService
     public BulkActionResult bulk(final BulkAction bulkAction) {
         return controller.bulk(bulkAction);
     }
+
+    @Override
+    public List<TaskExecTO> list(JobStatusType type) {
+        return controller.list(type, TaskExecTO.class);
+    }
+
+    @Override
+    public void process(JobAction action, Long taskId) {
+        controller.process(action, taskId);
+    }
 }

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/test/java/org/apache/syncope/core/quartz/TestSampleJob.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/syncope/core/quartz/TestSampleJob.java b/core/src/test/java/org/apache/syncope/core/quartz/TestSampleJob.java
new file mode 100644
index 0000000..f7387f0
--- /dev/null
+++ b/core/src/test/java/org/apache/syncope/core/quartz/TestSampleJob.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.core.quartz;
+
+import java.util.Date;
+import org.apache.syncope.core.persistence.beans.SchedTask;
+import org.apache.syncope.core.persistence.beans.TaskExec;
+import org.quartz.JobExecutionException;
+
+/**
+ * Sample implementation for executing a scheduled task.
+ *
+ * @see SchedTask
+ */
+public class TestSampleJob extends AbstractTaskJob {
+
+    @Override
+    protected String doExecute(final boolean dryRun) throws JobExecutionException {
+        if (!(task instanceof SchedTask)) {
+            throw new JobExecutionException("Task " + taskId + " isn't a SchedTask");
+        }
+
+        for (int i = 0; i < 10; i++) {
+            LOG.debug("TestSampleJob#doExecute round {} time {}", i, new Date().toString());
+            try {
+                Thread.sleep(1000);
+            } catch (InterruptedException ex) {
+                throw new JobExecutionException("Job interrupted");
+            }
+        }
+
+        final SchedTask schedTask = (SchedTask) this.task;
+
+        LOG.info("TestSampleJob {}running [SchedTask {}]", (dryRun
+                ? "dry "
+                : ""), schedTask.getId());
+
+        return (dryRun
+                ? "DRY "
+                : "") + "RUNNING";
+    }
+
+    @Override
+    protected boolean hasToBeRegistered(final TaskExec execution) {
+        return true;
+    }
+}

http://git-wip-us.apache.org/repos/asf/syncope/blob/358aef72/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java b/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java
index 5823f9b..115f2c6 100644
--- a/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java
+++ b/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java
@@ -73,6 +73,8 @@ import org.apache.syncope.common.types.AttributeSchemaType;
 import org.apache.syncope.common.types.CipherAlgorithm;
 import org.apache.syncope.common.types.ConnConfProperty;
 import org.apache.syncope.common.types.IntMappingType;
+import org.apache.syncope.common.types.JobAction;
+import org.apache.syncope.common.types.JobStatusType;
 import org.apache.syncope.common.types.MappingPurpose;
 import org.apache.syncope.common.types.MatchingRule;
 import org.apache.syncope.common.types.PropagationTaskExecStatus;
@@ -85,6 +87,7 @@ import org.apache.syncope.common.types.UnmatchingRule;
 import org.apache.syncope.common.util.CollectionWrapper;
 import org.apache.syncope.common.wrap.PushActionClass;
 import org.apache.syncope.common.wrap.ResourceName;
+import org.apache.syncope.core.quartz.TestSampleJob;
 import org.apache.syncope.core.sync.TestSyncActions;
 import org.apache.syncope.core.sync.TestSyncRule;
 import org.apache.syncope.core.sync.impl.DBPasswordSyncActions;
@@ -1444,4 +1447,61 @@ public class TaskTestITCase extends AbstractTest {
         NotificationTaskTO taskTO = findNotificationTaskBySender("syncope648@syncope.apache.org");
         assertNotNull(taskTO);
     }
+
+    @Test
+    public void issueSYNCOPE660() {
+        List<TaskExecTO> list = taskService.list(JobStatusType.ALL);
+        int old_size = list.size();
+
+        list = taskService.list(JobStatusType.SCHEDULED);
+
+        SchedTaskTO task = new SchedTaskTO();
+        task.setName("issueSYNCOPE660");
+        task.setDescription("issueSYNCOPE660 Description");
+        task.setJobClassName(TestSampleJob.class.getName());
+
+        Response response = taskService.create(task);
+        SchedTaskTO actual = getObject(response.getLocation(), TaskService.class, SchedTaskTO.class);
+
+        list = taskService.list(JobStatusType.ALL);
+        assertEquals(list.size(), old_size + 1);
+
+        taskService.process(JobAction.START, actual.getId());
+
+        int i = 0, maxit = 50;
+
+        // wait for task exec completion (executions incremented)
+        do {
+            try {
+                Thread.sleep(1000);
+            } catch (InterruptedException e) {
+            }
+
+            list = taskService.list(JobStatusType.RUNNING);
+
+            assertNotNull(list);
+            i++;
+        } while (list.size() < 1 && i < maxit);
+
+        assertEquals(list.size(), 1);
+
+        taskService.process(JobAction.STOP, actual.getId());
+
+        i = 0;
+
+        // wait for task exec completion (executions incremented)
+        do {
+            try {
+                Thread.sleep(1000);
+            } catch (InterruptedException e) {
+            }
+
+            list = taskService.list(JobStatusType.RUNNING);
+
+            assertNotNull(list);
+            i++;
+        } while (list.size() >= 1 && i < maxit);
+
+        assertEquals(list.size(), 0);
+    }
 }
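
The two polling loops above share the same shape; they could be factored into a small helper along these lines (illustrative only, not part of the commit; expected and maxit mirror the values used in the test):

    // polls the RUNNING jobs until their number matches the expected value or maxit rounds elapse
    private List<TaskExecTO> waitForRunning(final int expected, final int maxit) {
        List<TaskExecTO> running = taskService.list(JobStatusType.RUNNING);
        for (int i = 0; i < maxit && running.size() != expected; i++) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // ignored, as in the original loops
            }
            running = taskService.list(JobStatusType.RUNNING);
        }
        return running;
    }

with which the two waits become assertEquals(1, waitForRunning(1, 50).size()) and assertEquals(0, waitForRunning(0, 50).size()).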


[3/4] syncope git commit: [SYNCOPE-660] Merge from 1_2_X; This closes #5

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/fit/core-reference/src/main/java/org/apache/syncope/fit/core/reference/TestSampleJob.java
----------------------------------------------------------------------
diff --cc fit/core-reference/src/main/java/org/apache/syncope/fit/core/reference/TestSampleJob.java
index 0000000,0000000..a8f7184
new file mode 100644
--- /dev/null
+++ b/fit/core-reference/src/main/java/org/apache/syncope/fit/core/reference/TestSampleJob.java
@@@ -1,0 -1,0 +1,64 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.fit.core.reference;
++
++import java.util.Date;
++import org.apache.syncope.core.persistence.api.entity.task.SchedTask;
++import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
++import org.apache.syncope.core.provisioning.java.job.AbstractTaskJob;
++import org.quartz.JobExecutionException;
++
++/**
++ * Sample implementation for executing a scheduled task.
++ *
++ * @see SchedTask
++ */
++public class TestSampleJob extends AbstractTaskJob {
++
++    @Override
++    protected String doExecute(final boolean dryRun) throws JobExecutionException {
++        if (!(task instanceof SchedTask)) {
++            throw new JobExecutionException("Task " + taskId + " isn't a SchedTask");
++        }
++
++        for (int i = 0; i < 10; i++) {
++            LOG.debug("TestSampleJob#doExecute round {} time {}", i, new Date().toString());
++            try {
++                Thread.sleep(1000);
++            } catch (InterruptedException ex) {
++                throw new JobExecutionException("Job interrupted");
++            }
++        }
++
++        final SchedTask schedTask = (SchedTask) this.task;
++
++        LOG.info("TestSampleJob {}running [SchedTask {}]", (dryRun
++                ? "dry "
++                : ""), schedTask.getKey());
++
++        return (dryRun
++                ? "DRY "
++                : "") + "RUNNING";
++    }
++
++    @Override
++    protected boolean hasToBeRegistered(final TaskExec execution) {
++        return true;
++    }
++}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/fit/core-reference/src/test/java/org/apache/syncope/fit/core/reference/SchedTaskITCase.java
----------------------------------------------------------------------
diff --cc fit/core-reference/src/test/java/org/apache/syncope/fit/core/reference/SchedTaskITCase.java
index 437bca5,0000000..ff5bff8
mode 100644,000000..100644
--- a/fit/core-reference/src/test/java/org/apache/syncope/fit/core/reference/SchedTaskITCase.java
+++ b/fit/core-reference/src/test/java/org/apache/syncope/fit/core/reference/SchedTaskITCase.java
@@@ -1,107 -1,0 +1,167 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.fit.core.reference;
 +
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertNull;
 +import static org.junit.Assert.fail;
 +
 +import java.util.List;
 +import javax.ws.rs.core.Response;
 +import org.apache.syncope.client.lib.SyncopeClient;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.PushTaskTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.SyncTaskTO;
++import org.apache.syncope.common.lib.to.TaskExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.common.rest.api.service.TaskService;
 +import org.apache.syncope.core.provisioning.api.job.SyncJob;
 +import org.junit.FixMethodOrder;
 +import org.junit.Test;
 +import org.junit.runners.MethodSorters;
 +
 +@FixMethodOrder(MethodSorters.JVM)
 +public class SchedTaskITCase extends AbstractTaskITCase {
 +
 +    @Test
 +    public void getJobClasses() {
 +        List<String> jobClasses = syncopeService.info().getTaskJobs();
 +        assertNotNull(jobClasses);
 +        assertFalse(jobClasses.isEmpty());
 +    }
 +
 +    @Test
 +    public void list() {
 +        PagedResult<SchedTaskTO> tasks =
 +                taskService.list(TaskType.SCHEDULED, SyncopeClient.getListQueryBuilder().build());
 +        assertFalse(tasks.getResult().isEmpty());
 +        for (AbstractTaskTO task : tasks.getResult()) {
 +            if (!(task instanceof SchedTaskTO) || task instanceof SyncTaskTO || task instanceof PushTaskTO) {
 +                fail();
 +            }
 +        }
 +    }
 +
 +    @Test
 +    public void update() {
 +        SchedTaskTO task = taskService.read(SCHED_TASK_ID);
 +        assertNotNull(task);
 +
 +        SchedTaskTO taskMod = new SchedTaskTO();
 +        taskMod.setKey(5);
 +        taskMod.setCronExpression(null);
 +
 +        taskService.update(taskMod.getKey(), taskMod);
 +        SchedTaskTO actual = taskService.read(taskMod.getKey());
 +        assertNotNull(actual);
 +        assertEquals(task.getKey(), actual.getKey());
 +        assertNull(actual.getCronExpression());
 +    }
 +
 +    @Test
 +    public void issueSYNCOPE144() {
 +        SchedTaskTO task = new SchedTaskTO();
 +        task.setName("issueSYNCOPE144");
 +        task.setDescription("issueSYNCOPE144 Description");
 +        task.setJobClassName(SyncJob.class.getName());
 +
 +        Response response = taskService.create(task);
 +        SchedTaskTO actual = getObject(response.getLocation(), TaskService.class, SchedTaskTO.class);
 +        assertNotNull(actual);
 +        assertEquals("issueSYNCOPE144", actual.getName());
 +        assertEquals("issueSYNCOPE144 Description", actual.getDescription());
 +
 +        task = taskService.read(actual.getKey());
 +        assertNotNull(task);
 +        assertEquals("issueSYNCOPE144", task.getName());
 +        assertEquals("issueSYNCOPE144 Description", task.getDescription());
 +
 +        task.setName("issueSYNCOPE144_2");
 +        task.setDescription("issueSYNCOPE144 Description_2");
 +
 +        response = taskService.create(task);
 +        actual = getObject(response.getLocation(), TaskService.class, SchedTaskTO.class);
 +        assertNotNull(actual);
 +        assertEquals("issueSYNCOPE144_2", actual.getName());
 +        assertEquals("issueSYNCOPE144 Description_2", actual.getDescription());
 +    }
++
++    @Test
++    public void issueSYNCOPE660() {
++        List<TaskExecTO> list = taskService.list(JobStatusType.ALL);
++        int old_size = list.size();
++
++        list = taskService.list(JobStatusType.SCHEDULED);
++
++        SchedTaskTO task = new SchedTaskTO();
++        task.setName("issueSYNCOPE660");
++        task.setDescription("issueSYNCOPE660 Description");
++        task.setJobClassName(TestSampleJob.class.getName());
++
++        Response response = taskService.create(task);
++        SchedTaskTO actual = getObject(response.getLocation(), TaskService.class, SchedTaskTO.class);
++
++        list = taskService.list(JobStatusType.ALL);
++        assertEquals(list.size(), old_size + 1);
++
++        taskService.process(JobAction.START, actual.getKey());
++
++        int i = 0, maxit = 50;
++
++        // wait for task exec completion (executions incremented)
++        do {
++            try {
++                Thread.sleep(1000);
++            } catch (InterruptedException e) {
++            }
++
++            list = taskService.list(JobStatusType.RUNNING);
++
++            assertNotNull(list);
++            i++;
++        } while (list.size() < 1 && i < maxit);
++
++        assertEquals(list.size(), 1);
++
++        taskService.process(JobAction.STOP, actual.getKey());
++
++        i = 0;
++
++        // wait for task exec completion (executions incremented)
++        do {
++            try {
++                Thread.sleep(1000);
++            } catch (InterruptedException e) {
++            }
++
++            list = taskService.list(JobStatusType.RUNNING);
++
++            assertNotNull(list);
++            i++;
++        } while (list.size() >= 1 && i < maxit);
++
++        assertEquals(list.size(), 0);
++    }
 +}


[4/4] syncope git commit: [SYNCOPE-660] Merge from 1_2_X; This closes #5

Posted by gi...@apache.org.
[SYNCOPE-660] Merge from 1_2_X; This closes #5


Project: http://git-wip-us.apache.org/repos/asf/syncope/repo
Commit: http://git-wip-us.apache.org/repos/asf/syncope/commit/d489e8c5
Tree: http://git-wip-us.apache.org/repos/asf/syncope/tree/d489e8c5
Diff: http://git-wip-us.apache.org/repos/asf/syncope/diff/d489e8c5

Branch: refs/heads/master
Commit: d489e8c59a93bf137d75af4222b5b87f4c7ad925
Parents: d19133e b43c9c8
Author: giacomolm <gi...@hotmail.it>
Authored: Fri May 22 09:46:31 2015 +0200
Committer: giacomolm <gi...@hotmail.it>
Committed: Fri May 22 09:46:31 2015 +0200

----------------------------------------------------------------------
 .../syncope/common/lib/types/JobAction.java     |  29 ++++
 .../syncope/common/lib/types/JobStatusType.java |  30 ++++
 .../common/rest/api/service/ReportService.java  |  24 +++
 .../common/rest/api/service/TaskService.java    |  24 +++
 .../syncope/core/logic/AbstractJobLogic.java    | 155 +++++++++++++++++++
 .../apache/syncope/core/logic/ReportLogic.java  |  30 +++-
 .../apache/syncope/core/logic/TaskLogic.java    |  26 +++-
 .../core/provisioning/api/job/JobNamer.java     |   4 +-
 .../core/provisioning/api/job/TaskJob.java      |   4 +-
 .../provisioning/java/job/AbstractTaskJob.java  |  23 +++
 .../rest/cxf/service/ReportServiceImpl.java     |  12 ++
 .../core/rest/cxf/service/TaskServiceImpl.java  |  13 ++
 .../fit/core/reference/TestSampleJob.java       |  64 ++++++++
 .../fit/core/reference/SchedTaskITCase.java     |  60 +++++++
 14 files changed, 488 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobAction.java
----------------------------------------------------------------------
diff --cc common/lib/src/main/java/org/apache/syncope/common/lib/types/JobAction.java
index 0000000,0000000..920393b
new file mode 100644
--- /dev/null
+++ b/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobAction.java
@@@ -1,0 -1,0 +1,29 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.common.lib.types;
++
++import javax.xml.bind.annotation.XmlEnum;
++
++@XmlEnum
++public enum JobAction {
++
++    START,
++    STOP;
++
++}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobStatusType.java
----------------------------------------------------------------------
diff --cc common/lib/src/main/java/org/apache/syncope/common/lib/types/JobStatusType.java
index 0000000,0000000..3a43807
new file mode 100644
--- /dev/null
+++ b/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobStatusType.java
@@@ -1,0 -1,0 +1,30 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.common.lib.types;
++
++import javax.xml.bind.annotation.XmlEnum;
++
++@XmlEnum
++public enum JobStatusType {
++
++    ALL,
++    RUNNING,
++    SCHEDULED;
++
++}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
----------------------------------------------------------------------
diff --cc common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
index 0037d0a,0000000..fc37192
mode 100644,000000..100644
--- a/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
+++ b/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
@@@ -1,158 -1,0 +1,182 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.common.rest.api.service;
 +
 +import java.util.List;
 +import javax.validation.constraints.NotNull;
 +import javax.ws.rs.BeanParam;
 +import javax.ws.rs.Consumes;
 +import javax.ws.rs.DELETE;
 +import javax.ws.rs.GET;
++import javax.ws.rs.MatrixParam;
 +import javax.ws.rs.POST;
 +import javax.ws.rs.PUT;
 +import javax.ws.rs.Path;
 +import javax.ws.rs.PathParam;
 +import javax.ws.rs.Produces;
 +import javax.ws.rs.QueryParam;
 +import javax.ws.rs.core.MediaType;
 +import javax.ws.rs.core.Response;
 +import org.apache.cxf.jaxrs.model.wadl.Description;
 +import org.apache.cxf.jaxrs.model.wadl.Descriptions;
 +import org.apache.cxf.jaxrs.model.wadl.DocTarget;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.ReportTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.ReportExecExportFormat;
 +import org.apache.syncope.common.lib.wrap.ReportletConfClass;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +
 +/**
 + * REST operations for reports.
 + */
 +@Path("reports")
 +public interface ReportService extends JAXRSService {
 +
 +    /**
 +     * Returns a list of available classes for reportlet configuration.
 +     *
 +     * @return list of available classes for reportlet configuration
 +     */
 +    @GET
 +    @Path("reportletConfClasses")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    List<ReportletConfClass> getReportletConfClasses();
 +
 +    /**
 +     * Returns report with matching key.
 +     *
 +     * @param reportKey key of report to be read
 +     * @return report with matching key
 +     */
 +    @GET
 +    @Path("{reportKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    ReportTO read(@NotNull @PathParam("reportKey") Long reportKey);
 +
 +    /**
 +     * Returns report execution with matching key.
 +     *
 +     * @param executionKey report execution id to be selected
 +     * @return report execution with matching key
 +     */
 +    @GET
 +    @Path("executions/{executionKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    ReportExecTO readExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Returns a paged list of all existing reports matching the given query.
 +     *
 +     * @param listQuery query conditions
 +     * @return paged list of existing reports matching the given query
 +     */
 +    @GET
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    PagedResult<ReportTO> list(@BeanParam ListQuery listQuery);
 +
 +    /**
 +     * Creates a new report.
 +     *
 +     * @param reportTO report to be created
 +     * @return <tt>Response</tt> object featuring <tt>Location</tt> header of created report
 +     */
 +    @Descriptions({
 +        @Description(target = DocTarget.RESPONSE, value = "Featuring <tt>Location</tt> header of created report")
 +    })
 +    @POST
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    Response create(@NotNull ReportTO reportTO);
 +
 +    /**
 +     * Updates report with matching key.
 +     *
 +     * @param reportKey id for report to be updated
 +     * @param reportTO report to be stored
 +     */
 +    @PUT
 +    @Path("{reportKey}")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    void update(@NotNull @PathParam("reportKey") Long reportKey, ReportTO reportTO);
 +
 +    /**
 +     * Deletes report with matching key.
 +     *
 +     * @param reportKey key of report to be deleted
 +     */
 +    @DELETE
 +    @Path("{reportKey}")
 +    void delete(@NotNull @PathParam("reportKey") Long reportKey);
 +
 +    /**
 +     * Deletes report execution with matching key.
 +     *
 +     * @param executionKey key of report execution to be deleted
 +     */
 +    @DELETE
 +    @Path("executions/{executionKey}")
 +    void deleteExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Executes the report with matching key.
 +     *
 +     * @param reportKey key of report to be executed
 +     * @return report execution result
 +     */
 +    @POST
 +    @Path("{reportKey}/execute")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    ReportExecTO execute(@NotNull @PathParam("reportKey") Long reportKey);
 +
 +    /**
 +     * Exports the report execution with matching key in the requested format.
 +     *
 +     * @param executionKey key of report execution to be selected
 +     * @param fmt file-format selection
 +     * @return a stream for content download
 +     */
 +    @GET
 +    @Path("executions/{executionKey}/stream")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    Response exportExecutionResult(@NotNull @PathParam("executionKey") Long executionKey,
 +            @QueryParam("format") ReportExecExportFormat fmt);
++
++    /**
++     * Lists report jobs of the given type.
++     *
++     * @param type of report jobs to be listed (ALL, RUNNING or SCHEDULED)
++     * @return list of report executions, one per matching job
++     */
++    @GET
++    @Path("jobs")
++    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
++    List<ReportExecTO> list(@MatrixParam("type") JobStatusType type);
++
++    /**
++     * Executes a control action on the job of an existing report.
++     *
++     * @param action control action to be performed (START or STOP)
++     * @param reportId id of report whose job is to be controlled
++     */
++    @POST
++    @Path("{reportId}")
++    void process(@QueryParam("action") JobAction action, @PathParam("reportId") Long reportId);
 +}
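
For reference, a minimal sketch of how a REST client could exercise the two new report-job operations with the plain JAX-RS 2.0 client API: a matrix parameter for the job listing, a query parameter for the control action. The base address, credentials and report key below are placeholders, not values taken from this commit.

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class ReportJobClientSketch {

    public static void main(final String[] args) {
        // Placeholder deployment address and credentials: adjust to the actual environment.
        String base = "http://localhost:9080/syncope/rest";
        String basicAuth = "Basic " + Base64.getEncoder()
                .encodeToString("admin:password".getBytes(StandardCharsets.UTF_8));

        Client client = ClientBuilder.newClient();

        // GET /reports/jobs;type=RUNNING -> report jobs currently being executed
        Response running = client.target(base)
                .path("reports").path("jobs")
                .matrixParam("type", "RUNNING")
                .request(MediaType.APPLICATION_JSON)
                .header(HttpHeaders.AUTHORIZATION, basicAuth)
                .get();
        System.out.println("Running report jobs: " + running.readEntity(String.class));

        // POST /reports/1?action=STOP -> interrupt the Quartz job of report 1 (illustrative key)
        Response stopped = client.target(base)
                .path("reports").path("1")
                .queryParam("action", "STOP")
                .request()
                .header(HttpHeaders.AUTHORIZATION, basicAuth)
                .post(Entity.text(""));
        System.out.println("STOP returned HTTP " + stopped.getStatus());

        client.close();
    }
}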

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
----------------------------------------------------------------------
diff --cc common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
index 8240a23,0000000..3d6c3f5
mode 100644,000000..100644
--- a/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
+++ b/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
@@@ -1,170 -1,0 +1,194 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.common.rest.api.service;
 +
++import java.util.List;
 +import javax.validation.constraints.NotNull;
 +import javax.ws.rs.BeanParam;
 +import javax.ws.rs.Consumes;
 +import javax.ws.rs.DELETE;
 +import javax.ws.rs.DefaultValue;
 +import javax.ws.rs.GET;
 +import javax.ws.rs.MatrixParam;
 +import javax.ws.rs.POST;
 +import javax.ws.rs.PUT;
 +import javax.ws.rs.Path;
 +import javax.ws.rs.PathParam;
 +import javax.ws.rs.Produces;
 +import javax.ws.rs.QueryParam;
 +import javax.ws.rs.core.MediaType;
 +import javax.ws.rs.core.Response;
 +import org.apache.cxf.jaxrs.model.wadl.Description;
 +import org.apache.cxf.jaxrs.model.wadl.Descriptions;
 +import org.apache.cxf.jaxrs.model.wadl.DocTarget;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.BulkAction;
 +import org.apache.syncope.common.lib.to.BulkActionResult;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.TaskExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +
 +/**
 + * REST operations for tasks.
 + */
 +@Path("tasks")
 +public interface TaskService extends JAXRSService {
 +
 +    /**
 +     * Returns the task matching the given key.
 +     *
 +     * @param taskKey key of task to be read
 +     * @param <T> type of taskTO
 +     * @return task with matching id
 +     */
 +    @GET
 +    @Path("{taskKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    <T extends AbstractTaskTO> T read(@NotNull @PathParam("taskKey") Long taskKey);
 +
 +    /**
 +     * Returns the task execution with the given id.
 +     *
 +     * @param executionKey key of task execution to be read
 +     * @return task execution with matching key
 +     */
 +    @GET
 +    @Path("executions/{executionKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    TaskExecTO readExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Returns a paged list of existing tasks matching type and the given query.
 +     *
 +     * @param taskType type of tasks to be listed
 +     * @param listQuery query conditions
 +     * @param <T> type of taskTO
 +     * @return paged list of existing tasks matching type and the given query
 +     */
 +    @GET
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    <T extends AbstractTaskTO> PagedResult<T> list(
 +            @NotNull @MatrixParam("type") TaskType taskType,
 +            @BeanParam ListQuery listQuery);
 +
 +    /**
 +     * Creates a new task.
 +     *
 +     * @param taskTO task to be created
 +     * @param <T> type of taskTO
 +     * @return <tt>Response</tt> object featuring <tt>Location</tt> header of created task
 +     */
 +    @Descriptions({
 +        @Description(target = DocTarget.RESPONSE, value = "Featuring <tt>Location</tt> header of created task")
 +    })
 +    @POST
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    <T extends SchedTaskTO> Response create(@NotNull T taskTO);
 +
 +    /**
 +     * Updates the task matching the provided key.
 +     *
 +     * @param taskKey key of task to be updated
 +     * @param taskTO updated task to be stored
 +     */
 +    @PUT
 +    @Path("{taskKey}")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    void update(@NotNull @PathParam("taskKey") Long taskKey, @NotNull AbstractTaskTO taskTO);
 +
 +    /**
 +     * Deletes the task matching the provided key.
 +     *
 +     * @param taskKey key of task to be deleted
 +     */
 +    @DELETE
 +    @Path("{taskKey}")
 +    void delete(@NotNull @PathParam("taskKey") Long taskKey);
 +
 +    /**
 +     * Deletes the task execution matching the provided key.
 +     *
 +     * @param executionKey key of task execution to be deleted
 +     */
 +    @DELETE
 +    @Path("executions/{executionKey}")
 +    void deleteExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Executes the task matching the given id.
 +     *
 +     * @param taskKey key of task to be executed
 +     * @param dryRun if true, task will only be simulated
 +     * @return execution report for the task matching the given id
 +     */
 +    @POST
 +    @Path("{taskKey}/execute")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    TaskExecTO execute(@NotNull @PathParam("taskKey") Long taskKey,
 +            @QueryParam("dryRun") @DefaultValue("false") boolean dryRun);
 +
 +    /**
 +     * Reports task execution result.
 +     *
 +     * @param executionKey key of task execution being reported
 +     * @param reportExec execution being reported
 +     */
 +    @POST
 +    @Path("executions/{executionKey}/report")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    void report(@NotNull @PathParam("executionKey") Long executionKey, @NotNull ReportExecTO reportExec);
 +
 +    /**
 +     * Executes the provided bulk action.
 +     *
 +     * @param bulkAction list of task ids against which the bulk action will be performed.
 +     * @return Bulk action result
 +     */
 +    @POST
 +    @Path("bulk")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    BulkActionResult bulk(@NotNull BulkAction bulkAction);
++
++    /**
++     * Lists task jobs of the given type.
++     *
++     * @param type of task jobs to be listed (ALL, RUNNING or SCHEDULED)
++     * @return list of task executions, one per matching job
++     */
++    @GET
++    @Path("jobs")
++    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
++    List<TaskExecTO> list(@MatrixParam("type") JobStatusType type);
++
++    /**
++     * Executes a control action on the job of an existing task.
++     *
++     * @param action control action to be performed (START or STOP)
++     * @param taskId id of task whose job is to be controlled
++     */
++    @POST
++    @Path("{taskId}")
++    void process(@QueryParam("action") JobAction action, @PathParam("taskId") Long taskId);
 +}
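
The task-side operations mirror the report ones; since TaskService is an annotated JAX-RS interface, a CXF proxy (assuming the JAXRSClientFactory overload taking username/password) gives type-safe access to the new methods. Again a sketch with placeholder address, credentials and task key, and no error handling.

import java.util.List;
import org.apache.cxf.jaxrs.client.JAXRSClientFactory;
import org.apache.syncope.common.lib.to.TaskExecTO;
import org.apache.syncope.common.lib.types.JobAction;
import org.apache.syncope.common.lib.types.JobStatusType;
import org.apache.syncope.common.rest.api.service.TaskService;

public class TaskJobClientSketch {

    public static void main(final String[] args) {
        // Placeholder address and credentials: adjust to the actual environment.
        TaskService taskService = JAXRSClientFactory.create(
                "http://localhost:9080/syncope/rest", TaskService.class, "admin", "password", null);

        // All task jobs registered in Quartz, whatever their trigger state
        List<TaskExecTO> jobs = taskService.list(JobStatusType.ALL);
        for (TaskExecTO exec : jobs) {
            System.out.println("Task " + exec.getKey() + ": " + exec.getStatus());
        }

        // Ask Quartz to interrupt the running job of task 42 (illustrative key);
        // the job class must cooperate with interruption for this to have any effect
        taskService.process(JobAction.STOP, 42L);
    }
}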

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/logic/src/main/java/org/apache/syncope/core/logic/AbstractJobLogic.java
----------------------------------------------------------------------
diff --cc core/logic/src/main/java/org/apache/syncope/core/logic/AbstractJobLogic.java
index 0000000,0000000..9f20ee1
new file mode 100644
--- /dev/null
+++ b/core/logic/src/main/java/org/apache/syncope/core/logic/AbstractJobLogic.java
@@@ -1,0 -1,0 +1,155 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.core.logic;
++
++import java.util.ArrayList;
++import java.util.List;
++import org.apache.syncope.common.lib.AbstractBaseBean;
++import org.apache.syncope.common.lib.to.AbstractExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
++import org.quartz.JobExecutionContext;
++import org.quartz.JobKey;
++import org.quartz.Scheduler;
++import org.quartz.SchedulerException;
++import org.quartz.Trigger;
++import org.quartz.impl.matchers.GroupMatcher;
++import org.springframework.beans.factory.annotation.Autowired;
++import org.springframework.scheduling.quartz.SchedulerFactoryBean;
++
++abstract class AbstractJobLogic<T extends AbstractBaseBean> extends AbstractTransactionalLogic<T> {
++
++    @Autowired
++    protected SchedulerFactoryBean scheduler;
++
++    protected abstract Long getKeyFromJobName(final JobKey jobKey);
++
++    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
++        List<E> jobExecTOs = new ArrayList<E>();
++
++        switch (type) {
++            case ALL:
++                try {
++                    for (String groupName : scheduler.getScheduler().getJobGroupNames()) {
++                        for (JobKey jobKey : scheduler.getScheduler().getJobKeys(GroupMatcher.
++                                jobGroupEquals(groupName))) {
++
++                            Long jobId = getKeyFromJobName(jobKey);
++                            if (jobId != null) {
++                                List<? extends Trigger> jobTriggers = scheduler.getScheduler().getTriggersOfJob(jobKey);
++                                if (!jobTriggers.isEmpty()) {
++                                    for (Trigger t : jobTriggers) {
++                                        E jobExecTO = reference.newInstance();
++                                        jobExecTO.setKey(jobId);
++                                        jobExecTO.
++                                                setStatus(scheduler.getScheduler().getTriggerState(t.getKey()).name());
++                                        jobExecTO.setStartDate(t.getStartTime());
++                                        jobExecTOs.add(jobExecTO);
++                                    }
++                                } else {
++                                    E jobExecTO = reference.newInstance();
++                                    jobExecTO.setKey(jobId);
++                                    jobExecTO.setStatus("Not Scheduled");
++                                    jobExecTOs.add(jobExecTO);
++                                }
++                            }
++                        }
++                    }
++                } catch (SchedulerException ex) {
++                    LOG.debug("Problems while retrieving scheduled jobs", ex);
++                } catch (InstantiationException ex) {
++                    LOG.debug("Problems while instantiating {}", reference, ex);
++                } catch (IllegalAccessException ex) {
++                    LOG.debug("Problems while accessing {}", reference, ex);
++                }
++                break;
++            case RUNNING:
++                try {
++                    for (JobExecutionContext jec : scheduler.getScheduler().getCurrentlyExecutingJobs()) {
++                        Long jobId = getKeyFromJobName(jec.getJobDetail().getKey());
++                        if (jobId != null) {
++                            E jobExecTO = reference.newInstance();
++                            jobExecTO.setKey(jobId);
++                            jobExecTO.setStatus(scheduler.getScheduler().getTriggerState(jec.getTrigger().getKey()).
++                                    name());
++                            jobExecTO.setStartDate(jec.getFireTime());
++                            jobExecTOs.add(jobExecTO);
++                        }
++                    }
++                } catch (SchedulerException ex) {
++                    LOG.debug("Problems while retrieving currently executing jobs", ex);
++                } catch (InstantiationException ex) {
++                    LOG.debug("Problems while instantiating {}", reference, ex);
++                } catch (IllegalAccessException ex) {
++                    LOG.debug("Problems while accessing {}", reference, ex);
++                }
++                break;
++            case SCHEDULED:
++                try {
++                    for (String groupName : scheduler.getScheduler().getJobGroupNames()) {
++                        for (JobKey jobKey : scheduler.getScheduler().getJobKeys(GroupMatcher.
++                                jobGroupEquals(groupName))) {
++                            Long jobId = getKeyFromJobName(jobKey);
++                            if (jobId != null) {
++                                List<? extends Trigger> jobTriggers = scheduler.getScheduler().getTriggersOfJob(jobKey);
++                                for (Trigger t : jobTriggers) {
++                                    E jobExecTO = reference.newInstance();
++                                    jobExecTO.setKey(jobId);
++                                    jobExecTO.setStatus(scheduler.getScheduler().getTriggerState(t.getKey()).name());
++                                    jobExecTO.setStartDate(t.getStartTime());
++                                    jobExecTOs.add(jobExecTO);
++                                }
++                            }
++                        }
++                    }
++                } catch (SchedulerException ex) {
++                    LOG.debug("Problems while retrieving scheduled jobs", ex);
++                } catch (InstantiationException ex) {
++                    LOG.debug("Problems while instantiating {}", reference, ex);
++                } catch (IllegalAccessException ex) {
++                    LOG.debug("Problems while accessing {}", reference, ex);
++                }
++                break;
++            default:
++        }
++        return jobExecTOs;
++    }
++
++    protected void process(final JobAction action, final String jobName) {
++
++        if (jobName != null) {
++            JobKey jobKey = new JobKey(jobName, Scheduler.DEFAULT_GROUP);
++            try {
++                if (scheduler.getScheduler().checkExists(jobKey)) {
++                    switch (action) {
++                        case START:
++                            scheduler.getScheduler().triggerJob(jobKey);
++                            break;
++                        case STOP:
++                            scheduler.getScheduler().interrupt(jobKey);
++                            break;
++                        default:
++                    }
++                }
++            } catch (SchedulerException ex) {
++                LOG.debug("Problems during {} operation on job {}", action, jobName, ex);
++            }
++        }
++    }
++}
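
The STOP branch in process() above delegates to Quartz's Scheduler.interrupt(JobKey). Quartz only honours that call for job classes implementing org.quartz.InterruptableJob that actively check for interruption; the sketch below illustrates that general contract only and is not the actual Syncope job implementation.

import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.UnableToInterruptJobException;

// Hypothetical job class: shows the cooperative contract behind JobAction.STOP.
public class CooperativeSampleJob implements InterruptableJob {

    // Set by the scheduler thread when Scheduler.interrupt(jobKey) is invoked.
    private volatile boolean interrupted = false;

    @Override
    public void execute(final JobExecutionContext context) throws JobExecutionException {
        for (int i = 0; i < 1000 && !interrupted; i++) {
            // ... perform one small unit of work, then loop back and re-check the flag ...
        }
    }

    @Override
    public void interrupt() throws UnableToInterruptJobException {
        interrupted = true;
    }
}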

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
----------------------------------------------------------------------
diff --cc core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
index 2c09d37,0000000..33e2102
mode 100644,000000..100644
--- a/core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
+++ b/core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
@@@ -1,404 -1,0 +1,424 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.logic;
 +
 +import java.io.ByteArrayInputStream;
 +import java.io.OutputStream;
 +import java.lang.reflect.Method;
 +import java.util.ArrayList;
 +import java.util.Date;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +import java.util.zip.ZipInputStream;
 +import org.apache.cocoon.optional.pipeline.components.sax.fop.FopSerializer;
 +import org.apache.cocoon.pipeline.NonCachingPipeline;
 +import org.apache.cocoon.pipeline.Pipeline;
 +import org.apache.cocoon.sax.SAXPipelineComponent;
 +import org.apache.cocoon.sax.component.XMLGenerator;
 +import org.apache.cocoon.sax.component.XMLSerializer;
 +import org.apache.cocoon.sax.component.XSLTTransformer;
 +import org.apache.commons.collections4.CollectionUtils;
 +import org.apache.commons.collections4.PredicateUtils;
 +import org.apache.commons.collections4.Transformer;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.commons.lang3.ArrayUtils;
 +import org.apache.syncope.common.lib.SyncopeClientException;
 +import org.apache.syncope.common.lib.report.ReportletConf;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.ReportTO;
 +import org.apache.syncope.common.lib.types.ClientExceptionType;
 +import org.apache.syncope.common.lib.types.ReportExecExportFormat;
 +import org.apache.syncope.common.lib.types.ReportExecStatus;
 +import org.apache.syncope.core.persistence.api.dao.NotFoundException;
 +import org.apache.syncope.core.persistence.api.dao.ReportDAO;
 +import org.apache.syncope.core.persistence.api.dao.ReportExecDAO;
 +import org.apache.syncope.core.persistence.api.dao.search.OrderByClause;
 +import org.apache.syncope.core.persistence.api.entity.EntityFactory;
 +import org.apache.syncope.core.persistence.api.entity.Report;
 +import org.apache.syncope.core.persistence.api.entity.ReportExec;
 +import org.apache.syncope.core.provisioning.api.data.ReportDataBinder;
 +import org.apache.syncope.core.provisioning.api.job.JobNamer;
 +import org.apache.syncope.core.logic.init.ImplementationClassNamesLoader;
 +import org.apache.syncope.core.provisioning.api.job.JobInstanceLoader;
 +import org.apache.syncope.core.logic.report.Reportlet;
 +import org.apache.syncope.core.logic.report.ReportletConfClass;
 +import org.apache.syncope.core.logic.report.TextSerializer;
 +import org.apache.syncope.common.lib.CollectionUtils2;
++import org.apache.syncope.common.lib.to.AbstractExecTO;
 +import org.apache.syncope.common.lib.types.Entitlement;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.xmlgraphics.util.MimeConstants;
 +import org.quartz.JobKey;
 +import org.quartz.Scheduler;
 +import org.springframework.beans.factory.annotation.Autowired;
- import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 +import org.springframework.security.access.prepost.PreAuthorize;
 +import org.springframework.stereotype.Component;
 +import org.springframework.transaction.annotation.Transactional;
 +import org.springframework.util.ClassUtils;
 +
 +@Component
- public class ReportLogic extends AbstractTransactionalLogic<ReportTO> {
++public class ReportLogic extends AbstractJobLogic<ReportTO> {
 +
 +    @Autowired
 +    private ReportDAO reportDAO;
 +
 +    @Autowired
 +    private ReportExecDAO reportExecDAO;
 +
 +    @Autowired
 +    private JobInstanceLoader jobInstanceLoader;
 +
 +    @Autowired
-     private SchedulerFactoryBean scheduler;
- 
-     @Autowired
 +    private ReportDataBinder binder;
 +
 +    @Autowired
 +    private EntityFactory entityFactory;
 +
 +    @Autowired
 +    private ImplementationClassNamesLoader classNamesLoader;
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_CREATE + "')")
 +    public ReportTO create(final ReportTO reportTO) {
 +        Report report = entityFactory.newEntity(Report.class);
 +        binder.getReport(report, reportTO);
 +        report = reportDAO.save(report);
 +
 +        try {
 +            jobInstanceLoader.registerJob(report);
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for report " + report.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getReportTO(report);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_UPDATE + "')")
 +    public ReportTO update(final ReportTO reportTO) {
 +        Report report = reportDAO.find(reportTO.getKey());
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportTO.getKey());
 +        }
 +
 +        binder.getReport(report, reportTO);
 +        report = reportDAO.save(report);
 +
 +        try {
 +            jobInstanceLoader.registerJob(report);
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for report " + report.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getReportTO(report);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
 +    public int count() {
 +        return reportDAO.count();
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
 +    public List<ReportTO> list(final int page, final int size, final List<OrderByClause> orderByClauses) {
 +        return CollectionUtils.collect(reportDAO.findAll(page, size, orderByClauses),
 +                new Transformer<Report, ReportTO>() {
 +
 +                    @Override
 +                    public ReportTO transform(final Report input) {
 +                        return binder.getReportTO(input);
 +                    }
 +                }, new ArrayList<ReportTO>());
 +    }
 +
 +    private Class<? extends ReportletConf> getReportletConfClass(final Class<Reportlet> reportletClass) {
 +        Class<? extends ReportletConf> result = null;
 +
 +        ReportletConfClass annotation = reportletClass.getAnnotation(ReportletConfClass.class);
 +        if (annotation != null) {
 +            result = annotation.value();
 +        }
 +
 +        return result;
 +    }
 +
 +    @SuppressWarnings({ "rawtypes" })
 +    private Set<Class<Reportlet>> getAllReportletClasses() {
 +        return CollectionUtils2.collect(classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.REPORTLET),
 +                new Transformer<String, Class<Reportlet>>() {
 +
 +                    @SuppressWarnings("unchecked")
 +                    @Override
 +                    public Class<Reportlet> transform(final String className) {
 +                        Class<Reportlet> result = null;
 +                        try {
 +                            Class reportletClass = ClassUtils.forName(className, ClassUtils.getDefaultClassLoader());
 +                            result = reportletClass;
 +                        } catch (ClassNotFoundException e) {
 +                            LOG.warn("Could not load class {}", className);
 +                        } catch (LinkageError e) {
 +                            LOG.warn("Could not link class {}", className);
 +                        }
 +
 +                        return result;
 +                    }
 +                },
 +                PredicateUtils.notNullPredicate(), new HashSet<Class<Reportlet>>());
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
 +    public Set<String> getReportletConfClasses() {
 +        return CollectionUtils2.collect(getAllReportletClasses(),
 +                new Transformer<Class<Reportlet>, String>() {
 +
 +                    @Override
 +                    public String transform(final Class<Reportlet> reportletClass) {
 +                        Class<? extends ReportletConf> reportletConfClass = getReportletConfClass(reportletClass);
 +                        return reportletConfClass == null ? null : reportletConfClass.getName();
 +                    }
 +                }, PredicateUtils.notNullPredicate(), new HashSet<String>());
 +    }
 +
 +    public Class<Reportlet> findReportletClassHavingConfClass(final Class<? extends ReportletConf> reportletConfClass) {
 +        Class<Reportlet> result = null;
 +        for (Class<Reportlet> reportletClass : getAllReportletClasses()) {
 +            Class<? extends ReportletConf> found = getReportletConfClass(reportletClass);
 +            if (found != null && found.equals(reportletConfClass)) {
 +                result = reportletClass;
 +            }
 +        }
 +
 +        return result;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    public ReportTO read(final Long reportKey) {
 +        Report report = reportDAO.find(reportKey);
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportKey);
 +        }
 +        return binder.getReportTO(report);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    @Transactional(readOnly = true)
 +    public ReportExecTO readExecution(final Long executionKey) {
 +        ReportExec reportExec = reportExecDAO.find(executionKey);
 +        if (reportExec == null) {
 +            throw new NotFoundException("Report execution " + executionKey);
 +        }
 +        return binder.getReportExecTO(reportExec);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    public void exportExecutionResult(final OutputStream os, final ReportExec reportExec,
 +            final ReportExecExportFormat format) {
 +
 +        // streaming SAX handler from a compressed byte array stream
 +        ByteArrayInputStream bais = new ByteArrayInputStream(reportExec.getExecResult());
 +        ZipInputStream zis = new ZipInputStream(bais);
 +        try {
 +            // a single ZipEntry in the ZipInputStream (see ReportJob)
 +            zis.getNextEntry();
 +
 +            Pipeline<SAXPipelineComponent> pipeline = new NonCachingPipeline<>();
 +            pipeline.addComponent(new XMLGenerator(zis));
 +
 +            Map<String, Object> parameters = new HashMap<>();
 +            parameters.put("status", reportExec.getStatus());
 +            parameters.put("message", reportExec.getMessage());
 +            parameters.put("startDate", reportExec.getStartDate());
 +            parameters.put("endDate", reportExec.getEndDate());
 +
 +            switch (format) {
 +                case HTML:
 +                    XSLTTransformer xsl2html = new XSLTTransformer(getClass().getResource("/report/report2html.xsl"));
 +                    xsl2html.setParameters(parameters);
 +                    pipeline.addComponent(xsl2html);
 +                    pipeline.addComponent(XMLSerializer.createXHTMLSerializer());
 +                    break;
 +
 +                case PDF:
 +                    XSLTTransformer xsl2pdf = new XSLTTransformer(getClass().getResource("/report/report2fo.xsl"));
 +                    xsl2pdf.setParameters(parameters);
 +                    pipeline.addComponent(xsl2pdf);
 +                    pipeline.addComponent(new FopSerializer(MimeConstants.MIME_PDF));
 +                    break;
 +
 +                case RTF:
 +                    XSLTTransformer xsl2rtf = new XSLTTransformer(getClass().getResource("/report/report2fo.xsl"));
 +                    xsl2rtf.setParameters(parameters);
 +                    pipeline.addComponent(xsl2rtf);
 +                    pipeline.addComponent(new FopSerializer(MimeConstants.MIME_RTF));
 +                    break;
 +
 +                case CSV:
 +                    XSLTTransformer xsl2csv = new XSLTTransformer(getClass().getResource("/report/report2csv.xsl"));
 +                    xsl2csv.setParameters(parameters);
 +                    pipeline.addComponent(xsl2csv);
 +                    pipeline.addComponent(new TextSerializer());
 +                    break;
 +
 +                case XML:
 +                default:
 +                    pipeline.addComponent(XMLSerializer.createXMLSerializer());
 +            }
 +
 +            pipeline.setup(os);
 +            pipeline.execute();
 +
 +            LOG.debug("Result of {} successfully exported as {}", reportExec, format);
 +        } catch (Exception e) {
 +            LOG.error("While exporting content", e);
 +        } finally {
 +            IOUtils.closeQuietly(zis);
 +            IOUtils.closeQuietly(bais);
 +        }
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    public ReportExec getAndCheckReportExec(final Long executionKey) {
 +        ReportExec reportExec = reportExecDAO.find(executionKey);
 +        if (reportExec == null) {
 +            throw new NotFoundException("Report execution " + executionKey);
 +        }
 +        if (!ReportExecStatus.SUCCESS.name().equals(reportExec.getStatus()) || reportExec.getExecResult() == null) {
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidReportExec);
 +            sce.getElements().add(reportExec.getExecResult() == null
 +                    ? "No report data produced"
 +                    : "Report did not run successfully");
 +            throw sce;
 +        }
 +        return reportExec;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_EXECUTE + "')")
 +    public ReportExecTO execute(final Long reportKey) {
 +        Report report = reportDAO.find(reportKey);
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportKey);
 +        }
 +
 +        try {
 +            jobInstanceLoader.registerJob(report);
 +
 +            scheduler.getScheduler().triggerJob(
 +                    new JobKey(JobNamer.getJobName(report), Scheduler.DEFAULT_GROUP));
 +        } catch (Exception e) {
 +            LOG.error("While executing report {}", report, e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        ReportExecTO result = new ReportExecTO();
 +        result.setReport(reportKey);
 +        result.setStartDate(new Date());
 +        result.setStatus(ReportExecStatus.STARTED.name());
 +        result.setMessage("Job fired; waiting for results...");
 +
 +        return result;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_DELETE + "')")
 +    public ReportTO delete(final Long reportKey) {
 +        Report report = reportDAO.find(reportKey);
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportKey);
 +        }
 +
 +        ReportTO deletedReport = binder.getReportTO(report);
 +        jobInstanceLoader.unregisterJob(report);
 +        reportDAO.delete(report);
 +        return deletedReport;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_DELETE + "')")
 +    public ReportExecTO deleteExecution(final Long executionKey) {
 +        ReportExec reportExec = reportExecDAO.find(executionKey);
 +        if (reportExec == null) {
 +            throw new NotFoundException("Report execution " + executionKey);
 +        }
 +
 +        ReportExecTO reportExecToDelete = binder.getReportExecTO(reportExec);
 +        reportExecDAO.delete(reportExec);
 +        return reportExecToDelete;
 +    }
 +
 +    @Override
 +    protected ReportTO resolveReference(final Method method, final Object... args)
 +            throws UnresolvedReferenceException {
 +
 +        Long key = null;
 +
 +        if (ArrayUtils.isNotEmpty(args) && ("create".equals(method.getName())
 +                || "update".equals(method.getName())
 +                || "delete".equals(method.getName()))) {
 +            for (int i = 0; key == null && i < args.length; i++) {
 +                if (args[i] instanceof Long) {
 +                    key = (Long) args[i];
 +                } else if (args[i] instanceof ReportTO) {
 +                    key = ((ReportTO) args[i]).getKey();
 +                }
 +            }
 +        }
 +
 +        if ((key != null) && !key.equals(0L)) {
 +            try {
 +                return binder.getReportTO(reportDAO.find(key));
 +            } catch (Throwable ignore) {
 +                LOG.debug("Unresolved reference", ignore);
 +                throw new UnresolvedReferenceException(ignore);
 +            }
 +        }
 +
 +        throw new UnresolvedReferenceException();
 +    }
++
++    @Override
++    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
++    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
++        return super.list(type, reference);
++    }
++
++    @PreAuthorize("hasRole('" + Entitlement.REPORT_EXECUTE + "')")
++    public void process(final JobAction action, final Long reportKey) {
++        Report report = reportDAO.find(reportKey);
++        if (report == null) {
++            throw new NotFoundException("Report " + reportKey);
++        }
++        String jobName = JobNamer.getJobName(report);
++        process(action, jobName);
++    }
++
++    @Override
++    protected Long getKeyFromJobName(final JobKey jobKey) {
++        return JobNamer.getReportKeyFromJobName(jobKey.getName());
++    }
 +}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
----------------------------------------------------------------------
diff --cc core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
index 7a64aff,0000000..e12c2da
mode 100644,000000..100644
--- a/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
+++ b/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
@@@ -1,339 -1,0 +1,363 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.logic;
 +
 +import java.lang.reflect.Method;
 +import java.util.ArrayList;
 +import java.util.Date;
 +import java.util.List;
 +import org.apache.commons.collections4.CollectionUtils;
 +import org.apache.commons.collections4.Transformer;
 +import org.apache.commons.lang3.ArrayUtils;
 +import org.apache.syncope.common.lib.SyncopeClientException;
++import org.apache.syncope.common.lib.to.AbstractExecTO;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.SyncTaskTO;
 +import org.apache.syncope.common.lib.to.TaskExecTO;
 +import org.apache.syncope.common.lib.types.ClientExceptionType;
 +import org.apache.syncope.common.lib.types.Entitlement;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.PropagationMode;
 +import org.apache.syncope.common.lib.types.PropagationTaskExecStatus;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.core.persistence.api.dao.NotFoundException;
 +import org.apache.syncope.core.persistence.api.dao.TaskDAO;
 +import org.apache.syncope.core.persistence.api.dao.TaskExecDAO;
 +import org.apache.syncope.core.persistence.api.dao.search.OrderByClause;
 +import org.apache.syncope.core.persistence.api.entity.task.NotificationTask;
 +import org.apache.syncope.core.persistence.api.entity.task.PropagationTask;
 +import org.apache.syncope.core.persistence.api.entity.task.SchedTask;
 +import org.apache.syncope.core.persistence.api.entity.task.Task;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskUtils;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskUtilsFactory;
 +import org.apache.syncope.core.provisioning.api.data.TaskDataBinder;
 +import org.apache.syncope.core.provisioning.api.job.JobNamer;
 +import org.apache.syncope.core.provisioning.api.job.TaskJob;
 +import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor;
 +import org.apache.syncope.core.provisioning.api.job.JobInstanceLoader;
 +import org.apache.syncope.core.logic.notification.NotificationJob;
 +import org.quartz.JobDataMap;
 +import org.quartz.JobKey;
 +import org.quartz.Scheduler;
 +import org.springframework.beans.factory.annotation.Autowired;
 +import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 +import org.springframework.security.access.prepost.PreAuthorize;
 +import org.springframework.stereotype.Component;
 +
 +@Component
- public class TaskLogic extends AbstractTransactionalLogic<AbstractTaskTO> {
++public class TaskLogic extends AbstractJobLogic<AbstractTaskTO> {
 +
 +    @Autowired
 +    private TaskDAO taskDAO;
 +
 +    @Autowired
 +    private TaskExecDAO taskExecDAO;
 +
 +    @Autowired
 +    private TaskDataBinder binder;
 +
 +    @Autowired
 +    private PropagationTaskExecutor taskExecutor;
 +
 +    @Autowired
 +    private NotificationJob notificationJob;
 +
 +    @Autowired
 +    private JobInstanceLoader jobInstanceLoader;
 +
 +    @Autowired
 +    private SchedulerFactoryBean scheduler;
 +
 +    @Autowired
 +    private TaskUtilsFactory taskUtilsFactory;
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_CREATE + "')")
 +    public <T extends SchedTaskTO> T createSchedTask(final T taskTO) {
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(taskTO);
 +
 +        SchedTask task = binder.createSchedTask(taskTO, taskUtils);
 +        task = taskDAO.save(task);
 +
 +        try {
 +            jobInstanceLoader.registerJob(task, task.getJobClassName(), task.getCronExpression());
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for task " + task.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getTaskTO(task, taskUtils);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_UPDATE + "')")
 +    public SyncTaskTO updateSync(final SyncTaskTO taskTO) {
 +        return updateSched(taskTO);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_UPDATE + "')")
 +    public <T extends SchedTaskTO> T updateSched(final SchedTaskTO taskTO) {
 +        SchedTask task = taskDAO.find(taskTO.getKey());
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskTO.getKey());
 +        }
 +
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(task);
 +
 +        binder.updateSchedTask(task, taskTO, taskUtils);
 +        task = taskDAO.save(task);
 +
 +        try {
 +            jobInstanceLoader.registerJob(task, task.getJobClassName(), task.getCronExpression());
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for task " + task.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getTaskTO(task, taskUtils);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_LIST + "')")
 +    public int count(final TaskType taskType) {
 +        return taskDAO.count(taskType);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_LIST + "')")
 +    @SuppressWarnings("unchecked")
 +    public <T extends AbstractTaskTO> List<T> list(final TaskType taskType,
 +            final int page, final int size, final List<OrderByClause> orderByClauses) {
 +
 +        final TaskUtils taskUtilss = taskUtilsFactory.getInstance(taskType);
 +
 +        return CollectionUtils.collect(taskDAO.findAll(page, size, orderByClauses, taskType),
 +                new Transformer<Task, T>() {
 +
 +                    @Override
 +                    public T transform(final Task task) {
 +                        return (T) binder.getTaskTO(task, taskUtilss);
 +                    }
 +                }, new ArrayList<T>());
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_READ + "')")
 +    public <T extends AbstractTaskTO> T read(final Long taskId) {
 +        Task task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskId);
 +        }
 +        return binder.getTaskTO(task, taskUtilsFactory.getInstance(task));
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_READ + "')")
 +    public TaskExecTO readExecution(final Long executionId) {
 +        TaskExec taskExec = taskExecDAO.find(executionId);
 +        if (taskExec == null) {
 +            throw new NotFoundException("Task execution " + executionId);
 +        }
 +        return binder.getTaskExecTO(taskExec);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_EXECUTE + "')")
 +    public TaskExecTO execute(final Long taskId, final boolean dryRun) {
 +        Task task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskId);
 +        }
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(task);
 +
 +        TaskExecTO result = null;
 +        switch (taskUtils.getType()) {
 +            case PROPAGATION:
 +                final TaskExec propExec = taskExecutor.execute((PropagationTask) task);
 +                result = binder.getTaskExecTO(propExec);
 +                break;
 +
 +            case NOTIFICATION:
 +                final TaskExec notExec = notificationJob.executeSingle((NotificationTask) task);
 +                result = binder.getTaskExecTO(notExec);
 +                break;
 +
 +            case SCHEDULED:
 +            case SYNCHRONIZATION:
 +            case PUSH:
 +                try {
 +                    jobInstanceLoader.registerJob(task,
 +                            ((SchedTask) task).getJobClassName(),
 +                            ((SchedTask) task).getCronExpression());
 +
 +                    JobDataMap map = new JobDataMap();
 +                    map.put(TaskJob.DRY_RUN_JOBDETAIL_KEY, dryRun);
 +
 +                    scheduler.getScheduler().triggerJob(
 +                            new JobKey(JobNamer.getJobName(task), Scheduler.DEFAULT_GROUP), map);
 +                } catch (Exception e) {
 +                    LOG.error("While executing task {}", task, e);
 +
 +                    SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +                    sce.getElements().add(e.getMessage());
 +                    throw sce;
 +                }
 +
 +                result = new TaskExecTO();
 +                result.setTask(taskId);
 +                result.setStartDate(new Date());
 +                result.setStatus("JOB_FIRED");
 +                result.setMessage("Job fired; waiting for results...");
 +                break;
 +
 +            default:
 +        }
 +
 +        return result;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_READ + "')")
 +    public TaskExecTO report(final Long executionId, final PropagationTaskExecStatus status, final String message) {
 +        TaskExec exec = taskExecDAO.find(executionId);
 +        if (exec == null) {
 +            throw new NotFoundException("Task execution " + executionId);
 +        }
 +
 +        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidPropagationTaskExecReport);
 +
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(exec.getTask());
 +        if (TaskType.PROPAGATION == taskUtils.getType()) {
 +            PropagationTask task = (PropagationTask) exec.getTask();
 +            if (task.getPropagationMode() != PropagationMode.TWO_PHASES) {
 +                sce.getElements().add("Propagation mode: " + task.getPropagationMode());
 +            }
 +        } else {
 +            sce.getElements().add("Task type: " + taskUtils);
 +        }
 +
 +        switch (status) {
 +            case SUCCESS:
 +            case FAILURE:
 +                break;
 +
 +            case CREATED:
 +            case SUBMITTED:
 +            case UNSUBMITTED:
 +                sce.getElements().add("Execution status to be set: " + status);
 +                break;
 +
 +            default:
 +        }
 +
 +        if (!sce.isEmpty()) {
 +            throw sce;
 +        }
 +
 +        exec.setStatus(status.toString());
 +        exec.setMessage(message);
 +        return binder.getTaskExecTO(taskExecDAO.save(exec));
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_DELETE + "')")
 +    public <T extends AbstractTaskTO> T delete(final Long taskId) {
 +        Task task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskId);
 +        }
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(task);
 +
 +        T taskToDelete = binder.getTaskTO(task, taskUtils);
 +
 +        if (TaskType.SCHEDULED == taskUtils.getType()
 +                || TaskType.SYNCHRONIZATION == taskUtils.getType()
 +                || TaskType.PUSH == taskUtils.getType()) {
 +
 +            jobInstanceLoader.unregisterJob(task);
 +        }
 +
 +        taskDAO.delete(task);
 +        return taskToDelete;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_DELETE + "')")
 +    public TaskExecTO deleteExecution(final Long executionId) {
 +        TaskExec taskExec = taskExecDAO.find(executionId);
 +        if (taskExec == null) {
 +            throw new NotFoundException("Task execution " + executionId);
 +        }
 +
 +        TaskExecTO taskExecutionToDelete = binder.getTaskExecTO(taskExec);
 +        taskExecDAO.delete(taskExec);
 +        return taskExecutionToDelete;
 +    }
 +
 +    @Override
 +    protected AbstractTaskTO resolveReference(final Method method, final Object... args)
 +            throws UnresolvedReferenceException {
 +
 +        Long key = null;
 +
 +        if (ArrayUtils.isNotEmpty(args)
 +                && !"deleteExecution".equals(method.getName()) && !"readExecution".equals(method.getName())) {
 +
 +            for (int i = 0; key == null && i < args.length; i++) {
 +                if (args[i] instanceof Long) {
 +                    key = (Long) args[i];
 +                } else if (args[i] instanceof AbstractTaskTO) {
 +                    key = ((AbstractTaskTO) args[i]).getKey();
 +                }
 +            }
 +        }
 +
 +        if ((key != null) && !key.equals(0L)) {
 +            try {
 +                final Task task = taskDAO.find(key);
 +                return binder.getTaskTO(task, taskUtilsFactory.getInstance(task));
 +            } catch (Throwable ignore) {
 +                LOG.debug("Unresolved reference", ignore);
 +                throw new UnresolvedReferenceException(ignore);
 +            }
 +        }
 +
 +        throw new UnresolvedReferenceException();
 +    }
++
++    @Override
++    @PreAuthorize("hasRole('" + Entitlement.TASK_LIST + "')")
++    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
++        return super.list(type, reference);
++    }
++
++    @PreAuthorize("hasRole('" + Entitlement.TASK_EXECUTE + "')")
++    public void process(final JobAction action, final Long taskId) {
++        Task task = taskDAO.find(taskId);
++        if (task == null) {
++            throw new NotFoundException("Task " + taskId);
++        }
++        String jobName = JobNamer.getJobName(task);
++        process(action, jobName);
++    }
++
++    @Override
++    protected Long getKeyFromJobName(final JobKey jobKey) {
++        return JobNamer.getTaskKeyFromJobName(jobKey.getName());
++    }
 +}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
----------------------------------------------------------------------
diff --cc core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
index fe64a7b,0000000..9ddf563
mode 100644,000000..100644
--- a/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
+++ b/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
@@@ -1,72 -1,0 +1,72 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.provisioning.api.job;
 +
 +import java.util.regex.Matcher;
 +import java.util.regex.Pattern;
 +import org.apache.syncope.core.persistence.api.entity.Report;
 +import org.apache.syncope.core.persistence.api.entity.task.Task;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +public final class JobNamer {
 +
 +    private static final Logger LOG = LoggerFactory.getLogger(JobNamer.class);
 +
 +    private static Long getIdFromJobName(final String name, final String pattern, final int prefixLength) {
 +        Long result = null;
 +
 +        Matcher jobMatcher = Pattern.compile(pattern).matcher(name);
 +        if (jobMatcher.matches()) {
 +            try {
 +                result = Long.valueOf(name.substring(prefixLength));
 +            } catch (NumberFormatException e) {
 +                LOG.error("Unparsable id: {}", name.substring(prefixLength), e);
 +            }
 +        }
 +
 +        return result;
 +    }
 +
-     public static Long getTaskIdFromJobName(final String name) {
++    public static Long getTaskKeyFromJobName(final String name) {
 +        return getIdFromJobName(name, "taskJob[0-9]+", 7);
 +    }
 +
-     public static Long getReportIdFromJobName(final String name) {
++    public static Long getReportKeyFromJobName(final String name) {
 +        return getIdFromJobName(name, "reportJob[0-9]+", 9);
 +    }
 +
 +    public static String getJobName(final Task task) {
 +        return task == null
 +                ? "taskNotificationJob"
 +                : "taskJob" + task.getKey();
 +    }
 +
 +    public static String getJobName(final Report report) {
 +        return "reportJob" + report.getKey();
 +    }
 +
 +    public static String getTriggerName(final String jobName) {
 +        return "Trigger_" + jobName;
 +    }
 +
 +    private JobNamer() {
 +        // private constructor for static utility class
 +    }
 +}
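
Note on the naming scheme above: getJobName() provides the forward mapping from a Task or Report to its Quartz job name, while getTaskKeyFromJobName() / getReportKeyFromJobName() parse the key back out (this is what the getKeyFromJobName(JobKey) override shown earlier relies on). A minimal round-trip sketch; the "task" variable and its key value are illustrative only:

    // assuming "task" is a persisted Task whose getKey() returns 123L
    String jobName = JobNamer.getJobName(task);                 // "taskJob123"
    Long key = JobNamer.getTaskKeyFromJobName(jobName);         // 123L
    String triggerName = JobNamer.getTriggerName(jobName);      // "Trigger_taskJob123"
    // a null Task maps to the fixed notification job name
    String notificationJob = JobNamer.getJobName((Task) null);  // "taskNotificationJob"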

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
----------------------------------------------------------------------
diff --cc core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
index b6efd53,0000000..3df89bc
mode 100644,000000..100644
--- a/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
+++ b/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.provisioning.api.job;
 +
 +import org.quartz.DisallowConcurrentExecution;
- import org.quartz.Job;
++import org.quartz.InterruptableJob;
 +
 +/**
 + * Interface for Quartz jobs bound to a given Task.
 + */
 +@DisallowConcurrentExecution
- public interface TaskJob extends Job {
++public interface TaskJob extends InterruptableJob {
 +
 +    String DRY_RUN_JOBDETAIL_KEY = "dryRun";
 +
 +    /**
 +     * Task execution status.
 +     */
 +    public enum Status {
 +
 +        SUCCESS,
 +        FAILURE
 +
 +    }
 +
 +    void setTaskId(Long taskId);
 +}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
----------------------------------------------------------------------
diff --cc core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
index 6659d7d,0000000..688ca99
mode 100644,000000..100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
@@@ -1,181 -1,0 +1,204 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.provisioning.java.job;
 +
++import java.text.SimpleDateFormat;
 +import java.util.Date;
++import java.util.Locale;
++import java.util.concurrent.atomic.AtomicReference;
++import org.apache.syncope.common.lib.SyncopeConstants;
 +import org.apache.syncope.common.lib.types.AuditElements;
 +import org.apache.syncope.common.lib.types.AuditElements.Result;
 +import org.apache.syncope.core.persistence.api.dao.TaskDAO;
 +import org.apache.syncope.core.persistence.api.dao.TaskExecDAO;
 +import org.apache.syncope.core.persistence.api.entity.EntityFactory;
 +import org.apache.syncope.core.persistence.api.entity.task.Task;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
 +import org.apache.syncope.core.provisioning.api.job.TaskJob;
 +import org.apache.syncope.core.misc.AuditManager;
 +import org.apache.syncope.core.misc.ExceptionUtils2;
 +import org.apache.syncope.core.provisioning.api.notification.NotificationManager;
 +import org.quartz.DisallowConcurrentExecution;
 +import org.quartz.JobExecutionContext;
 +import org.quartz.JobExecutionException;
++import org.quartz.UnableToInterruptJobException;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +import org.springframework.beans.factory.annotation.Autowired;
 +
 +/**
 + * Abstract job implementation that delegates the actual job execution to concrete implementations, while providing
 + * some base features.
 + * <strong>Extending this class does not provide transaction management support.</strong><br/>
 + * Extend <tt>AbstractTransactionalTaskJob</tt> for that purpose.
 + *
 + * @see AbstractTransactionalTaskJob
 + */
 +@DisallowConcurrentExecution
 +public abstract class AbstractTaskJob implements TaskJob {
 +
 +    /**
 +     * Task execution status.
 +     */
 +    public enum Status {
 +
 +        SUCCESS,
 +        FAILURE
 +
 +    }
 +
 +    /**
 +     * Logger.
 +     */
 +    protected static final Logger LOG = LoggerFactory.getLogger(AbstractTaskJob.class);
 +
 +    /**
 +     * Task DAO.
 +     */
 +    @Autowired
 +    protected TaskDAO taskDAO;
 +
 +    /**
 +     * Task execution DAO.
 +     */
 +    @Autowired
 +    private TaskExecDAO taskExecDAO;
 +
 +    /**
 +     * Notification manager.
 +     */
 +    @Autowired
 +    private NotificationManager notificationManager;
 +
 +    /**
 +     * Audit manager.
 +     */
 +    @Autowired
 +    private AuditManager auditManager;
 +
 +    @Autowired
 +    private EntityFactory entityFactory;
 +
 +    /**
 +     * Id, set by the caller, for identifying the task to be executed.
 +     */
 +    protected Long taskId;
 +
 +    /**
 +     * The actual task to be executed.
 +     */
 +    protected Task task;
 +
 +    /**
++     * The thread currently running this task's execution, if any.
++     */
++    protected AtomicReference<Thread> runningThread = new AtomicReference<Thread>();
++
++    /**
 +     * Task id setter.
 +     *
 +     * @param taskId to be set
 +     */
 +    @Override
 +    public void setTaskId(final Long taskId) {
 +        this.taskId = taskId;
 +    }
 +
 +    @Override
 +    public void execute(final JobExecutionContext context) throws JobExecutionException {
++        this.runningThread.set(Thread.currentThread());
 +        task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new JobExecutionException("Task " + taskId + " not found");
 +        }
 +
 +        TaskExec execution = entityFactory.newEntity(TaskExec.class);
 +        execution.setStartDate(new Date());
 +        execution.setTask(task);
 +
 +        Result result;
 +
 +        try {
 +            execution.setMessage(doExecute(context.getMergedJobDataMap().getBoolean(DRY_RUN_JOBDETAIL_KEY)));
 +            execution.setStatus(Status.SUCCESS.name());
 +            result = Result.SUCCESS;
 +        } catch (JobExecutionException e) {
 +            LOG.error("While executing task " + taskId, e);
 +            result = Result.FAILURE;
 +
 +            execution.setMessage(ExceptionUtils2.getFullStackTrace(e));
 +            execution.setStatus(Status.FAILURE.name());
 +        }
 +        execution.setEndDate(new Date());
 +
 +        if (hasToBeRegistered(execution)) {
 +            taskExecDAO.saveAndAdd(taskId, execution);
 +        }
 +        task = taskDAO.save(task);
 +
 +        notificationManager.createTasks(
 +                AuditElements.EventCategoryType.TASK,
 +                this.getClass().getSimpleName(),
 +                null,
 +                this.getClass().getSimpleName(), // searching for the 'before' object would be too expensive ...
 +                result,
 +                task,
 +                execution);
 +
 +        auditManager.audit(
 +                AuditElements.EventCategoryType.TASK,
 +                task.getClass().getSimpleName(),
 +                null,
 +                null, // searching for the 'before' object would be too expensive ...
 +                result,
 +                task,
 +                (Object[]) null);
 +    }
 +
 +    /**
 +     * The actual execution, delegated to child classes.
 +     *
 +     * @param dryRun whether to actually touch the data
 +     * @return the message to be set on the resulting task execution
 +     * @throws JobExecutionException if anything goes wrong
 +     */
 +    protected abstract String doExecute(boolean dryRun) throws JobExecutionException;
 +
 +    /**
 +     * Template method to determine whether this job's task execution has to be persisted or not.
 +     *
 +     * @param execution task execution
 +     * @return whether to persist or not
 +     */
 +    protected boolean hasToBeRegistered(final TaskExec execution) {
 +        return false;
 +    }
++
++    @Override
++    public void interrupt() throws UnableToInterruptJobException {
++        Thread thread = this.runningThread.getAndSet(null);
++        if (thread != null) {
++            LOG.info("Interrupting job at {}", new SimpleDateFormat(
++                    SyncopeConstants.DEFAULT_DATE_PATTERN, Locale.getDefault()).format(new Date()));
++            thread.interrupt();
++        } else {
++            LOG.warn("Unable to find the thread of the current job execution: interruption not possible");
++        }
++    }
 +}
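
Since interrupt() above only sets the interrupted flag on the thread captured at the beginning of execute(), a long-running doExecute() still has to check that flag cooperatively for the interruption to take effect. A minimal sketch of a hypothetical subclass (class name and loop are illustrative, not part of this commit):

    import org.quartz.JobExecutionException;

    public class SampleInterruptibleJob extends AbstractTaskJob {

        @Override
        protected String doExecute(final boolean dryRun) throws JobExecutionException {
            int processed = 0;
            // stop as soon as interrupt() has been invoked on this job
            while (processed < 1000 && !Thread.currentThread().isInterrupted()) {
                // perform one unit of (possibly dry-run) work here
                processed++;
            }
            return Thread.currentThread().isInterrupted()
                    ? "Interrupted after " + processed + " steps"
                    : "Completed " + processed + " steps";
        }
    }

Blocking calls such as Thread.sleep() will instead surface the interrupt as an InterruptedException, which should be caught and treated the same way.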

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
----------------------------------------------------------------------
diff --cc core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
index dfde009,0000000..4fe02ba
mode 100644,000000..100644
--- a/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
+++ b/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
@@@ -1,122 -1,0 +1,134 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.rest.cxf.service;
 +
 +import java.io.IOException;
 +import java.io.OutputStream;
 +import java.net.URI;
 +import java.util.List;
 +import javax.ws.rs.core.HttpHeaders;
 +import javax.ws.rs.core.Response;
 +import javax.ws.rs.core.StreamingOutput;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.ReportTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.ReportExecExportFormat;
 +import org.apache.syncope.common.lib.wrap.ReportletConfClass;
 +import org.apache.syncope.common.rest.api.CollectionWrapper;
 +import org.apache.syncope.common.rest.api.RESTHeaders;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +import org.apache.syncope.common.rest.api.service.ReportService;
 +import org.apache.syncope.core.logic.ReportLogic;
 +import org.apache.syncope.core.persistence.api.entity.ReportExec;
 +import org.springframework.beans.factory.annotation.Autowired;
 +import org.springframework.stereotype.Service;
 +
 +@Service
 +public class ReportServiceImpl extends AbstractServiceImpl implements ReportService {
 +
 +    @Autowired
 +    private ReportLogic logic;
 +
 +    @Override
 +    public Response create(final ReportTO reportTO) {
 +        ReportTO createdReportTO = logic.create(reportTO);
 +        URI location = uriInfo.getAbsolutePathBuilder().path(String.valueOf(createdReportTO.getKey())).build();
 +        return Response.created(location).
 +                header(RESTHeaders.RESOURCE_ID, createdReportTO.getKey()).
 +                build();
 +    }
 +
 +    @Override
 +    public void update(final Long reportKey, final ReportTO reportTO) {
 +        reportTO.setKey(reportKey);
 +        logic.update(reportTO);
 +    }
 +
 +    @Override
 +    public PagedResult<ReportTO> list(final ListQuery listQuery) {
 +        return buildPagedResult(
 +                logic.list(
 +                        listQuery.getPage(),
 +                        listQuery.getSize(),
 +                        getOrderByClauses(listQuery.getOrderBy())),
 +                listQuery.getPage(),
 +                listQuery.getSize(),
 +                logic.count());
 +    }
 +
 +    @Override
 +    public List<ReportletConfClass> getReportletConfClasses() {
 +        return CollectionWrapper.wrap(logic.getReportletConfClasses(), ReportletConfClass.class);
 +    }
 +
 +    @Override
 +    public ReportTO read(final Long reportKey) {
 +        return logic.read(reportKey);
 +    }
 +
 +    @Override
 +    public ReportExecTO readExecution(final Long executionKey) {
 +        return logic.readExecution(executionKey);
 +    }
 +
 +    @Override
 +    public Response exportExecutionResult(final Long executionKey, final ReportExecExportFormat fmt) {
 +        final ReportExecExportFormat format = (fmt == null) ? ReportExecExportFormat.XML : fmt;
 +        final ReportExec reportExec = logic.getAndCheckReportExec(executionKey);
 +        StreamingOutput sout = new StreamingOutput() {
 +
 +            @Override
 +            public void write(final OutputStream os) throws IOException {
 +                logic.exportExecutionResult(os, reportExec, format);
 +            }
 +        };
 +        String disposition = "attachment; filename=" + reportExec.getReport().getName() + "." + format.name().
 +                toLowerCase();
 +        return Response.ok(sout).
 +                header(HttpHeaders.CONTENT_DISPOSITION, disposition).
 +                build();
 +    }
 +
 +    @Override
 +    public ReportExecTO execute(final Long reportKey) {
 +        return logic.execute(reportKey);
 +    }
 +
 +    @Override
 +    public void delete(final Long reportKey) {
 +        logic.delete(reportKey);
 +    }
 +
 +    @Override
 +    public void deleteExecution(final Long executionKey) {
 +        logic.deleteExecution(executionKey);
 +    }
++
++    @Override
++    public List<ReportExecTO> list(final JobStatusType type) {
++        return logic.list(type, ReportExecTO.class);
++    }
++
++    @Override
++    public void process(final JobAction action, final Long reportId) {
++        logic.process(action, reportId);
++    }
 +}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
----------------------------------------------------------------------
diff --cc core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
index 83edcc0,0000000..957891a
mode 100644,000000..100644
--- a/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
+++ b/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
@@@ -1,166 -1,0 +1,179 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.rest.cxf.service;
 +
 +import java.net.URI;
++import java.util.List;
 +import javax.ws.rs.BadRequestException;
 +import javax.ws.rs.core.Response;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.BulkAction;
 +import org.apache.syncope.common.lib.to.BulkActionResult;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.PushTaskTO;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.SyncTaskTO;
 +import org.apache.syncope.common.lib.to.TaskExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.PropagationTaskExecStatus;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.common.rest.api.RESTHeaders;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +import org.apache.syncope.common.rest.api.service.TaskService;
 +import org.apache.syncope.core.logic.TaskLogic;
 +import org.springframework.beans.factory.annotation.Autowired;
 +import org.springframework.stereotype.Service;
 +
 +@Service
 +public class TaskServiceImpl extends AbstractServiceImpl implements TaskService {
 +
 +    @Autowired
 +    private TaskLogic logic;
 +
 +    @Override
 +    public <T extends SchedTaskTO> Response create(final T taskTO) {
 +        T createdTask;
 +        if (taskTO instanceof SyncTaskTO || taskTO instanceof PushTaskTO || taskTO instanceof SchedTaskTO) {
 +            createdTask = logic.createSchedTask(taskTO);
 +        } else {
 +            throw new BadRequestException();
 +        }
 +
 +        URI location = uriInfo.getAbsolutePathBuilder().path(String.valueOf(createdTask.getKey())).build();
 +        return Response.created(location).
 +                header(RESTHeaders.RESOURCE_ID, createdTask.getKey()).
 +                build();
 +    }
 +
 +    @Override
 +    public void delete(final Long taskKey) {
 +        logic.delete(taskKey);
 +    }
 +
 +    @Override
 +    public void deleteExecution(final Long executionKey) {
 +        logic.deleteExecution(executionKey);
 +    }
 +
 +    @Override
 +    public TaskExecTO execute(final Long taskKey, final boolean dryRun) {
 +        return logic.execute(taskKey, dryRun);
 +    }
 +
 +    @SuppressWarnings("unchecked")
 +    @Override
 +    public <T extends AbstractTaskTO> PagedResult<T> list(final TaskType taskType, final ListQuery listQuery) {
 +        return (PagedResult<T>) buildPagedResult(
 +                logic.list(
 +                        taskType,
 +                        listQuery.getPage(),
 +                        listQuery.getSize(),
 +                        getOrderByClauses(listQuery.getOrderBy())),
 +                listQuery.getPage(),
 +                listQuery.getSize(),
 +                logic.count(taskType));
 +    }
 +
 +    @Override
 +    public <T extends AbstractTaskTO> T read(final Long taskKey) {
 +        return logic.read(taskKey);
 +    }
 +
 +    @Override
 +    public TaskExecTO readExecution(final Long executionKey) {
 +        return logic.readExecution(executionKey);
 +    }
 +
 +    @Override
 +    public void report(final Long executionKey, final ReportExecTO reportExec) {
 +        reportExec.setKey(executionKey);
 +        logic.report(
 +                executionKey, PropagationTaskExecStatus.fromString(reportExec.getStatus()), reportExec.getMessage());
 +    }
 +
 +    @Override
 +    public void update(final Long taskKey, final AbstractTaskTO taskTO) {
 +        taskTO.setKey(taskKey);
 +        if (taskTO instanceof SyncTaskTO) {
 +            logic.updateSync((SyncTaskTO) taskTO);
 +        } else if (taskTO instanceof SchedTaskTO) {
 +            logic.updateSched((SchedTaskTO) taskTO);
 +        } else {
 +            throw new BadRequestException();
 +        }
 +    }
 +
 +    @Override
 +    public BulkActionResult bulk(final BulkAction bulkAction) {
 +        BulkActionResult result = new BulkActionResult();
 +
 +        switch (bulkAction.getOperation()) {
 +            case DELETE:
 +                for (String taskKey : bulkAction.getTargets()) {
 +                    try {
 +                        result.add(logic.delete(Long.valueOf(taskKey)).getKey(), BulkActionResult.Status.SUCCESS);
 +                    } catch (Exception e) {
 +                        LOG.error("Error performing delete for task {}", taskKey, e);
 +                        result.add(taskKey, BulkActionResult.Status.FAILURE);
 +                    }
 +                }
 +                break;
 +
 +            case DRYRUN:
 +                for (String taskKey : bulkAction.getTargets()) {
 +                    try {
 +                        logic.execute(Long.valueOf(taskKey), true);
 +                        result.add(taskKey, BulkActionResult.Status.SUCCESS);
 +                    } catch (Exception e) {
 +                        LOG.error("Error performing dryrun for task {}", taskKey, e);
 +                        result.add(taskKey, BulkActionResult.Status.FAILURE);
 +                    }
 +                }
 +                break;
 +
 +            case EXECUTE:
 +                for (String taskKey : bulkAction.getTargets()) {
 +                    try {
 +                        logic.execute(Long.valueOf(taskKey), false);
 +                        result.add(taskKey, BulkActionResult.Status.SUCCESS);
 +                    } catch (Exception e) {
 +                        LOG.error("Error performing execute for task {}", taskKey, e);
 +                        result.add(taskKey, BulkActionResult.Status.FAILURE);
 +                    }
 +                }
 +                break;
 +
 +            default:
 +        }
 +
 +        return result;
 +    }
++
++    @Override
++    public List<TaskExecTO> list(final JobStatusType type) {
++        return logic.list(type, TaskExecTO.class);
++    }
++
++    @Override
++    public void process(final JobAction action, final Long taskId) {
++        logic.process(action, taskId);
++    }
 +}
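
Both service implementations simply delegate the two new operations to their logic counterparts, so job monitoring and control become available through the existing TaskService and ReportService proxies. A hypothetical client-side sketch; the way the proxy is obtained and the specific JobStatusType / JobAction constants are assumptions, not taken from this commit:

    // "taskService" is a JAX-RS client proxy for TaskService
    List<TaskExecTO> running = taskService.list(JobStatusType.RUNNING);  // enum constant assumed
    taskService.process(JobAction.STOP, 123L);                           // enum constant assumed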


[2/4] syncope git commit: Merge branch '1_2_X' of https://github.com/giacomolm/syncope into 1_2_X

Posted by gi...@apache.org.
Merge branch '1_2_X' of https://github.com/giacomolm/syncope into 1_2_X


Project: http://git-wip-us.apache.org/repos/asf/syncope/repo
Commit: http://git-wip-us.apache.org/repos/asf/syncope/commit/b43c9c86
Tree: http://git-wip-us.apache.org/repos/asf/syncope/tree/b43c9c86
Diff: http://git-wip-us.apache.org/repos/asf/syncope/diff/b43c9c86

Branch: refs/heads/master
Commit: b43c9c86d2004aecdba594c594a7eeee92c866a1
Parents: b68778e 358aef7
Author: giacomolm <gi...@hotmail.it>
Authored: Thu May 21 17:27:40 2015 +0200
Committer: giacomolm <gi...@hotmail.it>
Committed: Thu May 21 17:27:40 2015 +0200

----------------------------------------------------------------------
 .../syncope/common/services/ReportService.java  |  24 +++
 .../syncope/common/services/TaskService.java    |  23 +++
 .../apache/syncope/common/types/JobAction.java  |  29 ++++
 .../syncope/common/types/JobStatusType.java     |  30 ++++
 core/pom.xml                                    |   1 +
 .../syncope/core/quartz/AbstractTaskJob.java    |  25 ++-
 .../org/apache/syncope/core/quartz/TaskJob.java |   4 +-
 .../rest/controller/AbstractJobController.java  | 155 +++++++++++++++++++
 .../core/rest/controller/ReportController.java  |  30 +++-
 .../core/rest/controller/TaskController.java    |  26 +++-
 .../core/services/ReportServiceImpl.java        |  42 +++--
 .../syncope/core/services/TaskServiceImpl.java  |  12 ++
 .../syncope/core/quartz/TestSampleJob.java      |  63 ++++++++
 .../syncope/core/rest/TaskTestITCase.java       |  60 +++++++
 14 files changed, 500 insertions(+), 24 deletions(-)
----------------------------------------------------------------------