Posted to commits@syncope.apache.org by gi...@apache.org on 2015/05/22 09:47:45 UTC

[4/4] syncope git commit: [SYNCOPE-660] Merge from 1_2_X; This closes #5

[SYNCOPE-660] Merge from 1_2_X; This closes #5


Project: http://git-wip-us.apache.org/repos/asf/syncope/repo
Commit: http://git-wip-us.apache.org/repos/asf/syncope/commit/d489e8c5
Tree: http://git-wip-us.apache.org/repos/asf/syncope/tree/d489e8c5
Diff: http://git-wip-us.apache.org/repos/asf/syncope/diff/d489e8c5

Branch: refs/heads/master
Commit: d489e8c59a93bf137d75af4222b5b87f4c7ad925
Parents: d19133e b43c9c8
Author: giacomolm <gi...@hotmail.it>
Authored: Fri May 22 09:46:31 2015 +0200
Committer: giacomolm <gi...@hotmail.it>
Committed: Fri May 22 09:46:31 2015 +0200

----------------------------------------------------------------------
 .../syncope/common/lib/types/JobAction.java     |  29 ++++
 .../syncope/common/lib/types/JobStatusType.java |  30 ++++
 .../common/rest/api/service/ReportService.java  |  24 +++
 .../common/rest/api/service/TaskService.java    |  24 +++
 .../syncope/core/logic/AbstractJobLogic.java    | 155 +++++++++++++++++++
 .../apache/syncope/core/logic/ReportLogic.java  |  30 +++-
 .../apache/syncope/core/logic/TaskLogic.java    |  26 +++-
 .../core/provisioning/api/job/JobNamer.java     |   4 +-
 .../core/provisioning/api/job/TaskJob.java      |   4 +-
 .../provisioning/java/job/AbstractTaskJob.java  |  23 +++
 .../rest/cxf/service/ReportServiceImpl.java     |  12 ++
 .../core/rest/cxf/service/TaskServiceImpl.java  |  13 ++
 .../fit/core/reference/TestSampleJob.java       |  64 ++++++++
 .../fit/core/reference/SchedTaskITCase.java     |  60 +++++++
 14 files changed, 488 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobAction.java
----------------------------------------------------------------------
diff --cc common/lib/src/main/java/org/apache/syncope/common/lib/types/JobAction.java
index 0000000,0000000..920393b
new file mode 100644
--- /dev/null
+++ b/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobAction.java
@@@ -1,0 -1,0 +1,29 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.common.lib.types;
++
++import javax.xml.bind.annotation.XmlEnum;
++
++@XmlEnum
++public enum JobAction {
++
++    START,
++    STOP;
++
++}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobStatusType.java
----------------------------------------------------------------------
diff --cc common/lib/src/main/java/org/apache/syncope/common/lib/types/JobStatusType.java
index 0000000,0000000..3a43807
new file mode 100644
--- /dev/null
+++ b/common/lib/src/main/java/org/apache/syncope/common/lib/types/JobStatusType.java
@@@ -1,0 -1,0 +1,30 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.common.lib.types;
++
++import javax.xml.bind.annotation.XmlEnum;
++
++@XmlEnum
++public enum JobStatusType {
++
++    ALL,
++    RUNNING,
++    SCHEDULED;
++
++}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
----------------------------------------------------------------------
diff --cc common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
index 0037d0a,0000000..fc37192
mode 100644,000000..100644
--- a/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
+++ b/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ReportService.java
@@@ -1,158 -1,0 +1,182 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.common.rest.api.service;
 +
 +import java.util.List;
 +import javax.validation.constraints.NotNull;
 +import javax.ws.rs.BeanParam;
 +import javax.ws.rs.Consumes;
 +import javax.ws.rs.DELETE;
 +import javax.ws.rs.GET;
++import javax.ws.rs.MatrixParam;
 +import javax.ws.rs.POST;
 +import javax.ws.rs.PUT;
 +import javax.ws.rs.Path;
 +import javax.ws.rs.PathParam;
 +import javax.ws.rs.Produces;
 +import javax.ws.rs.QueryParam;
 +import javax.ws.rs.core.MediaType;
 +import javax.ws.rs.core.Response;
 +import org.apache.cxf.jaxrs.model.wadl.Description;
 +import org.apache.cxf.jaxrs.model.wadl.Descriptions;
 +import org.apache.cxf.jaxrs.model.wadl.DocTarget;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.ReportTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.ReportExecExportFormat;
 +import org.apache.syncope.common.lib.wrap.ReportletConfClass;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +
 +/**
 + * REST operations for reports.
 + */
 +@Path("reports")
 +public interface ReportService extends JAXRSService {
 +
 +    /**
 +     * Returns a list of available classes for reportlet configuration.
 +     *
 +     * @return list of available classes for reportlet configuration
 +     */
 +    @GET
 +    @Path("reportletConfClasses")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    List<ReportletConfClass> getReportletConfClasses();
 +
 +    /**
 +     * Returns report with matching key.
 +     *
 +     * @param reportKey key of report to be read
 +     * @return report with matching key
 +     */
 +    @GET
 +    @Path("{reportKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    ReportTO read(@NotNull @PathParam("reportKey") Long reportKey);
 +
 +    /**
 +     * Returns report execution with matching key.
 +     *
 +     * @param executionKey key of report execution to be read
 +     * @return report execution with matching key
 +     */
 +    @GET
 +    @Path("executions/{executionKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    ReportExecTO readExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Returns a paged list of all existing reports matching the given query.
 +     *
 +     * @param listQuery query conditions
 +     * @return paged list of existing reports matching the given query
 +     */
 +    @GET
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    PagedResult<ReportTO> list(@BeanParam ListQuery listQuery);
 +
 +    /**
 +     * Creates a new report.
 +     *
 +     * @param reportTO report to be created
 +     * @return <tt>Response</tt> object featuring <tt>Location</tt> header of created report
 +     */
 +    @Descriptions({
 +        @Description(target = DocTarget.RESPONSE, value = "Featuring <tt>Location</tt> header of created report")
 +    })
 +    @POST
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    Response create(@NotNull ReportTO reportTO);
 +
 +    /**
 +     * Updates report with matching key.
 +     *
 +     * @param reportKey key of report to be updated
 +     * @param reportTO report to be stored
 +     */
 +    @PUT
 +    @Path("{reportKey}")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    void update(@NotNull @PathParam("reportKey") Long reportKey, ReportTO reportTO);
 +
 +    /**
 +     * Deletes report with matching key.
 +     *
 +     * @param reportKey key of report to be deleted
 +     */
 +    @DELETE
 +    @Path("{reportKey}")
 +    void delete(@NotNull @PathParam("reportKey") Long reportKey);
 +
 +    /**
 +     * Deletes report execution with matching key.
 +     *
 +     * @param executionKey key of report execution to be deleted
 +     */
 +    @DELETE
 +    @Path("executions/{executionKey}")
 +    void deleteExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Executes the report with matching key.
 +     *
 +     * @param reportKey key of report to be executed
 +     * @return report execution result
 +     */
 +    @POST
 +    @Path("{reportKey}/execute")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    ReportExecTO execute(@NotNull @PathParam("reportKey") Long reportKey);
 +
 +    /**
 +     * Exports the report execution with matching key in the requested format.
 +     *
 +     * @param executionKey key of report execution to be exported
 +     * @param fmt file-format selection
 +     * @return a stream for content download
 +     */
 +    @GET
 +    @Path("executions/{executionKey}/stream")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    Response exportExecutionResult(@NotNull @PathParam("executionKey") Long executionKey,
 +            @QueryParam("format") ReportExecExportFormat fmt);
++
++    /**
++     * List report jobs of the given type
++     *
++     * @param type of report job
++     * @return List of ReportExecTO
++     */
++    @GET
++    @Path("jobs")
++    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
++    List<ReportExecTO> list(@MatrixParam("type") JobStatusType type);
++
++    /**
++     * Executes a control action on an existing report.
++     *
++     * @param action control action to be performed (START or STOP)
++     * @param reportId key of report to act upon
++     */
++    @POST
++    @Path("{reportId}")
++    void process(@QueryParam("action") JobAction action, @PathParam("reportId") Long reportId);
 +}
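
The two methods added above expose job monitoring and control for reports: GET /reports/jobs;type=... lists executions of report jobs filtered by JobStatusType, while POST /reports/{reportId}?action=... triggers or interrupts the underlying Quartz job. A minimal client sketch, assuming a CXF JAX-RS proxy against a running core (base address, credentials and report key below are placeholders, not part of this commit):

    import java.util.List;
    import org.apache.cxf.jaxrs.client.JAXRSClientFactory;
    import org.apache.syncope.common.lib.to.ReportExecTO;
    import org.apache.syncope.common.lib.types.JobAction;
    import org.apache.syncope.common.lib.types.JobStatusType;
    import org.apache.syncope.common.rest.api.service.ReportService;

    public class ReportJobClientSketch {

        public static void main(final String[] args) {
            // Hypothetical deployment coordinates.
            ReportService reportService = JAXRSClientFactory.create(
                    "http://localhost:9080/syncope/rest", ReportService.class, "admin", "password", null);

            // GET /reports/jobs;type=RUNNING -> report jobs currently being executed
            List<ReportExecTO> running = reportService.list(JobStatusType.RUNNING);

            // POST /reports/1?action=STOP -> interrupt the Quartz job of report 1, if running
            reportService.process(JobAction.STOP, 1L);

            System.out.println("Running report jobs: " + running.size());
        }
    }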

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
----------------------------------------------------------------------
diff --cc common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
index 8240a23,0000000..3d6c3f5
mode 100644,000000..100644
--- a/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
+++ b/common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/TaskService.java
@@@ -1,170 -1,0 +1,194 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.common.rest.api.service;
 +
++import java.util.List;
 +import javax.validation.constraints.NotNull;
 +import javax.ws.rs.BeanParam;
 +import javax.ws.rs.Consumes;
 +import javax.ws.rs.DELETE;
 +import javax.ws.rs.DefaultValue;
 +import javax.ws.rs.GET;
 +import javax.ws.rs.MatrixParam;
 +import javax.ws.rs.POST;
 +import javax.ws.rs.PUT;
 +import javax.ws.rs.Path;
 +import javax.ws.rs.PathParam;
 +import javax.ws.rs.Produces;
 +import javax.ws.rs.QueryParam;
 +import javax.ws.rs.core.MediaType;
 +import javax.ws.rs.core.Response;
 +import org.apache.cxf.jaxrs.model.wadl.Description;
 +import org.apache.cxf.jaxrs.model.wadl.Descriptions;
 +import org.apache.cxf.jaxrs.model.wadl.DocTarget;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.BulkAction;
 +import org.apache.syncope.common.lib.to.BulkActionResult;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.TaskExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +
 +/**
 + * REST operations for tasks.
 + */
 +@Path("tasks")
 +public interface TaskService extends JAXRSService {
 +
 +    /**
 +     * Returns the task matching the given key.
 +     *
 +     * @param taskKey key of task to be read
 +     * @param <T> type of taskTO
 +     * @return task with matching id
 +     */
 +    @GET
 +    @Path("{taskKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    <T extends AbstractTaskTO> T read(@NotNull @PathParam("taskKey") Long taskKey);
 +
 +    /**
 +     * Returns the task execution with the given id.
 +     *
 +     * @param executionKey key of task execution to be read
 +     * @return task execution with matching key
 +     */
 +    @GET
 +    @Path("executions/{executionKey}")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    TaskExecTO readExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Returns a paged list of existing tasks matching type and the given query.
 +     *
 +     * @param taskType type of tasks to be listed
 +     * @param listQuery query conditions
 +     * @param <T> type of taskTO
 +     * @return paged list of existing tasks matching type and the given query
 +     */
 +    @GET
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    <T extends AbstractTaskTO> PagedResult<T> list(
 +            @NotNull @MatrixParam("type") TaskType taskType,
 +            @BeanParam ListQuery listQuery);
 +
 +    /**
 +     * Creates a new task.
 +     *
 +     * @param taskTO task to be created
 +     * @param <T> type of taskTO
 +     * @return <tt>Response</tt> object featuring <tt>Location</tt> header of created task
 +     */
 +    @Descriptions({
 +        @Description(target = DocTarget.RESPONSE, value = "Featuring <tt>Location</tt> header of created task")
 +    })
 +    @POST
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    <T extends SchedTaskTO> Response create(@NotNull T taskTO);
 +
 +    /**
 +     * Updates the task matching the provided key.
 +     *
 +     * @param taskKey key of task to be updated
 +     * @param taskTO updated task to be stored
 +     */
 +    @PUT
 +    @Path("{taskKey}")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    void update(@NotNull @PathParam("taskKey") Long taskKey, @NotNull AbstractTaskTO taskTO);
 +
 +    /**
 +     * Deletes the task matching the provided key.
 +     *
 +     * @param taskKey key of task to be deleted
 +     */
 +    @DELETE
 +    @Path("{taskKey}")
 +    void delete(@NotNull @PathParam("taskKey") Long taskKey);
 +
 +    /**
 +     * Deletes the task execution matching the provided key.
 +     *
 +     * @param executionKey key of task execution to be deleted
 +     */
 +    @DELETE
 +    @Path("executions/{executionKey}")
 +    void deleteExecution(@NotNull @PathParam("executionKey") Long executionKey);
 +
 +    /**
 +     * Executes the task matching the given id.
 +     *
 +     * @param taskKey key of task to be executed
 +     * @param dryRun if true, task will only be simulated
 +     * @return execution report for the task matching the given id
 +     */
 +    @POST
 +    @Path("{taskKey}/execute")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    TaskExecTO execute(@NotNull @PathParam("taskKey") Long taskKey,
 +            @QueryParam("dryRun") @DefaultValue("false") boolean dryRun);
 +
 +    /**
 +     * Reports task execution result.
 +     *
 +     * @param executionKey key of task execution being reported
 +     * @param reportExec execution being reported
 +     */
 +    @POST
 +    @Path("executions/{executionKey}/report")
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    void report(@NotNull @PathParam("executionKey") Long executionKey, @NotNull ReportExecTO reportExec);
 +
 +    /**
 +     * Executes the provided bulk action.
 +     *
 +     * @param bulkAction list of task ids against which the bulk action will be performed.
 +     * @return Bulk action result
 +     */
 +    @POST
 +    @Path("bulk")
 +    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
 +    BulkActionResult bulk(@NotNull BulkAction bulkAction);
++
++    /**
++     * Lists task jobs of the given type.
++     *
++     * @param type type of task jobs to be listed
++     * @return list of executions for task jobs of the given type
++     */
++    @GET
++    @Path("jobs")
++    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
++    List<TaskExecTO> list(@MatrixParam("type") JobStatusType type);
++
++    /**
++     * Executes a control action on an existing task.
++     *
++     * @param action control action to be performed (START or STOP)
++     * @param taskId key of task to act upon
++     */
++    @POST
++    @Path("{taskId}")
++    void process(@QueryParam("action") JobAction action, @PathParam("taskId") Long taskId);
 +}
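
The same additions for tasks: on the wire, the job type travels as a matrix parameter on /tasks/jobs, while the control action is an ordinary query parameter on /tasks/{taskId}. A rough sketch using CXF's WebClient, with the base address, credentials and task key purely illustrative:

    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;
    import org.apache.cxf.jaxrs.client.WebClient;

    public class TaskJobWebClientSketch {

        public static void main(final String[] args) {
            // Hypothetical base address and credentials.
            String base = "http://localhost:9080/syncope/rest";

            // GET /tasks/jobs;type=SCHEDULED -> List<TaskExecTO> rendered as JSON
            Response scheduled = WebClient.create(base, "admin", "password", null)
                    .path("tasks").path("jobs")
                    .matrix("type", "SCHEDULED")
                    .accept(MediaType.APPLICATION_JSON)
                    .get();

            // POST /tasks/123?action=START -> fires the Quartz job bound to task 123
            Response started = WebClient.create(base, "admin", "password", null)
                    .path("tasks").path("{taskId}", 123L)
                    .query("action", "START")
                    .post(null);

            System.out.println(scheduled.getStatus() + " / " + started.getStatus());
        }
    }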

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/logic/src/main/java/org/apache/syncope/core/logic/AbstractJobLogic.java
----------------------------------------------------------------------
diff --cc core/logic/src/main/java/org/apache/syncope/core/logic/AbstractJobLogic.java
index 0000000,0000000..9f20ee1
new file mode 100644
--- /dev/null
+++ b/core/logic/src/main/java/org/apache/syncope/core/logic/AbstractJobLogic.java
@@@ -1,0 -1,0 +1,155 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.syncope.core.logic;
++
++import java.util.ArrayList;
++import java.util.List;
++import org.apache.syncope.common.lib.AbstractBaseBean;
++import org.apache.syncope.common.lib.to.AbstractExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
++import org.quartz.JobExecutionContext;
++import org.quartz.JobKey;
++import org.quartz.Scheduler;
++import org.quartz.SchedulerException;
++import org.quartz.Trigger;
++import org.quartz.impl.matchers.GroupMatcher;
++import org.springframework.beans.factory.annotation.Autowired;
++import org.springframework.scheduling.quartz.SchedulerFactoryBean;
++
++abstract class AbstractJobLogic<T extends AbstractBaseBean> extends AbstractTransactionalLogic<T> {
++
++    @Autowired
++    protected SchedulerFactoryBean scheduler;
++
++    protected abstract Long getKeyFromJobName(final JobKey jobKey);
++
++    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
++        List<E> jobExecTOs = new ArrayList<E>();
++
++        switch (type) {
++            case ALL:
++                try {
++                    for (String groupName : scheduler.getScheduler().getJobGroupNames()) {
++                        for (JobKey jobKey : scheduler.getScheduler().getJobKeys(GroupMatcher.
++                                jobGroupEquals(groupName))) {
++
++                            Long jobId = getKeyFromJobName(jobKey);
++                            if (jobId != null) {
++                                List<? extends Trigger> jobTriggers = scheduler.getScheduler().getTriggersOfJob(jobKey);
++                                if (jobTriggers.size() > 0) {
++                                    for (Trigger t : jobTriggers) {
++                                        E jobExecTO = reference.newInstance();
++                                        jobExecTO.setKey(jobId);
++                                        jobExecTO.
++                                                setStatus(scheduler.getScheduler().getTriggerState(t.getKey()).name());
++                                        jobExecTO.setStartDate(t.getStartTime());
++                                        jobExecTOs.add(jobExecTO);
++                                    }
++                                } else {
++                                    E jobExecTO = reference.newInstance();
++                                    jobExecTO.setKey(jobId);
++                                    jobExecTO.setStatus("Not Scheduled");
++                                    jobExecTOs.add(jobExecTO);
++                                }
++                            }
++                        }
++                    }
++                } catch (SchedulerException ex) {
++                    LOG.debug("Problems while retrieving all scheduled jobs", ex);
++                } catch (InstantiationException ex) {
++                    LOG.debug("Problems while instantiating {}", reference, ex);
++                } catch (IllegalAccessException ex) {
++                    LOG.debug("Problems while accessing {}", reference, ex);
++                }
++                break;
++            case RUNNING:
++                try {
++                    for (JobExecutionContext jec : scheduler.getScheduler().getCurrentlyExecutingJobs()) {
++                        Long jobId = getKeyFromJobName(jec.getJobDetail().getKey());
++                        if (jobId != null) {
++                            E jobExecTO = reference.newInstance();
++                            jobExecTO.setKey(jobId);
++                            jobExecTO.setStatus(scheduler.getScheduler().getTriggerState(jec.getTrigger().getKey()).
++                                    name());
++                            jobExecTO.setStartDate(jec.getFireTime());
++                            jobExecTOs.add(jobExecTO);
++                        }
++                    }
++                } catch (SchedulerException ex) {
++                    LOG.debug("Problems while retrieving all currently executing jobs", ex);
++                } catch (InstantiationException ex) {
++                    LOG.debug("Problems while instantiating {}", reference, ex);
++                } catch (IllegalAccessException ex) {
++                    LOG.debug("Problems while accessing {}", reference, ex);
++                }
++                break;
++            case SCHEDULED:
++                try {
++                    for (String groupName : scheduler.getScheduler().getJobGroupNames()) {
++                        for (JobKey jobKey : scheduler.getScheduler().getJobKeys(GroupMatcher.
++                                jobGroupEquals(groupName))) {
++                            Long jobId = getKeyFromJobName(jobKey);
++                            if (jobId != null) {
++                                List<? extends Trigger> jobTriggers = scheduler.getScheduler().getTriggersOfJob(jobKey);
++                                for (Trigger t : jobTriggers) {
++                                    E jobExecTO = reference.newInstance();
++                                    jobExecTO.setKey(jobId);
++                                    jobExecTO.setStatus(scheduler.getScheduler().getTriggerState(t.getKey()).name());
++                                    jobExecTO.setStartDate(t.getStartTime());
++                                    jobExecTOs.add(jobExecTO);
++                                }
++                            }
++                        }
++                    }
++                } catch (SchedulerException ex) {
++                    LOG.debug("Problems while retrieving all scheduled jobs", ex);
++                } catch (InstantiationException ex) {
++                    LOG.debug("Problems while instantiating {}", reference, ex);
++                } catch (IllegalAccessException ex) {
++                    LOG.debug("Problems while accessing {}", reference, ex);
++                }
++                break;
++            default:
++        }
++        return jobExecTOs;
++    }
++
++    protected void process(final JobAction action, final String jobName) {
++
++        if (jobName != null) {
++            JobKey jobKey = new JobKey(jobName, Scheduler.DEFAULT_GROUP);
++            try {
++                if (scheduler.getScheduler().checkExists(jobKey)) {
++                    switch (action) {
++                        case START:
++                            scheduler.getScheduler().triggerJob(jobKey);
++                            break;
++                        case STOP:
++                            scheduler.getScheduler().interrupt(jobKey);
++                            break;
++                        default:
++                    }
++                }
++            } catch (SchedulerException ex) {
++                LOG.debug("Problems during {} operation on job {}", action, jobName, ex);
++            }
++        }
++    }
++}
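
Worth noting: the STOP branch above delegates to Quartz's Scheduler.interrupt(JobKey), which only reaches jobs that implement org.quartz.InterruptableJob (the AbstractTaskJob and TaskJob changes in this commit are related to that). A generic sketch of the cooperative-interruption pattern such a job follows; class and field names here are illustrative, not the actual AbstractTaskJob code:

    import org.quartz.InterruptableJob;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;
    import org.quartz.UnableToInterruptJobException;

    // Illustrative job honouring Scheduler.interrupt(JobKey).
    public class InterruptibleSampleJob implements InterruptableJob {

        private volatile boolean interrupted = false;

        @Override
        public void execute(final JobExecutionContext context) throws JobExecutionException {
            for (int i = 0; i < 100 && !interrupted; i++) {
                // perform one unit of work, re-checking the flag between units
            }
        }

        @Override
        public void interrupt() throws UnableToInterruptJobException {
            // invoked by the scheduler when AbstractJobLogic.process(STOP, jobName) runs
            interrupted = true;
        }
    }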

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
----------------------------------------------------------------------
diff --cc core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
index 2c09d37,0000000..33e2102
mode 100644,000000..100644
--- a/core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
+++ b/core/logic/src/main/java/org/apache/syncope/core/logic/ReportLogic.java
@@@ -1,404 -1,0 +1,424 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.logic;
 +
 +import java.io.ByteArrayInputStream;
 +import java.io.OutputStream;
 +import java.lang.reflect.Method;
 +import java.util.ArrayList;
 +import java.util.Date;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +import java.util.zip.ZipInputStream;
 +import org.apache.cocoon.optional.pipeline.components.sax.fop.FopSerializer;
 +import org.apache.cocoon.pipeline.NonCachingPipeline;
 +import org.apache.cocoon.pipeline.Pipeline;
 +import org.apache.cocoon.sax.SAXPipelineComponent;
 +import org.apache.cocoon.sax.component.XMLGenerator;
 +import org.apache.cocoon.sax.component.XMLSerializer;
 +import org.apache.cocoon.sax.component.XSLTTransformer;
 +import org.apache.commons.collections4.CollectionUtils;
 +import org.apache.commons.collections4.PredicateUtils;
 +import org.apache.commons.collections4.Transformer;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.commons.lang3.ArrayUtils;
 +import org.apache.syncope.common.lib.SyncopeClientException;
 +import org.apache.syncope.common.lib.report.ReportletConf;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.ReportTO;
 +import org.apache.syncope.common.lib.types.ClientExceptionType;
 +import org.apache.syncope.common.lib.types.ReportExecExportFormat;
 +import org.apache.syncope.common.lib.types.ReportExecStatus;
 +import org.apache.syncope.core.persistence.api.dao.NotFoundException;
 +import org.apache.syncope.core.persistence.api.dao.ReportDAO;
 +import org.apache.syncope.core.persistence.api.dao.ReportExecDAO;
 +import org.apache.syncope.core.persistence.api.dao.search.OrderByClause;
 +import org.apache.syncope.core.persistence.api.entity.EntityFactory;
 +import org.apache.syncope.core.persistence.api.entity.Report;
 +import org.apache.syncope.core.persistence.api.entity.ReportExec;
 +import org.apache.syncope.core.provisioning.api.data.ReportDataBinder;
 +import org.apache.syncope.core.provisioning.api.job.JobNamer;
 +import org.apache.syncope.core.logic.init.ImplementationClassNamesLoader;
 +import org.apache.syncope.core.provisioning.api.job.JobInstanceLoader;
 +import org.apache.syncope.core.logic.report.Reportlet;
 +import org.apache.syncope.core.logic.report.ReportletConfClass;
 +import org.apache.syncope.core.logic.report.TextSerializer;
 +import org.apache.syncope.common.lib.CollectionUtils2;
++import org.apache.syncope.common.lib.to.AbstractExecTO;
 +import org.apache.syncope.common.lib.types.Entitlement;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.xmlgraphics.util.MimeConstants;
 +import org.quartz.JobKey;
 +import org.quartz.Scheduler;
 +import org.springframework.beans.factory.annotation.Autowired;
- import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 +import org.springframework.security.access.prepost.PreAuthorize;
 +import org.springframework.stereotype.Component;
 +import org.springframework.transaction.annotation.Transactional;
 +import org.springframework.util.ClassUtils;
 +
 +@Component
- public class ReportLogic extends AbstractTransactionalLogic<ReportTO> {
++public class ReportLogic extends AbstractJobLogic<ReportTO> {
 +
 +    @Autowired
 +    private ReportDAO reportDAO;
 +
 +    @Autowired
 +    private ReportExecDAO reportExecDAO;
 +
 +    @Autowired
 +    private JobInstanceLoader jobInstanceLoader;
 +
 +    @Autowired
-     private SchedulerFactoryBean scheduler;
- 
-     @Autowired
 +    private ReportDataBinder binder;
 +
 +    @Autowired
 +    private EntityFactory entityFactory;
 +
 +    @Autowired
 +    private ImplementationClassNamesLoader classNamesLoader;
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_CREATE + "')")
 +    public ReportTO create(final ReportTO reportTO) {
 +        Report report = entityFactory.newEntity(Report.class);
 +        binder.getReport(report, reportTO);
 +        report = reportDAO.save(report);
 +
 +        try {
 +            jobInstanceLoader.registerJob(report);
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for report " + report.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getReportTO(report);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_UPDATE + "')")
 +    public ReportTO update(final ReportTO reportTO) {
 +        Report report = reportDAO.find(reportTO.getKey());
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportTO.getKey());
 +        }
 +
 +        binder.getReport(report, reportTO);
 +        report = reportDAO.save(report);
 +
 +        try {
 +            jobInstanceLoader.registerJob(report);
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for report " + report.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getReportTO(report);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
 +    public int count() {
 +        return reportDAO.count();
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
 +    public List<ReportTO> list(final int page, final int size, final List<OrderByClause> orderByClauses) {
 +        return CollectionUtils.collect(reportDAO.findAll(page, size, orderByClauses),
 +                new Transformer<Report, ReportTO>() {
 +
 +                    @Override
 +                    public ReportTO transform(final Report input) {
 +                        return binder.getReportTO(input);
 +                    }
 +                }, new ArrayList<ReportTO>());
 +    }
 +
 +    private Class<? extends ReportletConf> getReportletConfClass(final Class<Reportlet> reportletClass) {
 +        Class<? extends ReportletConf> result = null;
 +
 +        ReportletConfClass annotation = reportletClass.getAnnotation(ReportletConfClass.class);
 +        if (annotation != null) {
 +            result = annotation.value();
 +        }
 +
 +        return result;
 +    }
 +
 +    @SuppressWarnings({ "rawtypes" })
 +    private Set<Class<Reportlet>> getAllReportletClasses() {
 +        return CollectionUtils2.collect(classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.REPORTLET),
 +                new Transformer<String, Class<Reportlet>>() {
 +
 +                    @SuppressWarnings("unchecked")
 +                    @Override
 +                    public Class<Reportlet> transform(final String className) {
 +                        Class<Reportlet> result = null;
 +                        try {
 +                            Class reportletClass = ClassUtils.forName(className, ClassUtils.getDefaultClassLoader());
 +                            result = reportletClass;
 +                        } catch (ClassNotFoundException e) {
 +                            LOG.warn("Could not load class {}", className);
 +                        } catch (LinkageError e) {
 +                            LOG.warn("Could not link class {}", className);
 +                        }
 +
 +                        return result;
 +                    }
 +                },
 +                PredicateUtils.notNullPredicate(), new HashSet<Class<Reportlet>>());
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
 +    public Set<String> getReportletConfClasses() {
 +        return CollectionUtils2.collect(getAllReportletClasses(),
 +                new Transformer<Class<Reportlet>, String>() {
 +
 +                    @Override
 +                    public String transform(final Class<Reportlet> reportletClass) {
 +                        Class<? extends ReportletConf> reportletConfClass = getReportletConfClass(reportletClass);
 +                        return reportletConfClass == null ? null : reportletConfClass.getName();
 +                    }
 +                }, PredicateUtils.notNullPredicate(), new HashSet<String>());
 +    }
 +
 +    public Class<Reportlet> findReportletClassHavingConfClass(final Class<? extends ReportletConf> reportletConfClass) {
 +        Class<Reportlet> result = null;
 +        for (Class<Reportlet> reportletClass : getAllReportletClasses()) {
 +            Class<? extends ReportletConf> found = getReportletConfClass(reportletClass);
 +            if (found != null && found.equals(reportletConfClass)) {
 +                result = reportletClass;
 +            }
 +        }
 +
 +        return result;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    public ReportTO read(final Long reportKey) {
 +        Report report = reportDAO.find(reportKey);
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportKey);
 +        }
 +        return binder.getReportTO(report);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    @Transactional(readOnly = true)
 +    public ReportExecTO readExecution(final Long executionKey) {
 +        ReportExec reportExec = reportExecDAO.find(executionKey);
 +        if (reportExec == null) {
 +            throw new NotFoundException("Report execution " + executionKey);
 +        }
 +        return binder.getReportExecTO(reportExec);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    public void exportExecutionResult(final OutputStream os, final ReportExec reportExec,
 +            final ReportExecExportFormat format) {
 +
 +        // streaming SAX handler from a compressed byte array stream
 +        ByteArrayInputStream bais = new ByteArrayInputStream(reportExec.getExecResult());
 +        ZipInputStream zis = new ZipInputStream(bais);
 +        try {
 +            // a single ZipEntry in the ZipInputStream (see ReportJob)
 +            zis.getNextEntry();
 +
 +            Pipeline<SAXPipelineComponent> pipeline = new NonCachingPipeline<>();
 +            pipeline.addComponent(new XMLGenerator(zis));
 +
 +            Map<String, Object> parameters = new HashMap<>();
 +            parameters.put("status", reportExec.getStatus());
 +            parameters.put("message", reportExec.getMessage());
 +            parameters.put("startDate", reportExec.getStartDate());
 +            parameters.put("endDate", reportExec.getEndDate());
 +
 +            switch (format) {
 +                case HTML:
 +                    XSLTTransformer xsl2html = new XSLTTransformer(getClass().getResource("/report/report2html.xsl"));
 +                    xsl2html.setParameters(parameters);
 +                    pipeline.addComponent(xsl2html);
 +                    pipeline.addComponent(XMLSerializer.createXHTMLSerializer());
 +                    break;
 +
 +                case PDF:
 +                    XSLTTransformer xsl2pdf = new XSLTTransformer(getClass().getResource("/report/report2fo.xsl"));
 +                    xsl2pdf.setParameters(parameters);
 +                    pipeline.addComponent(xsl2pdf);
 +                    pipeline.addComponent(new FopSerializer(MimeConstants.MIME_PDF));
 +                    break;
 +
 +                case RTF:
 +                    XSLTTransformer xsl2rtf = new XSLTTransformer(getClass().getResource("/report/report2fo.xsl"));
 +                    xsl2rtf.setParameters(parameters);
 +                    pipeline.addComponent(xsl2rtf);
 +                    pipeline.addComponent(new FopSerializer(MimeConstants.MIME_RTF));
 +                    break;
 +
 +                case CSV:
 +                    XSLTTransformer xsl2csv = new XSLTTransformer(getClass().getResource("/report/report2csv.xsl"));
 +                    xsl2csv.setParameters(parameters);
 +                    pipeline.addComponent(xsl2csv);
 +                    pipeline.addComponent(new TextSerializer());
 +                    break;
 +
 +                case XML:
 +                default:
 +                    pipeline.addComponent(XMLSerializer.createXMLSerializer());
 +            }
 +
 +            pipeline.setup(os);
 +            pipeline.execute();
 +
 +            LOG.debug("Result of {} successfully exported as {}", reportExec, format);
 +        } catch (Exception e) {
 +            LOG.error("While exporting content", e);
 +        } finally {
 +            IOUtils.closeQuietly(zis);
 +            IOUtils.closeQuietly(bais);
 +        }
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_READ + "')")
 +    public ReportExec getAndCheckReportExec(final Long executionKey) {
 +        ReportExec reportExec = reportExecDAO.find(executionKey);
 +        if (reportExec == null) {
 +            throw new NotFoundException("Report execution " + executionKey);
 +        }
 +        if (!ReportExecStatus.SUCCESS.name().equals(reportExec.getStatus()) || reportExec.getExecResult() == null) {
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidReportExec);
 +            sce.getElements().add(reportExec.getExecResult() == null
 +                    ? "No report data produced"
 +                    : "Report did not run successfully");
 +            throw sce;
 +        }
 +        return reportExec;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_EXECUTE + "')")
 +    public ReportExecTO execute(final Long reportKey) {
 +        Report report = reportDAO.find(reportKey);
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportKey);
 +        }
 +
 +        try {
 +            jobInstanceLoader.registerJob(report);
 +
 +            scheduler.getScheduler().triggerJob(
 +                    new JobKey(JobNamer.getJobName(report), Scheduler.DEFAULT_GROUP));
 +        } catch (Exception e) {
 +            LOG.error("While executing report {}", report, e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        ReportExecTO result = new ReportExecTO();
 +        result.setReport(reportKey);
 +        result.setStartDate(new Date());
 +        result.setStatus(ReportExecStatus.STARTED.name());
 +        result.setMessage("Job fired; waiting for results...");
 +
 +        return result;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_DELETE + "')")
 +    public ReportTO delete(final Long reportKey) {
 +        Report report = reportDAO.find(reportKey);
 +        if (report == null) {
 +            throw new NotFoundException("Report " + reportKey);
 +        }
 +
 +        ReportTO deletedReport = binder.getReportTO(report);
 +        jobInstanceLoader.unregisterJob(report);
 +        reportDAO.delete(report);
 +        return deletedReport;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.REPORT_DELETE + "')")
 +    public ReportExecTO deleteExecution(final Long executionKey) {
 +        ReportExec reportExec = reportExecDAO.find(executionKey);
 +        if (reportExec == null) {
 +            throw new NotFoundException("Report execution " + executionKey);
 +        }
 +
 +        ReportExecTO reportExecToDelete = binder.getReportExecTO(reportExec);
 +        reportExecDAO.delete(reportExec);
 +        return reportExecToDelete;
 +    }
 +
 +    @Override
 +    protected ReportTO resolveReference(final Method method, final Object... args)
 +            throws UnresolvedReferenceException {
 +
 +        Long key = null;
 +
 +        if (ArrayUtils.isNotEmpty(args) && ("create".equals(method.getName())
 +                || "update".equals(method.getName())
 +                || "delete".equals(method.getName()))) {
 +            for (int i = 0; key == null && i < args.length; i++) {
 +                if (args[i] instanceof Long) {
 +                    key = (Long) args[i];
 +                } else if (args[i] instanceof ReportTO) {
 +                    key = ((ReportTO) args[i]).getKey();
 +                }
 +            }
 +        }
 +
 +        if ((key != null) && !key.equals(0L)) {
 +            try {
 +                return binder.getReportTO(reportDAO.find(key));
 +            } catch (Throwable ignore) {
 +                LOG.debug("Unresolved reference", ignore);
 +                throw new UnresolvedReferenceException(ignore);
 +            }
 +        }
 +
 +        throw new UnresolvedReferenceException();
 +    }
++
++    @Override
++    @PreAuthorize("hasRole('" + Entitlement.REPORT_LIST + "')")
++    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
++        return super.list(type, reference);
++    }
++
++    @PreAuthorize("hasRole('" + Entitlement.REPORT_EXECUTE + "')")
++    public void process(final JobAction action, final Long reportKey) {
++        Report report = reportDAO.find(reportKey);
++        if (report == null) {
++            throw new NotFoundException("Report " + reportKey);
++        }
++        String jobName = JobNamer.getJobName(report);
++        process(action, jobName);
++    }
++
++    @Override
++    protected Long getKeyFromJobName(final JobKey jobKey) {
++        return JobNamer.getReportKeyFromJobName(jobKey.getName());
++    }
 +}
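
The two overrides at the end close the loop opened in AbstractJobLogic: process() maps a report key to a Quartz job name via JobNamer.getJobName(report), and getKeyFromJobName() maps the JobKey back to the report key so that list() can populate each AbstractExecTO. A toy round trip, assuming a "reportJob<key>" style naming scheme (the actual convention lives in JobNamer):

    import org.quartz.JobKey;
    import org.quartz.Scheduler;

    public class JobNameRoundTripSketch {

        // Analogue of JobNamer.getJobName(report), under the assumed naming scheme.
        static String jobName(final Long reportKey) {
            return "reportJob" + reportKey;
        }

        // Analogue of JobNamer.getReportKeyFromJobName(name), under the same assumption.
        static Long keyFromJobName(final String name) {
            return name.startsWith("reportJob")
                    ? Long.valueOf(name.substring("reportJob".length()))
                    : null;
        }

        public static void main(final String[] args) {
            JobKey jobKey = new JobKey(jobName(42L), Scheduler.DEFAULT_GROUP);
            System.out.println(keyFromJobName(jobKey.getName())); // prints 42
        }
    }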

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
----------------------------------------------------------------------
diff --cc core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
index 7a64aff,0000000..e12c2da
mode 100644,000000..100644
--- a/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
+++ b/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java
@@@ -1,339 -1,0 +1,363 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.logic;
 +
 +import java.lang.reflect.Method;
 +import java.util.ArrayList;
 +import java.util.Date;
 +import java.util.List;
 +import org.apache.commons.collections4.CollectionUtils;
 +import org.apache.commons.collections4.Transformer;
 +import org.apache.commons.lang3.ArrayUtils;
 +import org.apache.syncope.common.lib.SyncopeClientException;
++import org.apache.syncope.common.lib.to.AbstractExecTO;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.SyncTaskTO;
 +import org.apache.syncope.common.lib.to.TaskExecTO;
 +import org.apache.syncope.common.lib.types.ClientExceptionType;
 +import org.apache.syncope.common.lib.types.Entitlement;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.PropagationMode;
 +import org.apache.syncope.common.lib.types.PropagationTaskExecStatus;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.core.persistence.api.dao.NotFoundException;
 +import org.apache.syncope.core.persistence.api.dao.TaskDAO;
 +import org.apache.syncope.core.persistence.api.dao.TaskExecDAO;
 +import org.apache.syncope.core.persistence.api.dao.search.OrderByClause;
 +import org.apache.syncope.core.persistence.api.entity.task.NotificationTask;
 +import org.apache.syncope.core.persistence.api.entity.task.PropagationTask;
 +import org.apache.syncope.core.persistence.api.entity.task.SchedTask;
 +import org.apache.syncope.core.persistence.api.entity.task.Task;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskUtils;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskUtilsFactory;
 +import org.apache.syncope.core.provisioning.api.data.TaskDataBinder;
 +import org.apache.syncope.core.provisioning.api.job.JobNamer;
 +import org.apache.syncope.core.provisioning.api.job.TaskJob;
 +import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor;
 +import org.apache.syncope.core.provisioning.api.job.JobInstanceLoader;
 +import org.apache.syncope.core.logic.notification.NotificationJob;
 +import org.quartz.JobDataMap;
 +import org.quartz.JobKey;
 +import org.quartz.Scheduler;
 +import org.springframework.beans.factory.annotation.Autowired;
 +import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 +import org.springframework.security.access.prepost.PreAuthorize;
 +import org.springframework.stereotype.Component;
 +
 +@Component
- public class TaskLogic extends AbstractTransactionalLogic<AbstractTaskTO> {
++public class TaskLogic extends AbstractJobLogic<AbstractTaskTO> {
 +
 +    @Autowired
 +    private TaskDAO taskDAO;
 +
 +    @Autowired
 +    private TaskExecDAO taskExecDAO;
 +
 +    @Autowired
 +    private TaskDataBinder binder;
 +
 +    @Autowired
 +    private PropagationTaskExecutor taskExecutor;
 +
 +    @Autowired
 +    private NotificationJob notificationJob;
 +
 +    @Autowired
 +    private JobInstanceLoader jobInstanceLoader;
 +
 +    @Autowired
 +    private SchedulerFactoryBean scheduler;
 +
 +    @Autowired
 +    private TaskUtilsFactory taskUtilsFactory;
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_CREATE + "')")
 +    public <T extends SchedTaskTO> T createSchedTask(final T taskTO) {
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(taskTO);
 +
 +        SchedTask task = binder.createSchedTask(taskTO, taskUtils);
 +        task = taskDAO.save(task);
 +
 +        try {
 +            jobInstanceLoader.registerJob(task, task.getJobClassName(), task.getCronExpression());
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for task " + task.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getTaskTO(task, taskUtils);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_UPDATE + "')")
 +    public SyncTaskTO updateSync(final SyncTaskTO taskTO) {
 +        return updateSched(taskTO);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_UPDATE + "')")
 +    public <T extends SchedTaskTO> T updateSched(final SchedTaskTO taskTO) {
 +        SchedTask task = taskDAO.find(taskTO.getKey());
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskTO.getKey());
 +        }
 +
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(task);
 +
 +        binder.updateSchedTask(task, taskTO, taskUtils);
 +        task = taskDAO.save(task);
 +
 +        try {
 +            jobInstanceLoader.registerJob(task, task.getJobClassName(), task.getCronExpression());
 +        } catch (Exception e) {
 +            LOG.error("While registering quartz job for task " + task.getKey(), e);
 +
 +            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +            sce.getElements().add(e.getMessage());
 +            throw sce;
 +        }
 +
 +        return binder.getTaskTO(task, taskUtils);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_LIST + "')")
 +    public int count(final TaskType taskType) {
 +        return taskDAO.count(taskType);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_LIST + "')")
 +    @SuppressWarnings("unchecked")
 +    public <T extends AbstractTaskTO> List<T> list(final TaskType taskType,
 +            final int page, final int size, final List<OrderByClause> orderByClauses) {
 +
 +        final TaskUtils taskUtils = taskUtilsFactory.getInstance(taskType);
 +
 +        return CollectionUtils.collect(taskDAO.findAll(page, size, orderByClauses, taskType),
 +                new Transformer<Task, T>() {
 +
 +                    @Override
 +                    public T transform(final Task task) {
 +                        return (T) binder.getTaskTO(task, taskUtils);
 +                    }
 +                }, new ArrayList<T>());
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_READ + "')")
 +    public <T extends AbstractTaskTO> T read(final Long taskId) {
 +        Task task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskId);
 +        }
 +        return binder.getTaskTO(task, taskUtilsFactory.getInstance(task));
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_READ + "')")
 +    public TaskExecTO readExecution(final Long executionId) {
 +        TaskExec taskExec = taskExecDAO.find(executionId);
 +        if (taskExec == null) {
 +            throw new NotFoundException("Task execution " + executionId);
 +        }
 +        return binder.getTaskExecTO(taskExec);
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_EXECUTE + "')")
 +    public TaskExecTO execute(final Long taskId, final boolean dryRun) {
 +        Task task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskId);
 +        }
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(task);
 +
 +        TaskExecTO result = null;
 +        switch (taskUtils.getType()) {
 +            case PROPAGATION:
 +                final TaskExec propExec = taskExecutor.execute((PropagationTask) task);
 +                result = binder.getTaskExecTO(propExec);
 +                break;
 +
 +            case NOTIFICATION:
 +                final TaskExec notExec = notificationJob.executeSingle((NotificationTask) task);
 +                result = binder.getTaskExecTO(notExec);
 +                break;
 +
 +            case SCHEDULED:
 +            case SYNCHRONIZATION:
 +            case PUSH:
 +                try {
 +                    jobInstanceLoader.registerJob(task,
 +                            ((SchedTask) task).getJobClassName(),
 +                            ((SchedTask) task).getCronExpression());
 +
 +                    JobDataMap map = new JobDataMap();
 +                    map.put(TaskJob.DRY_RUN_JOBDETAIL_KEY, dryRun);
 +
 +                    scheduler.getScheduler().triggerJob(
 +                            new JobKey(JobNamer.getJobName(task), Scheduler.DEFAULT_GROUP), map);
 +                } catch (Exception e) {
 +                    LOG.error("While executing task {}", task, e);
 +
 +                    SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
 +                    sce.getElements().add(e.getMessage());
 +                    throw sce;
 +                }
 +
 +                result = new TaskExecTO();
 +                result.setTask(taskId);
 +                result.setStartDate(new Date());
 +                result.setStatus("JOB_FIRED");
 +                result.setMessage("Job fired; waiting for results...");
 +                break;
 +
 +            default:
 +        }
 +
 +        return result;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_READ + "')")
 +    public TaskExecTO report(final Long executionId, final PropagationTaskExecStatus status, final String message) {
 +        TaskExec exec = taskExecDAO.find(executionId);
 +        if (exec == null) {
 +            throw new NotFoundException("Task execution " + executionId);
 +        }
 +
 +        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidPropagationTaskExecReport);
 +
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(exec.getTask());
 +        if (TaskType.PROPAGATION == taskUtils.getType()) {
 +            PropagationTask task = (PropagationTask) exec.getTask();
 +            if (task.getPropagationMode() != PropagationMode.TWO_PHASES) {
 +                sce.getElements().add("Propagation mode: " + task.getPropagationMode());
 +            }
 +        } else {
 +            sce.getElements().add("Task type: " + taskUtils.getType());
 +        }
 +
 +        switch (status) {
 +            case SUCCESS:
 +            case FAILURE:
 +                break;
 +
 +            case CREATED:
 +            case SUBMITTED:
 +            case UNSUBMITTED:
 +                sce.getElements().add("Execution status to be set: " + status);
 +                break;
 +
 +            default:
 +        }
 +
 +        if (!sce.isEmpty()) {
 +            throw sce;
 +        }
 +
 +        exec.setStatus(status.toString());
 +        exec.setMessage(message);
 +        return binder.getTaskExecTO(taskExecDAO.save(exec));
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_DELETE + "')")
 +    public <T extends AbstractTaskTO> T delete(final Long taskId) {
 +        Task task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new NotFoundException("Task " + taskId);
 +        }
 +        TaskUtils taskUtils = taskUtilsFactory.getInstance(task);
 +
 +        T taskToDelete = binder.getTaskTO(task, taskUtils);
 +
 +        if (TaskType.SCHEDULED == taskUtils.getType()
 +                || TaskType.SYNCHRONIZATION == taskUtils.getType()
 +                || TaskType.PUSH == taskUtils.getType()) {
 +
 +            jobInstanceLoader.unregisterJob(task);
 +        }
 +
 +        taskDAO.delete(task);
 +        return taskToDelete;
 +    }
 +
 +    @PreAuthorize("hasRole('" + Entitlement.TASK_DELETE + "')")
 +    public TaskExecTO deleteExecution(final Long executionId) {
 +        TaskExec taskExec = taskExecDAO.find(executionId);
 +        if (taskExec == null) {
 +            throw new NotFoundException("Task execution " + executionId);
 +        }
 +
 +        TaskExecTO taskExecutionToDelete = binder.getTaskExecTO(taskExec);
 +        taskExecDAO.delete(taskExec);
 +        return taskExecutionToDelete;
 +    }
 +
 +    @Override
 +    protected AbstractTaskTO resolveReference(final Method method, final Object... args)
 +            throws UnresolvedReferenceException {
 +
 +        Long key = null;
 +
 +        if (ArrayUtils.isNotEmpty(args)
 +                && !"deleteExecution".equals(method.getName()) && !"readExecution".equals(method.getName())) {
 +
 +            for (int i = 0; key == null && i < args.length; i++) {
 +                if (args[i] instanceof Long) {
 +                    key = (Long) args[i];
 +                } else if (args[i] instanceof AbstractTaskTO) {
 +                    key = ((AbstractTaskTO) args[i]).getKey();
 +                }
 +            }
 +        }
 +
 +        if ((key != null) && !key.equals(0L)) {
 +            try {
 +                final Task task = taskDAO.find(key);
 +                return binder.getTaskTO(task, taskUtilsFactory.getInstance(task));
 +            } catch (Throwable ignore) {
 +                LOG.debug("Unresolved reference", ignore);
 +                throw new UnresolvedReferenceException(ignore);
 +            }
 +        }
 +
 +        throw new UnresolvedReferenceException();
 +    }
++
++    @Override
++    @PreAuthorize("hasRole('" + Entitlement.TASK_LIST + "')")
++    public <E extends AbstractExecTO> List<E> list(final JobStatusType type, final Class<E> reference) {
++        return super.list(type, reference);
++    }
++
++    @PreAuthorize("hasRole('" + Entitlement.TASK_EXECUTE + "')")
++    public void process(final JobAction action, final Long taskId) {
++        Task task = taskDAO.find(taskId);
++        if (task == null) {
++            throw new NotFoundException("Task " + taskId);
++        }
++        String jobName = JobNamer.getJobName(task);
++        process(action, jobName);
++    }
++
++    @Override
++    protected Long getKeyFromJobName(final JobKey jobKey) {
++        return JobNamer.getTaskKeyFromJobName(jobKey.getName());
++    }
 +}
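
The new process(JobAction, taskId) entry point above resolves the task to its Quartz job name via JobNamer (see the next file) and delegates to the shared job-handling superclass, which is part of this commit but not shown in this excerpt. As a rough sketch of what that delegation is assumed to amount to at the Quartz level; the class and method names below are purely illustrative and not part of the commit:

    import org.apache.syncope.common.lib.types.JobAction;
    import org.quartz.JobKey;
    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;

    // Hypothetical helper, not part of the commit: maps a JobAction onto plain Quartz calls.
    public final class JobActionSketch {

        private JobActionSketch() {
            // static sketch only
        }

        public static void process(final Scheduler scheduler, final JobAction action, final String jobName)
                throws SchedulerException {

            JobKey jobKey = new JobKey(jobName, Scheduler.DEFAULT_GROUP);
            switch (action) {
                case START:
                    // fire the already registered job immediately
                    scheduler.triggerJob(jobKey);
                    break;

                case STOP:
                    // reaches TaskJob#interrupt() for jobs implementing InterruptableJob
                    scheduler.interrupt(jobKey);
                    break;

                default:
            }
        }
    }
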

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
----------------------------------------------------------------------
diff --cc core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
index fe64a7b,0000000..9ddf563
mode 100644,000000..100644
--- a/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
+++ b/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/JobNamer.java
@@@ -1,72 -1,0 +1,72 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.provisioning.api.job;
 +
 +import java.util.regex.Matcher;
 +import java.util.regex.Pattern;
 +import org.apache.syncope.core.persistence.api.entity.Report;
 +import org.apache.syncope.core.persistence.api.entity.task.Task;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +public final class JobNamer {
 +
 +    private static final Logger LOG = LoggerFactory.getLogger(JobNamer.class);
 +
 +    private static Long getIdFromJobName(final String name, final String pattern, final int prefixLength) {
 +        Long result = null;
 +
 +        Matcher jobMatcher = Pattern.compile(pattern).matcher(name);
 +        if (jobMatcher.matches()) {
 +            try {
 +                result = Long.valueOf(name.substring(prefixLength));
 +            } catch (NumberFormatException e) {
 +                LOG.error("Unparsable id: {}", name.substring(prefixLength), e);
 +            }
 +        }
 +
 +        return result;
 +    }
 +
-     public static Long getTaskIdFromJobName(final String name) {
++    public static Long getTaskKeyFromJobName(final String name) {
 +        return getIdFromJobName(name, "taskJob[0-9]+", 7);
 +    }
 +
-     public static Long getReportIdFromJobName(final String name) {
++    public static Long getReportKeyFromJobName(final String name) {
 +        return getIdFromJobName(name, "reportJob[0-9]+", 9);
 +    }
 +
 +    public static String getJobName(final Task task) {
 +        return task == null
 +                ? "taskNotificationJob"
 +                : "taskJob" + task.getKey();
 +    }
 +
 +    public static String getJobName(final Report report) {
 +        return "reportJob" + report.getKey();
 +    }
 +
 +    public static String getTriggerName(final String jobName) {
 +        return "Trigger_" + jobName;
 +    }
 +
 +    private JobNamer() {
 +        // private constructor for static utility class
 +    }
 +}
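
A short usage note on the renamed accessors above; the expected values are derived from the patterns and prefix lengths in the code, not taken from the commit itself:

    // Round trip between task/report keys and Quartz job names.
    Long taskKey = JobNamer.getTaskKeyFromJobName("taskJob123");        // 123
    Long reportKey = JobNamer.getReportKeyFromJobName("reportJob42");   // 42
    Long none = JobNamer.getTaskKeyFromJobName("taskNotificationJob");  // null: no numeric suffix
    String trigger = JobNamer.getTriggerName("taskJob123");             // "Trigger_taskJob123"
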

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
----------------------------------------------------------------------
diff --cc core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
index b6efd53,0000000..3df89bc
mode 100644,000000..100644
--- a/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
+++ b/core/provisioning-api/src/main/java/org/apache/syncope/core/provisioning/api/job/TaskJob.java
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.provisioning.api.job;
 +
 +import org.quartz.DisallowConcurrentExecution;
- import org.quartz.Job;
++import org.quartz.InterruptableJob;
 +
 +/**
 + * Interface for Quartz jobs bound to a given Task.
 + */
 +@DisallowConcurrentExecution
- public interface TaskJob extends Job {
++public interface TaskJob extends InterruptableJob {
 +
 +    String DRY_RUN_JOBDETAIL_KEY = "dryRun";
 +
 +    /**
 +     * Task execution status.
 +     */
 +    public enum Status {
 +
 +        SUCCESS,
 +        FAILURE
 +
 +    }
 +
 +    void setTaskId(Long taskId);
 +}
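
Since TaskJob now extends InterruptableJob instead of Job, every implementation has to supply an interrupt() method alongside execute() and setTaskId(). A minimal, purely illustrative skeleton of the new contract (in practice concrete jobs extend AbstractTaskJob, shown next):

    import org.apache.syncope.core.provisioning.api.job.TaskJob;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;
    import org.quartz.UnableToInterruptJobException;

    // Illustrative only: the smallest class satisfying the updated TaskJob contract.
    public class NoopTaskJob implements TaskJob {

        private Long taskId;

        @Override
        public void setTaskId(final Long taskId) {
            this.taskId = taskId;
        }

        @Override
        public void execute(final JobExecutionContext context) throws JobExecutionException {
            // the dry-run flag is passed by TaskLogic#execute through the JobDataMap
            boolean dryRun = context.getMergedJobDataMap().getBoolean(DRY_RUN_JOBDETAIL_KEY);
            // ... perform (or skip, when dryRun is true) the work for 'taskId' ...
        }

        @Override
        public void interrupt() throws UnableToInterruptJobException {
            // nothing long-running to stop in this skeleton
        }
    }
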

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
----------------------------------------------------------------------
diff --cc core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
index 6659d7d,0000000..688ca99
mode 100644,000000..100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/AbstractTaskJob.java
@@@ -1,181 -1,0 +1,204 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.provisioning.java.job;
 +
++import java.text.SimpleDateFormat;
 +import java.util.Date;
++import java.util.Locale;
++import java.util.concurrent.atomic.AtomicReference;
++import org.apache.syncope.common.lib.SyncopeConstants;
 +import org.apache.syncope.common.lib.types.AuditElements;
 +import org.apache.syncope.common.lib.types.AuditElements.Result;
 +import org.apache.syncope.core.persistence.api.dao.TaskDAO;
 +import org.apache.syncope.core.persistence.api.dao.TaskExecDAO;
 +import org.apache.syncope.core.persistence.api.entity.EntityFactory;
 +import org.apache.syncope.core.persistence.api.entity.task.Task;
 +import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
 +import org.apache.syncope.core.provisioning.api.job.TaskJob;
 +import org.apache.syncope.core.misc.AuditManager;
 +import org.apache.syncope.core.misc.ExceptionUtils2;
 +import org.apache.syncope.core.provisioning.api.notification.NotificationManager;
 +import org.quartz.DisallowConcurrentExecution;
 +import org.quartz.JobExecutionContext;
 +import org.quartz.JobExecutionException;
++import org.quartz.UnableToInterruptJobException;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +import org.springframework.beans.factory.annotation.Autowired;
 +
 +/**
 + * Abstract job implementation that delegates the actual job execution to concrete implementations, while providing
 + * some base features.
 + * <strong>Extending this class does not provide transaction management support.</strong><br/>
 + * Extend <tt>AbstractTransactionalTaskJob</tt> for this purpose.
 + *
 + * @see AbstractTransactionalTaskJob
 + */
 +@DisallowConcurrentExecution
 +public abstract class AbstractTaskJob implements TaskJob {
 +
 +    /**
 +     * Task execution status.
 +     */
 +    public enum Status {
 +
 +        SUCCESS,
 +        FAILURE
 +
 +    }
 +
 +    /**
 +     * Logger.
 +     */
 +    protected static final Logger LOG = LoggerFactory.getLogger(AbstractTaskJob.class);
 +
 +    /**
 +     * Task DAO.
 +     */
 +    @Autowired
 +    protected TaskDAO taskDAO;
 +
 +    /**
 +     * Task execution DAO.
 +     */
 +    @Autowired
 +    private TaskExecDAO taskExecDAO;
 +
 +    /**
 +     * Notification manager.
 +     */
 +    @Autowired
 +    private NotificationManager notificationManager;
 +
 +    /**
 +     * Audit manager.
 +     */
 +    @Autowired
 +    private AuditManager auditManager;
 +
 +    @Autowired
 +    private EntityFactory entityFactory;
 +
 +    /**
 +     * Id, set by the caller, for identifying the task to be executed.
 +     */
 +    protected Long taskId;
 +
 +    /**
 +     * The actual task to be executed.
 +     */
 +    protected Task task;
 +
 +    /**
++     * The thread currently executing this job's task, if any.
++     */
++    protected AtomicReference<Thread> runningThread = new AtomicReference<Thread>();
++
++    /**
 +     * Task id setter.
 +     *
 +     * @param taskId to be set
 +     */
 +    @Override
 +    public void setTaskId(final Long taskId) {
 +        this.taskId = taskId;
 +    }
 +
 +    @Override
 +    public void execute(final JobExecutionContext context) throws JobExecutionException {
++        this.runningThread.set(Thread.currentThread());
 +        task = taskDAO.find(taskId);
 +        if (task == null) {
 +            throw new JobExecutionException("Task " + taskId + " not found");
 +        }
 +
 +        TaskExec execution = entityFactory.newEntity(TaskExec.class);
 +        execution.setStartDate(new Date());
 +        execution.setTask(task);
 +
 +        Result result;
 +
 +        try {
 +            execution.setMessage(doExecute(context.getMergedJobDataMap().getBoolean(DRY_RUN_JOBDETAIL_KEY)));
 +            execution.setStatus(Status.SUCCESS.name());
 +            result = Result.SUCCESS;
 +        } catch (JobExecutionException e) {
 +            LOG.error("While executing task " + taskId, e);
 +            result = Result.FAILURE;
 +
 +            execution.setMessage(ExceptionUtils2.getFullStackTrace(e));
 +            execution.setStatus(Status.FAILURE.name());
 +        }
 +        execution.setEndDate(new Date());
 +
 +        if (hasToBeRegistered(execution)) {
 +            taskExecDAO.saveAndAdd(taskId, execution);
 +        }
 +        task = taskDAO.save(task);
 +
 +        notificationManager.createTasks(
 +                AuditElements.EventCategoryType.TASK,
 +                this.getClass().getSimpleName(),
 +                null,
 +                this.getClass().getSimpleName(), // searching for the before object is too expensive ...
 +                result,
 +                task,
 +                execution);
 +
 +        auditManager.audit(
 +                AuditElements.EventCategoryType.TASK,
 +                task.getClass().getSimpleName(),
 +                null,
 +                null, // searching for the before object is too expensive ...
 +                result,
 +                task,
 +                (Object[]) null);
 +    }
 +
 +    /**
 +     * The actual execution, delegated to child classes.
 +     *
 +     * @param dryRun whether to actually touch the data
 +     * @return the task execution status to be set
 +     * @throws JobExecutionException if anything goes wrong
 +     */
 +    protected abstract String doExecute(boolean dryRun) throws JobExecutionException;
 +
 +    /**
 +     * Template method to determine whether this job's task execution has to be persisted or not.
 +     *
 +     * @param execution task execution
 +     * @return whether the execution has to be persisted or not
 +     */
 +    protected boolean hasToBeRegistered(final TaskExec execution) {
 +        return false;
 +    }
++
++    @Override
++    public void interrupt() throws UnableToInterruptJobException {
++        Thread thread = this.runningThread.getAndSet(null);
++        if (thread != null) {
++            LOG.info("Interrupting job at {}",
++                    new SimpleDateFormat(SyncopeConstants.DEFAULT_DATE_PATTERN, Locale.getDefault()).format(new Date()));
++            thread.interrupt();
++        } else {
++            LOG.warn("Could not find the thread of the current job execution: unable to interrupt");
++        }
++    }
 +}
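
interrupt() above only sets the interrupted flag on the thread recorded at the start of execute(); a long-running doExecute() has to check that flag for the STOP action to have any visible effect. An illustrative subclass (not part of the commit) making that cooperation explicit:

    import org.apache.syncope.core.provisioning.java.job.AbstractTaskJob;
    import org.quartz.JobExecutionException;

    // Illustrative only: a doExecute() that honours AbstractTaskJob#interrupt().
    public class LongRunningSampleJob extends AbstractTaskJob {

        @Override
        protected String doExecute(final boolean dryRun) throws JobExecutionException {
            for (int i = 0; i < 1000; i++) {
                if (Thread.currentThread().isInterrupted()) {
                    // set by interrupt() when TaskLogic#process(JobAction.STOP, ...) is invoked
                    return "Interrupted after " + i + " steps";
                }
                if (!dryRun) {
                    // ... one unit of real work ...
                }
            }
            return "Completed all steps";
        }
    }
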

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
----------------------------------------------------------------------
diff --cc core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
index dfde009,0000000..4fe02ba
mode 100644,000000..100644
--- a/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
+++ b/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ReportServiceImpl.java
@@@ -1,122 -1,0 +1,134 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.rest.cxf.service;
 +
 +import java.io.IOException;
 +import java.io.OutputStream;
 +import java.net.URI;
 +import java.util.List;
 +import javax.ws.rs.core.HttpHeaders;
 +import javax.ws.rs.core.Response;
 +import javax.ws.rs.core.StreamingOutput;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.ReportTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.ReportExecExportFormat;
 +import org.apache.syncope.common.lib.wrap.ReportletConfClass;
 +import org.apache.syncope.common.rest.api.CollectionWrapper;
 +import org.apache.syncope.common.rest.api.RESTHeaders;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +import org.apache.syncope.common.rest.api.service.ReportService;
 +import org.apache.syncope.core.logic.ReportLogic;
 +import org.apache.syncope.core.persistence.api.entity.ReportExec;
 +import org.springframework.beans.factory.annotation.Autowired;
 +import org.springframework.stereotype.Service;
 +
 +@Service
 +public class ReportServiceImpl extends AbstractServiceImpl implements ReportService {
 +
 +    @Autowired
 +    private ReportLogic logic;
 +
 +    @Override
 +    public Response create(final ReportTO reportTO) {
 +        ReportTO createdReportTO = logic.create(reportTO);
 +        URI location = uriInfo.getAbsolutePathBuilder().path(String.valueOf(createdReportTO.getKey())).build();
 +        return Response.created(location).
 +                header(RESTHeaders.RESOURCE_ID, createdReportTO.getKey()).
 +                build();
 +    }
 +
 +    @Override
 +    public void update(final Long reportKey, final ReportTO reportTO) {
 +        reportTO.setKey(reportKey);
 +        logic.update(reportTO);
 +    }
 +
 +    @Override
 +    public PagedResult<ReportTO> list(final ListQuery listQuery) {
 +        return buildPagedResult(
 +                logic.list(
 +                        listQuery.getPage(),
 +                        listQuery.getSize(),
 +                        getOrderByClauses(listQuery.getOrderBy())),
 +                listQuery.getPage(),
 +                listQuery.getSize(),
 +                logic.count());
 +    }
 +
 +    @Override
 +    public List<ReportletConfClass> getReportletConfClasses() {
 +        return CollectionWrapper.wrap(logic.getReportletConfClasses(), ReportletConfClass.class);
 +    }
 +
 +    @Override
 +    public ReportTO read(final Long reportKey) {
 +        return logic.read(reportKey);
 +    }
 +
 +    @Override
 +    public ReportExecTO readExecution(final Long executionKey) {
 +        return logic.readExecution(executionKey);
 +    }
 +
 +    @Override
 +    public Response exportExecutionResult(final Long executionKey, final ReportExecExportFormat fmt) {
 +        final ReportExecExportFormat format = (fmt == null) ? ReportExecExportFormat.XML : fmt;
 +        final ReportExec reportExec = logic.getAndCheckReportExec(executionKey);
 +        StreamingOutput sout = new StreamingOutput() {
 +
 +            @Override
 +            public void write(final OutputStream os) throws IOException {
 +                logic.exportExecutionResult(os, reportExec, format);
 +            }
 +        };
 +        String disposition = "attachment; filename=" + reportExec.getReport().getName() + "." + format.name().
 +                toLowerCase();
 +        return Response.ok(sout).
 +                header(HttpHeaders.CONTENT_DISPOSITION, disposition).
 +                build();
 +    }
 +
 +    @Override
 +    public ReportExecTO execute(final Long reportKey) {
 +        return logic.execute(reportKey);
 +    }
 +
 +    @Override
 +    public void delete(final Long reportKey) {
 +        logic.delete(reportKey);
 +    }
 +
 +    @Override
 +    public void deleteExecution(final Long executionKey) {
 +        logic.deleteExecution(executionKey);
 +    }
++
++    @Override
++    public List<ReportExecTO> list(final JobStatusType type) {
++        return logic.list(type, ReportExecTO.class);
++    }
++
++    @Override
++    public void process(final JobAction action, final Long reportId) {
++        logic.process(action, reportId);
++    }
 +}

http://git-wip-us.apache.org/repos/asf/syncope/blob/d489e8c5/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
----------------------------------------------------------------------
diff --cc core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
index 83edcc0,0000000..957891a
mode 100644,000000..100644
--- a/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
+++ b/core/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/TaskServiceImpl.java
@@@ -1,166 -1,0 +1,179 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.syncope.core.rest.cxf.service;
 +
 +import java.net.URI;
++import java.util.List;
 +import javax.ws.rs.BadRequestException;
 +import javax.ws.rs.core.Response;
 +import org.apache.syncope.common.lib.to.AbstractTaskTO;
 +import org.apache.syncope.common.lib.to.BulkAction;
 +import org.apache.syncope.common.lib.to.BulkActionResult;
 +import org.apache.syncope.common.lib.to.PagedResult;
 +import org.apache.syncope.common.lib.to.PushTaskTO;
 +import org.apache.syncope.common.lib.to.ReportExecTO;
 +import org.apache.syncope.common.lib.to.SchedTaskTO;
 +import org.apache.syncope.common.lib.to.SyncTaskTO;
 +import org.apache.syncope.common.lib.to.TaskExecTO;
++import org.apache.syncope.common.lib.types.JobAction;
++import org.apache.syncope.common.lib.types.JobStatusType;
 +import org.apache.syncope.common.lib.types.PropagationTaskExecStatus;
 +import org.apache.syncope.common.lib.types.TaskType;
 +import org.apache.syncope.common.rest.api.RESTHeaders;
 +import org.apache.syncope.common.rest.api.beans.ListQuery;
 +import org.apache.syncope.common.rest.api.service.TaskService;
 +import org.apache.syncope.core.logic.TaskLogic;
 +import org.springframework.beans.factory.annotation.Autowired;
 +import org.springframework.stereotype.Service;
 +
 +@Service
 +public class TaskServiceImpl extends AbstractServiceImpl implements TaskService {
 +
 +    @Autowired
 +    private TaskLogic logic;
 +
 +    @Override
 +    public <T extends SchedTaskTO> Response create(final T taskTO) {
 +        T createdTask;
 +        if (taskTO instanceof SyncTaskTO || taskTO instanceof PushTaskTO || taskTO instanceof SchedTaskTO) {
 +            createdTask = logic.createSchedTask(taskTO);
 +        } else {
 +            throw new BadRequestException();
 +        }
 +
 +        URI location = uriInfo.getAbsolutePathBuilder().path(String.valueOf(createdTask.getKey())).build();
 +        return Response.created(location).
 +                header(RESTHeaders.RESOURCE_ID, createdTask.getKey()).
 +                build();
 +    }
 +
 +    @Override
 +    public void delete(final Long taskKey) {
 +        logic.delete(taskKey);
 +    }
 +
 +    @Override
 +    public void deleteExecution(final Long executionKey) {
 +        logic.deleteExecution(executionKey);
 +    }
 +
 +    @Override
 +    public TaskExecTO execute(final Long taskKey, final boolean dryRun) {
 +        return logic.execute(taskKey, dryRun);
 +    }
 +
 +    @SuppressWarnings("unchecked")
 +    @Override
 +    public <T extends AbstractTaskTO> PagedResult<T> list(final TaskType taskType, final ListQuery listQuery) {
 +        return (PagedResult<T>) buildPagedResult(
 +                logic.list(
 +                        taskType,
 +                        listQuery.getPage(),
 +                        listQuery.getSize(),
 +                        getOrderByClauses(listQuery.getOrderBy())),
 +                listQuery.getPage(),
 +                listQuery.getSize(),
 +                logic.count(taskType));
 +    }
 +
 +    @Override
 +    public <T extends AbstractTaskTO> T read(final Long taskKey) {
 +        return logic.read(taskKey);
 +    }
 +
 +    @Override
 +    public TaskExecTO readExecution(final Long executionKey) {
 +        return logic.readExecution(executionKey);
 +    }
 +
 +    @Override
 +    public void report(final Long executionKey, final ReportExecTO reportExec) {
 +        reportExec.setKey(executionKey);
 +        logic.report(
 +                executionKey, PropagationTaskExecStatus.fromString(reportExec.getStatus()), reportExec.getMessage());
 +    }
 +
 +    @Override
 +    public void update(final Long taskKey, final AbstractTaskTO taskTO) {
 +        taskTO.setKey(taskKey);
 +        if (taskTO instanceof SyncTaskTO) {
 +            logic.updateSync((SyncTaskTO) taskTO);
 +        } else if (taskTO instanceof SchedTaskTO) {
 +            logic.updateSched((SchedTaskTO) taskTO);
 +        } else {
 +            throw new BadRequestException();
 +        }
 +    }
 +
 +    @Override
 +    public BulkActionResult bulk(final BulkAction bulkAction) {
 +        BulkActionResult result = new BulkActionResult();
 +
 +        switch (bulkAction.getOperation()) {
 +            case DELETE:
 +                for (String taskKey : bulkAction.getTargets()) {
 +                    try {
 +                        result.add(logic.delete(Long.valueOf(taskKey)).getKey(), BulkActionResult.Status.SUCCESS);
 +                    } catch (Exception e) {
 +                        LOG.error("Error performing delete for task {}", taskKey, e);
 +                        result.add(taskKey, BulkActionResult.Status.FAILURE);
 +                    }
 +                }
 +                break;
 +
 +            case DRYRUN:
 +                for (String taskKey : bulkAction.getTargets()) {
 +                    try {
 +                        logic.execute(Long.valueOf(taskKey), true);
 +                        result.add(taskKey, BulkActionResult.Status.SUCCESS);
 +                    } catch (Exception e) {
 +                        LOG.error("Error performing dryrun for task {}", taskKey, e);
 +                        result.add(taskKey, BulkActionResult.Status.FAILURE);
 +                    }
 +                }
 +                break;
 +
 +            case EXECUTE:
 +                for (String taskKey : bulkAction.getTargets()) {
 +                    try {
 +                        logic.execute(Long.valueOf(taskKey), false);
 +                        result.add(taskKey, BulkActionResult.Status.SUCCESS);
 +                    } catch (Exception e) {
 +                        LOG.error("Error performing execute for task {}", taskKey, e);
 +                        result.add(taskKey, BulkActionResult.Status.FAILURE);
 +                    }
 +                }
 +                break;
 +
 +            default:
 +        }
 +
 +        return result;
 +    }
++
++    @Override
++    public List<TaskExecTO> list(final JobStatusType type) {
++        return logic.list(type, TaskExecTO.class);
++    }
++
++    @Override
++    public void process(final JobAction action, final Long taskId) {
++        logic.process(action, taskId);
++    }
 +}
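
From a client holding a TaskService proxy (here assumed to be available as 'taskService', obtained through the usual Syncope client library), the two operations added above can be exercised roughly as follows; this is an assumed usage sketch, not code from the commit:

    // Inspect task job executions grouped by job status.
    for (JobStatusType type : JobStatusType.values()) {
        List<TaskExecTO> execs = taskService.list(type);
        // ... render or assert on 'execs' ...
    }

    // Stop the Quartz job of task 123 (ends up in TaskJob#interrupt()), then fire it again.
    taskService.process(JobAction.STOP, 123L);
    taskService.process(JobAction.START, 123L);
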