Posted to notifications@shardingsphere.apache.org by zh...@apache.org on 2020/07/13 14:58:21 UTC

[shardingsphere-elasticjob-lite] branch master updated: Maven install throw exception (#1035) (#1043)

This is an automated email from the ASF dual-hosted git repository.

zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere-elasticjob-lite.git


The following commit(s) were added to refs/heads/master by this push:
     new 4225dab   Maven install throw exception (#1035) (#1043)
4225dab is described below

commit 4225dab232b43e80b96dbe092514b2d30e24fb5a
Author: Tboy <gu...@immomo.com>
AuthorDate: Mon Jul 13 22:58:09 2020 +0800

     Maven install throw exception (#1035) (#1043)
---
 .../cloud/executor/DaemonTaskScheduler.java        | 32 +++++++++++++++-------
 1 file changed, 22 insertions(+), 10 deletions(-)

diff --git a/elasticjob-cloud/elasticjob-cloud-executor/src/main/java/org/apache/shardingsphere/elasticjob/cloud/executor/DaemonTaskScheduler.java b/elasticjob-cloud/elasticjob-cloud-executor/src/main/java/org/apache/shardingsphere/elasticjob/cloud/executor/DaemonTaskScheduler.java
index d038f2b..758b47d 100755
--- a/elasticjob-cloud/elasticjob-cloud-executor/src/main/java/org/apache/shardingsphere/elasticjob/cloud/executor/DaemonTaskScheduler.java
+++ b/elasticjob-cloud/elasticjob-cloud-executor/src/main/java/org/apache/shardingsphere/elasticjob/cloud/executor/DaemonTaskScheduler.java
@@ -160,6 +160,26 @@ public final class DaemonTaskScheduler {
         @Setter
         private Protos.TaskID taskId;
         
+        private volatile ElasticJobExecutor jobExecutor;
+        
+        private ElasticJobExecutor getJobExecutor() {
+            if (null == jobExecutor) {
+                createJobExecutor();
+            }
+            return jobExecutor;
+        }
+        
+        private synchronized void createJobExecutor() {
+            if (null != jobExecutor) {
+                return;
+            }
+            if (null == elasticJob) {
+                jobExecutor = new ElasticJobExecutor(elasticJobType, jobFacade.loadJobConfiguration(true), jobFacade);
+            } else {
+                jobExecutor = new ElasticJobExecutor(elasticJob, jobFacade.loadJobConfiguration(true), jobFacade);
+            }
+        }
+        
         @Override
         public void execute(final JobExecutionContext context) {
             ShardingContexts shardingContexts = jobFacade.getShardingContexts();
@@ -168,19 +188,11 @@ public final class DaemonTaskScheduler {
             if (jobEventSamplingCount > 0 && ++currentJobEventSamplingCount < jobEventSamplingCount) {
                 shardingContexts.setCurrentJobEventSamplingCount(currentJobEventSamplingCount);
                 jobFacade.getShardingContexts().setAllowSendJobEvent(false);
-                if (null == elasticJob) {
-                    new ElasticJobExecutor(elasticJobType, jobFacade.loadJobConfiguration(true), jobFacade).execute();
-                } else {
-                    new ElasticJobExecutor(elasticJob, jobFacade.loadJobConfiguration(true), jobFacade).execute();
-                }
+                getJobExecutor().execute();
             } else {
                 jobFacade.getShardingContexts().setAllowSendJobEvent(true);
                 executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder().setTaskId(taskId).setState(Protos.TaskState.TASK_RUNNING).setMessage("BEGIN").build());
-                if (null == elasticJob) {
-                    new ElasticJobExecutor(elasticJobType, jobFacade.loadJobConfiguration(true), jobFacade).execute();
-                } else {
-                    new ElasticJobExecutor(elasticJob, jobFacade.loadJobConfiguration(true), jobFacade).execute();
-                }
+                getJobExecutor().execute();
                 executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder().setTaskId(taskId).setState(Protos.TaskState.TASK_RUNNING).setMessage("COMPLETE").build());
                 shardingContexts.setCurrentJobEventSamplingCount(0);
             }
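
Note on the pattern above: the commit replaces the per-trigger "new ElasticJobExecutor(...)" calls with a single lazily created, cached executor. The unsynchronized getter checks the volatile field first, and the synchronized factory method re-checks it before constructing, so concurrent triggers cannot build duplicate executors. The following standalone sketch illustrates the same double-checked lazy-initialization idea outside of ElasticJob; CachedExecutorHolder and ExpensiveExecutor are hypothetical names used only for illustration, not classes from the repository.

    // Standalone sketch of the double-checked lazy initialization used in the commit.
    // ExpensiveExecutor and CachedExecutorHolder are illustrative names, not ElasticJob classes.
    public final class CachedExecutorHolder {
        
        // Hypothetical stand-in for ElasticJobExecutor: costly to build, reusable afterwards.
        public static final class ExpensiveExecutor {
            
            public void execute() {
                System.out.println("executing with " + this);
            }
        }
        
        // volatile ensures a fully constructed instance is visible to all trigger threads.
        private volatile ExpensiveExecutor executor;
        
        // Fast path: no locking once the instance exists.
        private ExpensiveExecutor getExecutor() {
            if (null == executor) {
                createExecutor();
            }
            return executor;
        }
        
        // Slow path: synchronized, and re-checks null so only one instance is ever created.
        private synchronized void createExecutor() {
            if (null != executor) {
                return;
            }
            executor = new ExpensiveExecutor();
        }
        
        public void trigger() {
            getExecutor().execute();
        }
        
        public static void main(final String[] args) {
            CachedExecutorHolder holder = new CachedExecutorHolder();
            // Repeated triggers reuse the same executor instance instead of constructing a new one each time.
            holder.trigger();
            holder.trigger();
        }
    }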