package com.liferay.portal.scheduler.job;

import com.liferay.portal.kernel.cluster.ClusterExecutorUtil;
import com.liferay.portal.kernel.cluster.ClusterRequest;
import com.liferay.portal.kernel.concurrent.LockRegistry;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.messaging.Message;
import com.liferay.portal.kernel.messaging.MessageBusUtil;
import com.liferay.portal.kernel.scheduler.JobState;
import com.liferay.portal.kernel.scheduler.JobStateSerializeUtil;
import com.liferay.portal.kernel.scheduler.SchedulerEngine;
import com.liferay.portal.kernel.scheduler.SchedulerEngineUtil;
import com.liferay.portal.kernel.scheduler.StorageType;
import com.liferay.portal.kernel.scheduler.TriggerState;
import com.liferay.portal.kernel.util.MethodHandler;
import com.liferay.portal.kernel.util.MethodKey;
import com.liferay.portal.spring.context.PortletContextLoaderListener;
import com.liferay.portal.util.PropsValues;

import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.Trigger;

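/**
 * Quartz {@link Job} implementation that delivers a scheduled job's
 * {@link Message} to its message bus destination each time the job's trigger
 * fires.
 */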
public class MessageSenderJob implements Job {

	public void execute(JobExecutionContext jobExecutionContext) {
		try {
			doExecute(jobExecutionContext);
		}
		catch (Exception e) {
			_log.error("Unable to execute job", e);
		}
	}

	protected void doExecute(JobExecutionContext jobExecutionContext)
		throws Exception {

		JobDetail jobDetail = jobExecutionContext.getJobDetail();

		JobDataMap jobDataMap = jobDetail.getJobDataMap();

		String destinationName = jobDataMap.getString(
			SchedulerEngine.DESTINATION_NAME);

		String messageJSON = (String)jobDataMap.get(SchedulerEngine.MESSAGE);

		Message message = null;

		if (messageJSON == null) {
			message = new Message();
		}
		else {
			message = (Message)JSONFactoryUtil.deserialize(messageJSON);
		}

		// Look up the plugin context lock for the message's context path. If
		// other threads are already waiting on the lock, skip this fire;
		// otherwise hold the lock while the message is dispatched.

		String contextPath = message.getString(SchedulerEngine.CONTEXT_PATH);

		String lockKey = PortletContextLoaderListener.getLockKey(
			contextPath);

		ReentrantLock executionLock = null;

		if (lockKey != null) {
			executionLock = LockRegistry.getLock(lockKey, lockKey);

			if (executionLock != null) {
				if (executionLock.hasQueuedThreads()) {
					return;
				}

				executionLock.lock();
			}
		}

		try {
			message.put(SchedulerEngine.DESTINATION_NAME, destinationName);

			Scheduler scheduler = jobExecutionContext.getScheduler();

			Map<String, Object> jobStateMap =
				(Map<String, Object>)jobDataMap.get(SchedulerEngine.JOB_STATE);

			JobState jobState = JobStateSerializeUtil.deserialize(jobStateMap);

			// A null next fire time means the trigger has fired for the last
			// time

			if (jobExecutionContext.getNextFireTime() == null) {
				Trigger trigger = jobExecutionContext.getTrigger();

				StorageType storageType = StorageType.valueOf(
					jobDataMap.getString(SchedulerEngine.STORAGE_TYPE));

				if (storageType.equals(StorageType.PERSISTED)) {

					// Record the completed trigger state and store it back
					// into the durable job so it survives in the job store

					JobState jobStateClone = updatePersistedJobState(
						jobState, trigger);

					jobDataMap.put(
						SchedulerEngine.JOB_STATE,
						JobStateSerializeUtil.serialize(jobStateClone));

					scheduler.addJob(jobDetail, true);
				}
				else {

					// Tell the message listener to disable the job. For
					// memory clustered jobs, also ask the other cluster nodes
					// to delete their copies.

					message.put(SchedulerEngine.DISABLE, true);

					if (PropsValues.CLUSTER_LINK_ENABLED &&
						storageType.equals(StorageType.MEMORY_CLUSTERED)) {

						notifyClusterMember(
							trigger.getJobName(), trigger.getGroup(),
							storageType);
					}
				}
			}

			message.put(SchedulerEngine.JOB_STATE, jobState);

			MessageBusUtil.sendMessage(destinationName, message);
		}
		finally {
			if (executionLock != null) {
				executionLock.unlock();
			}
		}
	}

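	/**
	 * Notifies the other cluster nodes to delete the job. Invoked when a
	 * memory clustered job's trigger has fired for the last time.
	 */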
	protected void notifyClusterMember(
			String jobName, String groupName, StorageType storageType)
		throws Exception {

		MethodHandler methodHandler = new MethodHandler(
			_deleteJobMethodKey, jobName, groupName, storageType);

		ClusterRequest clusterRequest =
			ClusterRequest.createMulticastRequest(methodHandler, true);

		ClusterExecutorUtil.execute(clusterRequest);
	}

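	/**
	 * Records the trigger's final fire dates on the job state, marks the
	 * trigger complete, and returns a clone with its exception history
	 * cleared for persisting.
	 */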
	protected JobState updatePersistedJobState(
		JobState jobState, Trigger trigger) {

		jobState.setTriggerDate(
			SchedulerEngine.END_TIME, trigger.getEndTime());
		jobState.setTriggerDate(
			SchedulerEngine.FINAL_FIRE_TIME, trigger.getFinalFireTime());
		jobState.setTriggerDate(SchedulerEngine.NEXT_FIRE_TIME, null);
		jobState.setTriggerDate(
			SchedulerEngine.PREVIOUS_FIRE_TIME, trigger.getPreviousFireTime());
		jobState.setTriggerDate(
			SchedulerEngine.START_TIME, trigger.getStartTime());
		jobState.setTriggerState(TriggerState.COMPLETE);

		JobState jobStateClone = (JobState)jobState.clone();

		jobStateClone.clearExceptions();

		return jobStateClone;
	}

	private static Log _log = LogFactoryUtil.getLog(MessageSenderJob.class);

	private static MethodKey _deleteJobMethodKey = new MethodKey(
		SchedulerEngineUtil.class.getName(), "delete", String.class,
		String.class, StorageType.class);

}