package com.liferay.portal.scheduler.job;

import com.liferay.portal.kernel.cluster.ClusterExecutorUtil;
import com.liferay.portal.kernel.cluster.ClusterRequest;
import com.liferay.portal.kernel.concurrent.LockRegistry;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.messaging.Message;
import com.liferay.portal.kernel.messaging.MessageBusUtil;
import com.liferay.portal.kernel.scheduler.JobState;
import com.liferay.portal.kernel.scheduler.JobStateSerializeUtil;
import com.liferay.portal.kernel.scheduler.SchedulerEngine;
import com.liferay.portal.kernel.scheduler.SchedulerEngineUtil;
import com.liferay.portal.kernel.scheduler.StorageType;
import com.liferay.portal.kernel.scheduler.TriggerState;
import com.liferay.portal.kernel.util.MethodHandler;
import com.liferay.portal.kernel.util.MethodKey;
import com.liferay.portal.spring.context.PortletContextLoaderListener;
import com.liferay.portal.util.PropsValues;

import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.Trigger;

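/**
 * Quartz {@link Job} used by the Liferay scheduler engine. It delivers the
 * scheduled {@link Message} to the message bus destination stored in the job
 * data map and, on a trigger's final firing, either persists the updated job
 * state or flags the job for cleanup.
 */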
public class MessageSenderJob implements Job {

	public void execute(JobExecutionContext jobExecutionContext) {
		try {
			doExecute(jobExecutionContext);
		}
		catch (Exception e) {
			_log.error("Unable to execute job", e);
		}
	}

	protected void doExecute(JobExecutionContext jobExecutionContext)
		throws Exception {

		JobDetail jobDetail = jobExecutionContext.getJobDetail();

		JobDataMap jobDataMap = jobDetail.getJobDataMap();

		String destinationName = jobDataMap.getString(
			SchedulerEngine.DESTINATION_NAME);

		String messageJSON = (String)jobDataMap.get(SchedulerEngine.MESSAGE);

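		// The message payload is stored in the job data map as JSON; fall back
		// to an empty message when no payload was registered with the job.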
		Message message = null;

		if (messageJSON == null) {
			message = new Message();
		}
		else {
			message = (Message)JSONFactoryUtil.deserialize(messageJSON);
		}

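		// Executions of jobs scheduled from a plugin are guarded by the
		// plugin's deployment lock, keyed by its portlet context path. If
		// other threads are already queued on that lock (typically because the
		// plugin context is being reloaded), skip this run instead of blocking
		// the Quartz worker thread.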
		String contextPath = message.getString(SchedulerEngine.CONTEXT_PATH);

		String lockKey = PortletContextLoaderListener.getLockKey(contextPath);

		ReentrantLock executionLock = null;

		if (lockKey != null) {
			executionLock = LockRegistry.getLock(lockKey, lockKey);

			if (executionLock != null) {
				if (executionLock.hasQueuedThreads()) {
					return;
				}

				executionLock.lock();
			}
		}

		try {
			message.put(SchedulerEngine.DESTINATION_NAME, destinationName);

			Scheduler scheduler = jobExecutionContext.getScheduler();

			Map<String, Object> jobStateMap =
				(Map<String, Object>)jobDataMap.get(SchedulerEngine.JOB_STATE);

			JobState jobState = JobStateSerializeUtil.deserialize(jobStateMap);

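			// A null next fire time means the trigger will never fire again.
			// For persisted jobs, record the final trigger state back into the
			// job data map; for in-memory jobs, mark the outgoing message as
			// disabled and, when memory clustered, ask the other cluster nodes
			// to delete their copies of the job.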
			if (jobExecutionContext.getNextFireTime() == null) {
				Trigger trigger = jobExecutionContext.getTrigger();

				StorageType storageType = StorageType.valueOf(
					jobDataMap.getString(SchedulerEngine.STORAGE_TYPE));

				if (storageType.equals(StorageType.PERSISTED)) {
					JobState jobStateClone = updatePersistedJobState(
						jobState, trigger);

					jobDataMap.put(
						SchedulerEngine.JOB_STATE,
						JobStateSerializeUtil.serialize(jobStateClone));

					scheduler.addJob(jobDetail, true);
				}
				else {
					message.put(SchedulerEngine.DISABLE, true);

					if (PropsValues.CLUSTER_LINK_ENABLED &&
						storageType.equals(StorageType.MEMORY_CLUSTERED)) {

						notifyClusterMember(
							trigger.getJobName(), trigger.getGroup(),
							storageType);
					}
				}
			}

			message.put(SchedulerEngine.JOB_STATE, jobState);

			MessageBusUtil.sendMessage(destinationName, message);
		}
		finally {
			if (executionLock != null) {
				executionLock.unlock();
			}
		}
	}

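	/**
	 * Broadcasts a multicast {@link ClusterRequest} that invokes the
	 * <code>SchedulerEngineUtil</code> delete method identified by
	 * <code>_deleteJobMethodKey</code>, so a completed memory clustered job is
	 * also removed on the other cluster nodes.
	 */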
	protected void notifyClusterMember(
			String jobName, String groupName, StorageType storageType)
		throws Exception {

		MethodHandler methodHandler = new MethodHandler(
			_deleteJobMethodKey, jobName, groupName, storageType);

		ClusterRequest clusterRequest =
			ClusterRequest.createMulticastRequest(methodHandler, true);

		ClusterExecutorUtil.execute(clusterRequest);
	}

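	/**
	 * Records the trigger's final timing information and marks the trigger
	 * state as COMPLETE on the job state, then returns a clone with its stored
	 * exceptions cleared so the trimmed copy can be serialized back into the
	 * job data map.
	 */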
	protected JobState updatePersistedJobState(
		JobState jobState, Trigger trigger) {

		jobState.setTriggerDate(SchedulerEngine.END_TIME, trigger.getEndTime());
		jobState.setTriggerDate(
			SchedulerEngine.FINAL_FIRE_TIME, trigger.getFinalFireTime());
		jobState.setTriggerDate(SchedulerEngine.NEXT_FIRE_TIME, null);
		jobState.setTriggerDate(
			SchedulerEngine.PREVIOUS_FIRE_TIME, trigger.getPreviousFireTime());
		jobState.setTriggerDate(
			SchedulerEngine.START_TIME, trigger.getStartTime());
		jobState.setTriggerState(TriggerState.COMPLETE);

		JobState jobStateClone = (JobState)jobState.clone();

		jobStateClone.clearExceptions();

		return jobStateClone;
	}

	private static Log _log = LogFactoryUtil.getLog(MessageSenderJob.class);

	private static MethodKey _deleteJobMethodKey = new MethodKey(
		SchedulerEngineUtil.class.getName(), "delete", String.class,
		String.class, StorageType.class);

}