亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? clusterexample.java

?? Quartz 是個開源的作業調度框架
?? JAVA
字號(hào):
/*
 * Copyright 2005 OpenSymphony
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.quartz.examples.example13;

import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerFactory;
import org.quartz.SimpleTrigger;
import org.quartz.impl.StdSchedulerFactory;

/**
 * Used to test/show the clustering features of JDBCJobStore (JobStoreTX or
 * JobStoreCMT).
 *
 * <p>
 * All instances MUST use a different properties file, because their instance
 * Ids must be different, however all other properties should be the same.
 * </p>
 *
 * <p>
 * If you want it to clear out existing jobs &amp; triggers, pass a command-line
 * argument called "clearJobs".
 * </p>
 *
 * <p>
 * You should probably start with a "fresh" set of tables (assuming you may
 * have some data lingering in it from other tests), since mixing data from a
 * non-clustered setup with a clustered one can be bad.
 * </p>
 *
 * <p>
 * Try killing one of the cluster instances while they are running, and see
 * that the remaining instance(s) recover the in-progress jobs. Note that
 * detection of the failure may take up to 15 or so seconds with the default
 * settings.
 * </p>
 *
 * <p>
 * Also try running it with/without the shutdown-hook plugin registered with
 * the scheduler (org.quartz.plugins.management.ShutdownHookPlugin).
 * </p>
 *
 * <p>
 * <i>Note:</i> Never run clustering on separate machines, unless their
 * clocks are synchronized using some form of time-sync service (daemon).
 * </p>
 *
 * @see DumbRecoveryJob
 *
 * @author James House
 */
public class ClusterExample {

    private static final Log LOG = LogFactory.getLog(ClusterExample.class);

    /**
     * Removes every trigger and job currently known to the given scheduler.
     *
     * @param inScheduler the scheduler whose jobs/triggers are deleted
     * @throws Exception if the scheduler rejects any of the operations
     */
    public void cleanUp(Scheduler inScheduler) throws Exception {
        LOG.warn("***** Deleting existing jobs/triggers *****");

        // Unschedule every trigger in every trigger group.
        String[] groups = inScheduler.getTriggerGroupNames();
        for (int i = 0; i < groups.length; i++) {
            String[] names = inScheduler.getTriggerNames(groups[i]);
            for (int j = 0; j < names.length; j++) {
                inScheduler.unscheduleJob(names[j], groups[i]);
            }
        }

        // Delete every job in every job group.
        groups = inScheduler.getJobGroupNames();
        for (int i = 0; i < groups.length; i++) {
            String[] names = inScheduler.getJobNames(groups[i]);
            for (int j = 0; j < names.length; j++) {
                inScheduler.deleteJob(names[j], groups[i]);
            }
        }
    }

    /**
     * Creates one recoverable job with a simple repeating trigger, logs its
     * schedule, and registers both with the scheduler.
     *
     * @param sched            the scheduler to register the job with
     * @param schedId          scheduler instance id, used as the job/trigger group
     * @param count            sequence number used to build unique job/trigger names
     * @param jobClass         the job implementation class to schedule
     * @param startDelayMs     delay from now until the first fire time
     * @param repeatIntervalMs interval in milliseconds between repeats
     * @throws Exception if the scheduler rejects the job or trigger
     */
    private void scheduleRecoverableJob(Scheduler sched, String schedId, int count,
            Class jobClass, long startDelayMs, long repeatIntervalMs) throws Exception {
        JobDetail job = new JobDetail("job_" + count, schedId, jobClass);

        // Ask scheduler to re-execute this job if it was in progress when
        // the scheduler went down...
        job.setRequestsRecovery(true);

        // 20 repeats at the given interval; name fixed from the original's
        // one-off "triger_" typo so all triggers follow the "trig_" pattern.
        SimpleTrigger trigger =
                new SimpleTrigger("trig_" + count, schedId, 20, repeatIntervalMs);
        trigger.setStartTime(new Date(System.currentTimeMillis() + startDelayMs));

        LOG.info(job.getFullName()
                + " will run at: " + trigger.getNextFireTime()
                + " and repeat: " + trigger.getRepeatCount()
                + " times, every " + trigger.getRepeatInterval() / 1000 + " seconds");

        sched.scheduleJob(job, trigger);
    }

    /**
     * Runs the example: optionally purges existing scheduler data, optionally
     * schedules a handful of recoverable jobs, then starts the scheduler and
     * lets it run for up to one hour before shutting down.
     *
     * @param inClearJobs    whether to delete all existing jobs/triggers first
     * @param inScheduleJobs whether to schedule the example jobs
     * @throws Exception if scheduler creation or any scheduler call fails
     */
    public void run(boolean inClearJobs, boolean inScheduleJobs) throws Exception {
        // First we must get a reference to a scheduler.
        SchedulerFactory sf = new StdSchedulerFactory();
        Scheduler sched = sf.getScheduler();

        if (inClearJobs) {
            cleanUp(sched);
        }

        LOG.info("------- Initialization Complete -----------");

        if (inScheduleJobs) {
            LOG.info("------- Scheduling Jobs ------------------");

            String schedId = sched.getSchedulerInstanceId();
            int count = 1;

            // Five recoverable jobs with varying start delays and repeat
            // intervals; the third one is stateful.
            scheduleRecoverableJob(sched, schedId, count++, SimpleRecoveryJob.class, 1000L, 5000L);
            scheduleRecoverableJob(sched, schedId, count++, SimpleRecoveryJob.class, 2000L, 5000L);
            scheduleRecoverableJob(sched, schedId, count++, SimpleRecoveryStatefulJob.class, 1000L, 3000L);
            scheduleRecoverableJob(sched, schedId, count++, SimpleRecoveryJob.class, 1000L, 4000L);
            scheduleRecoverableJob(sched, schedId, count++, SimpleRecoveryJob.class, 1000L, 4500L);
        }

        // Jobs don't start firing until start() has been called...
        LOG.info("------- Starting Scheduler ---------------");
        sched.start();
        LOG.info("------- Started Scheduler ----------------");

        LOG.info("------- Waiting for one hour... ----------");
        try {
            Thread.sleep(3600L * 1000L);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of silently swallowing it,
            // then fall through to an orderly shutdown.
            Thread.currentThread().interrupt();
        }

        LOG.info("------- Shutting Down --------------------");
        sched.shutdown();
        LOG.info("------- Shutdown Complete ----------------");
    }

    /**
     * Command-line entry point.
     *
     * <p>Recognized arguments (case-insensitive): "clearJobs" deletes all
     * existing jobs/triggers before scheduling; "dontScheduleJobs" skips
     * scheduling the example jobs.</p>
     *
     * @param args command-line flags as described above
     * @throws Exception if the example fails to run
     */
    public static void main(String[] args) throws Exception {
        boolean clearJobs = false;
        boolean scheduleJobs = true;

        for (int i = 0; i < args.length; i++) {
            if (args[i].equalsIgnoreCase("clearJobs")) {
                clearJobs = true;
            } else if (args[i].equalsIgnoreCase("dontScheduleJobs")) {
                scheduleJobs = false;
            }
        }

        new ClusterExample().run(clearJobs, scheduleJobs);
    }
}

?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號(hào) Ctrl + =
減小字號(hào) Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
国产 欧美在线| 免费成人在线网站| 91毛片在线观看| 亚洲人123区| 在线免费av一区| 成人国产精品免费观看动漫| 中文字幕欧美激情一区| gogo大胆日本视频一区| 亚洲裸体在线观看| 欧美人与禽zozo性伦| 免费看欧美女人艹b| 久久久久国产一区二区三区四区 | 久久久影院官网| 不卡区在线中文字幕| 亚洲激情欧美激情| 91精品国产欧美一区二区成人| 久久爱www久久做| 国产精品久线观看视频| 在线观看视频一区| 狠狠色综合色综合网络| 中文字幕欧美区| 欧美日韩一级二级| 国模无码大尺度一区二区三区| 国产精品高潮久久久久无| 欧美视频在线播放| 国产成人免费网站| 亚洲资源中文字幕| 久久久91精品国产一区二区三区| 91在线一区二区| 麻豆视频观看网址久久| 国产精品久久久久久妇女6080 | 久久伊人蜜桃av一区二区| 99精品视频在线观看| 日韩国产在线观看一区| 国产欧美日韩三级| 欧美老女人在线| 成av人片一区二区| 免费久久99精品国产| 亚洲视频在线观看三级| 日韩女优毛片在线| 欧美系列一区二区| 成人精品高清在线| 麻豆国产精品官网| 亚洲一区二区欧美激情| 国产日韩欧美综合在线| 欧美丰满美乳xxx高潮www| 99久久免费精品| 国内精品免费**视频| 亚洲永久免费av| 国产精品蜜臀在线观看| 久久一二三国产| 678五月天丁香亚洲综合网| 91视频com| 成人丝袜视频网| 国产伦精品一区二区三区免费 | 免费在线成人网| 亚洲综合丝袜美腿| 中文字幕亚洲欧美在线不卡| 亚洲综合一二区| 亚洲欧洲国产日韩| 国产欧美一区二区三区鸳鸯浴| 欧美久久久久免费| 欧美在线三级电影| 99久久亚洲一区二区三区青草| 国产真实乱对白精彩久久| 日韩影院免费视频| 日韩av电影免费观看高清完整版 | 欧美午夜精品免费| 一本久久精品一区二区| 成人网男人的天堂| 成人晚上爱看视频| 床上的激情91.| 处破女av一区二区| 福利视频网站一区二区三区| 国产精品一区免费视频| 国产一区二区三区高清播放| 麻豆91免费观看| 久久99日本精品| 久久99精品久久久久久久久久久久| 天天做天天摸天天爽国产一区| 亚洲国产一区二区三区青草影视| 亚洲精品视频在线看| 一区二区三区日本| 午夜精品在线视频一区| 偷窥少妇高潮呻吟av久久免费| 亚洲午夜久久久| 日韩—二三区免费观看av| 青青草97国产精品免费观看无弹窗版| 免费在线观看精品| 国产一区二区三区四区五区美女| 国产suv一区二区三区88区| 国产·精品毛片| 91色porny蝌蚪| 欧美色综合网站| 欧美videossexotv100| 国产亚洲欧美日韩俺去了| 国产精品麻豆欧美日韩ww| 亚洲精品伦理在线| 午夜伊人狠狠久久| 韩国视频一区二区| 成人爱爱电影网址| 国产欧美日韩在线观看| 亚洲天堂2014| 亚洲成人免费影院| 激情久久五月天| 91亚洲精品乱码久久久久久蜜桃 | 欧美日本乱大交xxxxx| 日韩视频国产视频| 国产精品色一区二区三区| 亚洲另类一区二区| 毛片av一区二区| 99久久精品免费看国产免费软件| 欧美午夜精品电影| www国产精品av| 一区二区三区在线不卡| 乱中年女人伦av一区二区| 丰满白嫩尤物一区二区| 精品视频1区2区3区| 久久久久综合网| 亚洲国产成人tv| 国产老妇另类xxxxx| 欧美亚洲综合另类| 欧美精品一区二| 亚洲最色的网站| 国产精品一区二区视频| 欧美三级在线播放| 国产欧美日韩综合精品一区二区| 亚洲chinese男男1069| 国产盗摄视频一区二区三区| 欧美日韩日本视频| 国产精品污www在线观看| 日韩中文字幕1| 在线欧美日韩精品| 国产欧美日韩在线视频| 麻豆精品国产91久久久久久| 色999日韩国产欧美一区二区| 久久中文字幕电影| 日本亚洲免费观看| 色婷婷精品大在线视频| 国产日韩高清在线| 免费成人在线视频观看| 欧美性高清videossexo| 国产精品蜜臀在线观看| 国产美女一区二区三区| 91精品国产色综合久久久蜜香臀| 亚洲男同1069视频| 成人亚洲精品久久久久软件| 欧美mv和日韩mv国产网站| 三级一区在线视频先锋| 欧美中文一区二区三区| 国产精品视频一区二区三区不卡| 久久成人18免费观看| 4438成人网| 亚洲h在线观看| 欧美日韩激情一区二区| 亚洲精品乱码久久久久久黑人 | 亚洲18色成人| 色美美综合视频| 亚洲三级小视频| aaa国产一区| 日韩美女视频一区二区| www.色综合.com| 国产精品美女视频| 成人黄色在线看| 国产精品免费丝袜| 成人久久18免费网站麻豆| 国产日韩精品一区二区三区| 
国产一区二区三区电影在线观看 | 激情六月婷婷综合| 久久这里只精品最新地址| 精品午夜久久福利影院| 欧美成人激情免费网| 久草在线在线精品观看| 精品入口麻豆88视频| 国产真实乱子伦精品视频| 久久久精品蜜桃| 成人中文字幕在线| 日韩一区在线播放| 91成人在线免费观看| 亚洲国产欧美在线| 欧美一级在线观看| 国产一区二区三区免费| 欧美激情一区二区三区在线| 成人免费毛片片v| 亚洲免费观看在线视频| 欧美午夜片在线观看| 日本视频中文字幕一区二区三区| 日韩一区二区免费电影| 国产精品中文字幕日韩精品| 欧美高清在线精品一区| 色狠狠桃花综合| 欧美96一区二区免费视频| 久久久综合激的五月天| youjizz久久| 亚洲成人自拍一区| 亚洲精品一线二线三线| 成人18视频日本| 午夜日韩在线观看| 久久亚洲影视婷婷| 99九九99九九九视频精品|