亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來(lái)到蟲(chóng)蟲(chóng)下載站! | ?? 資源下載 ?? 資源專輯 ?? 關(guān)于我們
? 蟲(chóng)蟲(chóng)下載站

?? clusterexample.java

?? Quartz is a full-featured, open source job scheduling system that can be integrated with, or used al
?? JAVA
字號(hào):
/*
 * Copyright 2005 OpenSymphony
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.quartz.examples.example13;

import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerFactory;
import org.quartz.SimpleTrigger;
import org.quartz.impl.StdSchedulerFactory;

/**
 * Used to test/show the clustering features of JDBCJobStore (JobStoreTX or
 * JobStoreCMT).
 *
 * <p>
 * All instances MUST use a different properties file, because their instance
 * Ids must be different, however all other properties should be the same.
 * </p>
 *
 * <p>
 * If you want it to clear out existing jobs &amp; triggers, pass a command-line
 * argument called "clearJobs".
 * </p>
 *
 * <p>
 * You should probably start with a "fresh" set of tables (assuming you may
 * have some data lingering in it from other tests), since mixing data from a
 * non-clustered setup with a clustered one can be bad.
 * </p>
 *
 * <p>
 * Try killing one of the cluster instances while they are running, and see
 * that the remaining instance(s) recover the in-progress jobs. Note that
 * detection of the failure may take up to 15 or so seconds with the default
 * settings.
 * </p>
 *
 * <p>
 * Also try running it with/without the shutdown-hook plugin registered with
 * the scheduler. (org.quartz.plugins.management.ShutdownHookPlugin).
 * </p>
 *
 * <p>
 * <i>Note:</i> Never run clustering on separate machines, unless their
 * clocks are synchronized using some form of time-sync service (daemon).
 * </p>
 *
 * @see DumbRecoveryJob
 *
 * @author James House
 */
public class ClusterExample {

    private static Log _log = LogFactory.getLog(ClusterExample.class);

    /**
     * Removes every trigger and job currently stored in the scheduler's
     * job store, across all groups.
     *
     * @param inScheduler the scheduler whose data should be cleared
     * @throws Exception if the scheduler reports an error while listing or
     *         deleting jobs/triggers
     */
    public void cleanUp(Scheduler inScheduler) throws Exception {
        _log.warn("***** Deleting existing jobs/triggers *****");

        // unschedule jobs
        String[] groups = inScheduler.getTriggerGroupNames();
        for (int i = 0; i < groups.length; i++) {
            String[] names = inScheduler.getTriggerNames(groups[i]);
            for (int j = 0; j < names.length; j++) {
                inScheduler.unscheduleJob(names[j], groups[i]);
            }
        }

        // delete jobs
        groups = inScheduler.getJobGroupNames();
        for (int i = 0; i < groups.length; i++) {
            String[] names = inScheduler.getJobNames(groups[i]);
            for (int j = 0; j < names.length; j++) {
                inScheduler.deleteJob(names[j], groups[i]);
            }
        }
    }

    /**
     * Creates and schedules one recoverable job/trigger pair.
     *
     * <p>Each job is named "job_&lt;count&gt;" and each trigger
     * "trig_&lt;count&gt;", both in a group named after this scheduler
     * instance's id so that concurrently-started cluster nodes do not
     * collide. Recovery is requested so another cluster node re-executes
     * the job if this node dies mid-run.</p>
     *
     * @param sched          the scheduler to register the job with
     * @param schedId        this scheduler instance's id (used as group name)
     * @param count          sequence number used in the job/trigger names
     * @param jobClass       the Job implementation to execute
     * @param startDelay     milliseconds from now until the first fire
     * @param repeatInterval milliseconds between repeats
     * @throws Exception if the scheduler rejects the job or trigger
     */
    private void scheduleRecoverableJob(Scheduler sched, String schedId,
            int count, Class jobClass, long startDelay, long repeatInterval)
        throws Exception {

        JobDetail job = new JobDetail("job_" + count, schedId, jobClass);
        // ask scheduler to re-execute this job if it was in progress when
        // the scheduler went down...
        job.setRequestsRecovery(true);

        SimpleTrigger trigger =
            new SimpleTrigger("trig_" + count, schedId, 20, repeatInterval);
        trigger.setStartTime(new Date(System.currentTimeMillis() + startDelay));

        _log.info(job.getFullName() +
                " will run at: " + trigger.getNextFireTime() +
                " and repeat: " + trigger.getRepeatCount() +
                " times, every " + trigger.getRepeatInterval() / 1000 + " seconds");

        sched.scheduleJob(job, trigger);
    }

    /**
     * Builds a scheduler from the default factory, optionally clears old
     * data, optionally schedules five recoverable demo jobs, then runs the
     * scheduler for one hour before shutting down.
     *
     * @param inClearJobs    whether to delete pre-existing jobs/triggers first
     * @param inScheduleJobs whether to schedule the demo jobs
     * @throws Exception if scheduler creation, scheduling, or shutdown fails
     */
    public void run(boolean inClearJobs, boolean inScheduleJobs)
        throws Exception {

        // First we must get a reference to a scheduler
        SchedulerFactory sf = new StdSchedulerFactory();
        Scheduler sched = sf.getScheduler();

        if (inClearJobs) {
            cleanUp(sched);
        }

        _log.info("------- Initialization Complete -----------");

        if (inScheduleJobs) {
            _log.info("------- Scheduling Jobs ------------------");

            String schedId = sched.getSchedulerInstanceId();

            // Five jobs with varying start delays and repeat intervals; the
            // third is stateful (non-concurrent) to exercise that code path.
            scheduleRecoverableJob(sched, schedId, 1, SimpleRecoveryJob.class, 1000L, 5000L);
            scheduleRecoverableJob(sched, schedId, 2, SimpleRecoveryJob.class, 2000L, 5000L);
            scheduleRecoverableJob(sched, schedId, 3, SimpleRecoveryStatefulJob.class, 1000L, 3000L);
            scheduleRecoverableJob(sched, schedId, 4, SimpleRecoveryJob.class, 1000L, 4000L);
            scheduleRecoverableJob(sched, schedId, 5, SimpleRecoveryJob.class, 1000L, 4500L);
        }

        // jobs don't start firing until start() has been called...
        _log.info("------- Starting Scheduler ---------------");
        sched.start();
        _log.info("------- Started Scheduler ----------------");

        _log.info("------- Waiting for one hour... ----------");
        try {
            Thread.sleep(3600L * 1000L);
        } catch (InterruptedException ignored) {
            // restore interrupt status so callers can observe it,
            // then fall through to an orderly shutdown
            Thread.currentThread().interrupt();
        }

        _log.info("------- Shutting Down --------------------");
        sched.shutdown();
        _log.info("------- Shutdown Complete ----------------");
    }

    /**
     * Entry point.
     *
     * <p>Recognized (case-insensitive) command-line arguments:
     * "clearJobs" deletes existing jobs/triggers before scheduling;
     * "dontScheduleJobs" skips scheduling the demo jobs.</p>
     *
     * @param args command-line flags as described above
     * @throws Exception if the example fails to run
     */
    public static void main(String[] args) throws Exception {
        boolean clearJobs = false;
        boolean scheduleJobs = true;

        for (int i = 0; i < args.length; i++) {
            if (args[i].equalsIgnoreCase("clearJobs")) {
                clearJobs = true;
            } else if (args[i].equalsIgnoreCase("dontScheduleJobs")) {
                scheduleJobs = false;
            }
        }

        ClusterExample example = new ClusterExample();
        example.run(clearJobs, scheduleJobs);
    }
}

?? 快捷鍵說(shuō)明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號(hào) Ctrl + =
減小字號(hào) Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
欧美视频在线观看一区二区| 国产精品一区二区三区网站| 欧美在线一二三四区| 亚洲日本在线天堂| 色婷婷综合久久久久中文一区二区 | 成人av网站大全| 亚洲欧美一区二区在线观看| 色婷婷综合久久久久中文| 亚洲成av人在线观看| 欧美一卡二卡在线| 国产精品99久久久久久久女警| 欧美高清在线视频| 日本高清不卡视频| 奇米精品一区二区三区在线观看| 精品区一区二区| 成人精品gif动图一区| 亚洲制服丝袜av| 日韩三级免费观看| 成人av手机在线观看| 亚洲成人tv网| 国产日韩亚洲欧美综合| 日本久久一区二区三区| 美国十次了思思久久精品导航| 亚洲精品一区二区三区蜜桃下载 | 青青草原综合久久大伊人精品优势| 91精品国产入口| 成人美女视频在线观看18| 一区二区三区中文字幕精品精品| 欧美一区二区三区四区在线观看| 国产福利不卡视频| 亚洲午夜精品在线| 国产日韩综合av| 精品视频在线免费看| 精品一区二区三区欧美| 亚洲欧美成人一区二区三区| 日韩一区二区三区精品视频| 91麻豆免费看| 久久99精品久久久久婷婷| 亚洲一区二区综合| 国产精品久久久久久一区二区三区 | 欧美一级高清片| 99精品黄色片免费大全| 久久99精品久久久久久| 亚洲第一成年网| 国产精品久久久久aaaa| 26uuu亚洲综合色| 精品1区2区3区| 91丨九色丨蝌蚪丨老版| 国产裸体歌舞团一区二区| 视频在线在亚洲| 亚洲色图清纯唯美| 国产农村妇女精品| 精品区一区二区| 这里只有精品免费| 91国产免费观看| 成人黄页毛片网站| 国产高清成人在线| 久久精品99久久久| www.色精品| 国产成人精品免费一区二区| 中文字幕一区二区三区不卡在线| 日韩美一区二区三区| 欧美日韩精品福利| 成人免费在线视频观看| 欧美福利视频导航| 欧美日韩国产高清一区| 另类小说图片综合网| 国产成人精品午夜视频免费| av爱爱亚洲一区| 欧美日韩高清影院| 久久久精品国产免费观看同学| 中文乱码免费一区二区| 亚洲一线二线三线视频| 乱一区二区av| 成人高清免费观看| 欧美嫩在线观看| 日本一区免费视频| 亚洲成a人v欧美综合天堂| 久久成人综合网| 91麻豆免费看| 2021国产精品久久精品| 亚洲精品第一国产综合野| 久久国产剧场电影| av高清不卡在线| 日韩一区二区在线观看视频播放| 中文字幕高清一区| 偷拍一区二区三区四区| 国产a视频精品免费观看| 欧美亚洲国产bt| 国产日韩v精品一区二区| 亚洲午夜视频在线| 国产91精品一区二区麻豆亚洲| 欧美主播一区二区三区| 久久综合精品国产一区二区三区| 亚洲乱码国产乱码精品精的特点| 精品综合久久久久久8888| 日本久久一区二区三区| 国产免费成人在线视频| 美女视频一区在线观看| 色噜噜久久综合| 国产欧美一区二区三区鸳鸯浴| 日韩精彩视频在线观看| 色综合久久天天| 久久综合久久综合九色| 日韩福利电影在线| 91老司机福利 在线| 久久精品欧美日韩| 麻豆免费看一区二区三区| 91福利区一区二区三区| 亚洲国产精品av| 极品少妇xxxx精品少妇| 欧美日本在线观看| 一区二区欧美精品| 99re66热这里只有精品3直播| 久久综合中文字幕| 麻豆精品一区二区av白丝在线| 一本色道久久综合亚洲aⅴ蜜桃| 国产午夜精品在线观看| 激情五月播播久久久精品| 欧美一区午夜视频在线观看| 亚洲精品国产精品乱码不99| 99在线精品一区二区三区| 久久亚区不卡日本| 国精产品一区一区三区mba桃花| 91精品国产色综合久久不卡蜜臀| 国内成人免费视频| 欧美r级在线观看| 奇米影视一区二区三区小说| 欧美精品在线视频| 丝袜亚洲另类丝袜在线| 欧美三级资源在线| 亚洲一区二区三区视频在线播放| 91浏览器打开| 亚洲美女免费视频| 一本色道亚洲精品aⅴ| 亚洲免费观看高清| 91福利精品视频| 亚洲电影第三页| 欧美精品一二三| 日本亚洲欧美天堂免费| 91精品欧美综合在线观看最新 | 国产精品丝袜一区| 处破女av一区二区| 中文字幕精品一区二区精品绿巨人 | 国产欧美综合在线观看第十页| 国产黄人亚洲片| 国产精品视频一二三| 99精品热视频| 亚洲一区二区美女| 91精品免费观看| 另类小说欧美激情| 久久天天做天天爱综合色| 国产福利视频一区二区三区| 欧美国产成人精品| 91精品福利视频| 性做久久久久久免费观看欧美| 这里只有精品视频在线观看| 美国十次综合导航| 中文一区在线播放 | 日韩精品一区二区三区视频播放| 精品一区二区三区视频在线观看| 国产女人水真多18毛片18精品视频| 不卡的av在线播放| 亚洲国产成人91porn| 日韩免费性生活视频播放| 
国产大片一区二区| 一区二区三区加勒比av| 日韩一区二区在线看片| 国产不卡视频一区| 亚洲午夜久久久久| 精品不卡在线视频| 成人av免费网站| 天天色 色综合| 国产视频视频一区| 欧美视频一区在线| 国产精品一区二区在线看| 亚洲精品久久嫩草网站秘色| 日韩欧美一级二级三级久久久| 成人精品亚洲人成在线| 亚洲图片欧美色图| 国产午夜精品福利| 欧美日韩国产首页在线观看| 国产精品18久久久久久久久 | 精久久久久久久久久久| 亚洲天天做日日做天天谢日日欢| 欧美乱熟臀69xxxxxx| 国产99久久久精品| 日韩二区在线观看| 国产精品福利影院| 日韩欧美的一区二区| 色八戒一区二区三区| 国产尤物一区二区| 香蕉久久夜色精品国产使用方法| 国产欧美一区二区三区沐欲| 欧美美女直播网站| 97久久超碰国产精品| 狠狠色丁香婷综合久久| 亚洲国产欧美一区二区三区丁香婷| 久久久99免费| 欧美一区二区不卡视频|