arcwritertest.java

Category: Crawler / JAVA
Page 1 of 2
/* ARCWriterTest
 *
 * $Id: ARCWriterTest.java,v 1.37 2006/08/25 17:34:38 stack-sf Exp $
 *
 * Created on Dec 31, 2003.
 *
 * Copyright (C) 2003 Internet Archive.
 *
 * This file is part of the Heritrix web crawler (crawler.archive.org).
 *
 * Heritrix is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * any later version.
 *
 * Heritrix is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with Heritrix; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
package org.archive.io.arc;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import org.archive.io.ArchiveRecord;
import org.archive.io.ReplayInputStream;
import org.archive.io.WriterPoolMember;
import org.archive.util.ArchiveUtils;
import org.archive.util.FileUtils;
import org.archive.util.TmpDirTestCase;

/**
 * Test ARCWriter class.
 *
 * This code exercises ARCWriter AND ARCReader.  First it writes ARCs w/
 * ARCWriter.  Then it validates what was written w/ ARCReader.
 *
 * @author stack
 */
public class ARCWriterTest
extends TmpDirTestCase implements ARCConstants {
    /**
     * Prefix to use for ARC files made by JUNIT.
     */
    private static final String PREFIX =
        /* TODO DEFAULT_ARC_FILE_PREFIX*/ "IAH";

    private static final String SOME_URL = "http://www.archive.org/test/";

    private static final AtomicInteger SERIAL_NO = new AtomicInteger();

    /*
     * @see TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
    }

    /*
     * @see TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    protected static String getContent() {
        return getContent(null);
    }

    protected static String getContent(String indexStr) {
        String page = (indexStr != null)? "Page #" + indexStr: "Some Page";
        return "HTTP/1.1 200 OK\r\n" +
            "Content-Type: text/html\r\n\r\n" +
            "<html><head><title>" + page +
            "</title></head>" +
            "<body>" + page +
            "</body></html>";
    }

    protected int writeRandomHTTPRecord(ARCWriter arcWriter, int index)
    throws IOException {
        String indexStr = Integer.toString(index);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Start the record with an arbitrary 14-digit date per RFC2540
        String now = ArchiveUtils.get14DigitDate();
        int recordLength = 0;
        byte[] record = (getContent(indexStr)).getBytes();
        recordLength += record.length;
        baos.write(record);
        // Add the newline between records back in
        baos.write("\n".getBytes());
        recordLength += 1;
        arcWriter.write("http://www.one.net/id=" + indexStr, "text/html",
            "0.1.2.3", Long.parseLong(now), recordLength, baos);
        return recordLength;
    }

    private File writeRecords(String baseName, boolean compress,
        int maxSize, int recordCount)
    throws IOException {
        cleanUpOldFiles(baseName);
        File [] files = {getTmpDir()};
        ARCWriter arcWriter = new ARCWriter(SERIAL_NO, Arrays.asList(files),
            baseName + '-' + PREFIX, compress, maxSize);
        assertNotNull(arcWriter);
        for (int i = 0; i < recordCount; i++) {
            writeRandomHTTPRecord(arcWriter, i);
        }
        arcWriter.close();
        assertTrue("Doesn't exist: " +
                arcWriter.getFile().getAbsolutePath(),
            arcWriter.getFile().exists());
        return arcWriter.getFile();
    }

    private void validate(File arcFile, int recordCount)
    throws FileNotFoundException, IOException {
        ARCReader reader = ARCReaderFactory.get(arcFile);
        assertNotNull(reader);
        List metaDatas = null;
        if (recordCount == -1) {
            metaDatas = reader.validate();
        } else {
            metaDatas = reader.validate(recordCount);
        }
        reader.close();
        // Now, run through each of the records doing absolute get going from
        // the end to start.  Reopen the arc so no context between this test
        // and the previous.
        reader = ARCReaderFactory.get(arcFile);
        for (int i = metaDatas.size() - 1; i >= 0; i--) {
            ARCRecordMetaData meta = (ARCRecordMetaData)metaDatas.get(i);
            ArchiveRecord r = reader.get(meta.getOffset());
            String mimeType = r.getHeader().getMimetype();
            assertTrue("Record is bogus",
                mimeType != null && mimeType.length() > 0);
        }
        reader.close();
        assertTrue("Metadatas not equal", metaDatas.size() == recordCount);
        for (Iterator i = metaDatas.iterator(); i.hasNext();) {
            ARCRecordMetaData r = (ARCRecordMetaData)i.next();
            assertTrue("Record is empty", r.getLength() > 0);
        }
    }

    public void testCheckARCFileSize()
    throws IOException {
        runCheckARCFileSizeTest("checkARCFileSize", false);
    }

    public void testCheckARCFileSizeCompressed()
    throws IOException {
        runCheckARCFileSizeTest("checkARCFileSize", true);
    }

    public void testWriteRecord() throws IOException {
        final int recordCount = 2;
        File arcFile = writeRecords("writeRecord", false,
                DEFAULT_MAX_ARC_FILE_SIZE, recordCount);
        validate(arcFile, recordCount + 1); // Header record.
    }

    public void testRandomAccess() throws IOException {
        final int recordCount = 3;
        File arcFile = writeRecords("writeRecord", true,
            DEFAULT_MAX_ARC_FILE_SIZE, recordCount);
        ARCReader reader = ARCReaderFactory.get(arcFile);
        // Get to second record.  Get its offset for later use.
        boolean readFirst = false;
        String url = null;
        long offset = -1;
        long totalRecords = 0;
        boolean readSecond = false;
        for (final Iterator i = reader.iterator(); i.hasNext(); totalRecords++) {
            ARCRecord ar = (ARCRecord)i.next();
            if (!readFirst) {
                readFirst = true;
                continue;
            }
            if (!readSecond) {
                url = ar.getMetaData().getUrl();
                offset = ar.getMetaData().getOffset();
                readSecond = true;
            }
        }

        reader = ARCReaderFactory.get(arcFile, offset);
        ArchiveRecord ar = reader.get();
        assertEquals(ar.getHeader().getUrl(), url);
        ar.close();

        // Get reader again.  See how iterator works with offset
        reader = ARCReaderFactory.get(arcFile, offset);
        int count = 0;
        for (final Iterator i = reader.iterator(); i.hasNext(); i.next()) {
            count++;
        }
        reader.close();
        assertEquals(totalRecords - 1, count);
    }

    public void testWriteRecordCompressed() throws IOException {
        final int recordCount = 2;
        File arcFile = writeRecords("writeRecordCompressed", true,
                DEFAULT_MAX_ARC_FILE_SIZE, recordCount);
        validate(arcFile, recordCount + 1 /*Header record*/);
    }

    private void runCheckARCFileSizeTest(String baseName, boolean compress)
    throws FileNotFoundException, IOException  {
        writeRecords(baseName, compress, 1024, 15);
        // Now validate all files just created.
        File [] files = FileUtils.getFilesWithPrefix(getTmpDir(), PREFIX);
        for (int i = 0; i < files.length; i++) {
            validate(files[i], -1);
        }
    }

    protected ARCWriter createARCWriter(String NAME, boolean compress) {
        File [] files = {getTmpDir()};
        return new ARCWriter(SERIAL_NO, Arrays.asList(files), NAME,
            compress, DEFAULT_MAX_ARC_FILE_SIZE);
    }

    protected static ByteArrayOutputStream getBaos(String str)
    throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write(str.getBytes());
        return baos;
    }

    protected static void writeRecord(ARCWriter writer, String url,
        String type, int len, ByteArrayOutputStream baos)
    throws IOException {
        writer.write(url, type, "192.168.1.1", (new Date()).getTime(), len,
            baos);
    }

    protected int iterateRecords(ARCReader r)
    throws IOException {
        int count = 0;
        for (Iterator i = r.iterator(); i.hasNext();) {
            ARCRecord rec = (ARCRecord)i.next();
            rec.close();
            if (count != 0) {
                assertTrue("Unexpected URL " + rec.getMetaData().getUrl(),
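The listing above boils down to a write-then-validate round trip: construct an ARCWriter over a directory, append HTTP-style records with write(), close the writer, then re-open the resulting ARC file with ARCReaderFactory and validate it. The fragment below is a minimal sketch of that same flow outside of JUnit, using only the calls that appear in the listing; the output directory, prefix, URL, and size limit are placeholder values, and error handling is deliberately simplified.

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;

import org.archive.io.arc.ARCReader;
import org.archive.io.arc.ARCReaderFactory;
import org.archive.io.arc.ARCWriter;
import org.archive.util.ArchiveUtils;

public class ARCRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder output directory; the test gets this from TmpDirTestCase.getTmpDir().
        File[] dirs = {new File("/tmp")};
        ARCWriter writer = new ARCWriter(new AtomicInteger(),
            Arrays.asList(dirs), "sketch-IAH", false /*compress*/,
            1024 * 1024 /*placeholder max file size*/);

        // Buffer the record body first so its length can be passed to write().
        byte[] body = ("HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n\r\n" +
            "<html><body>hello</body></html>\n").getBytes();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write(body);

        // Same argument order the test uses: URL, mimetype, IP, 14-digit date, length, buffer.
        writer.write("http://www.example.com/", "text/html", "0.1.2.3",
            Long.parseLong(ArchiveUtils.get14DigitDate()), body.length, baos);
        writer.close();

        // Re-open what was just written and validate it, as validate() does in the test.
        ARCReader reader = ARCReaderFactory.get(writer.getFile());
        System.out.println("records: " + reader.validate().size());
        reader.close();
    }
}

As in testWriteRecord(), the record count reported by validate() includes the ARC header record, so a file with one written record validates as two.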
