亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? linearhashindex.java

?? 非常棒的java數(shù)據(jù)庫
?? JAVA
?? 第 1 頁 / 共 2 頁
字號:
/*
 * Copyright 2004-2008 H2 Group. Licensed under the H2 License, Version 1.0
 * (http://h2database.com/html/license.html).
 * Initial Developer: H2 Group
 */
package org.h2.index;

import java.sql.SQLException;

import org.h2.constant.ErrorCode;
import org.h2.constant.SysProperties;
import org.h2.engine.Constants;
import org.h2.engine.Session;
import org.h2.message.Message;
import org.h2.result.Row;
import org.h2.result.SearchRow;
import org.h2.store.DataPage;
import org.h2.store.DiskFile;
import org.h2.store.Record;
import org.h2.store.RecordReader;
import org.h2.store.Storage;
import org.h2.table.Column;
import org.h2.table.IndexColumn;
import org.h2.table.TableData;
import org.h2.util.ObjectArray;
import org.h2.value.Value;
import org.h2.value.ValueArray;

/**
 * A linear hash index implementation.
 * Theoretically, this index type should scale better than a b-tree index.
 * At this time, this index is not fully tested.
 */
public class LinearHashIndex extends BaseIndex implements RecordReader {

    // TODO index / linear hash: tune page size
    // private static final int MAX_PAGE_SIZE = 256;
    private static final int RECORDS_PER_BUCKET = 10;
    private static final int UTILIZATION_FOR_SPLIT = 70;
    private static final int UTILIZATION_FOR_MERGE = 60;
    private int readCount;
    // private static final boolean TRACE = false;
    private DiskFile diskFile;
    private Storage storage;
    private TableData tableData;
    private int bucketSize;
    private int blocksPerBucket;
    private int firstBucketBlock;
    private LinearHashHead head;
    private boolean needRebuild;
    // private ObjectArray buckets = new ObjectArray();

    /**
     * Open or create the linear hash index for the given table.
     * The index data is kept in its own file ("&lt;database&gt;.&lt;id&gt;" plus the
     * hash file suffix). If the storage contains no records yet, the index
     * is truncated (initialized empty) and marked as needing a rebuild;
     * otherwise the head record (bucket count, base size, next bucket to
     * split) is read from the first record in the storage.
     *
     * @param session the session
     * @param table the table this index belongs to
     * @param id the object id (also used as the storage id and file name part)
     * @param indexName the name of the index
     * @param columns the indexed columns
     * @param indexType the index type
     * @throws SQLException if the file could not be opened or read
     */
    public LinearHashIndex(Session session, TableData table, int id, String indexName, IndexColumn[] columns, IndexType indexType)
            throws SQLException {
        super(table, id, indexName, columns, indexType);
        this.tableData = table;
        // TODO linear hash: currently, changes are not logged
        String name = database.getName()+"."+id+Constants.SUFFIX_HASH_FILE;
        diskFile = new DiskFile(database, name, "rw", false, false, Constants.DEFAULT_CACHE_SIZE_LINEAR_INDEX);
        diskFile.init();
        // one bucket occupies 4 disk blocks, minus the per-record overhead
        bucketSize = 4 * DiskFile.BLOCK_SIZE - diskFile.getRecordOverhead();
        blocksPerBucket = 4;
        // buckets start after the head record, which occupies the first blocks
        firstBucketBlock = 4;
        storage = database.getStorage(id, diskFile);
        storage.setReader(this);
        rowCount = table.getRowCount(session);
        int pos = storage.getNext(null);
        if (pos == -1) {
            // empty storage: initialize and ask for a rebuild from the table
            truncate(session);
            needRebuild = true;
        } else {
            // the first record in the storage is the index head
            head = (LinearHashHead) storage.getRecord(session, pos);
        }
    }

    /**
     * Persist the index head record (bucket count, base size, next bucket
     * to split) to the storage.
     *
     * @param session the session
     */
    void writeHeader(Session session) throws SQLException {
        storage.updateRecord(session, head);
    }

//    public String getString() throws Exception {
//        // TODO index / linear hash: debug code here
//        StringBuffer buff = new StringBuffer();
//        buff.append("buckets " + bucketCount);
//        int records = 0;
//        int chained = 0;
//        int foreign = 0;
//        int access = 0;
//        for (int i = 0; i < bucketCount; i++) {
//            LinearHashBucket bucket = getBucket(i);
//            if (bucket == null) {
//                throw Message.internal("bucket=" + i + " is empty");
//            }
//            if (bucket.getRecordSize() > RECORDS_PER_BUCKET) {
//                throw Message.internal(
//                    "bucket=" + i + " records=" + bucket.getRecordSize());
//            }
//            records += bucket.getRecordSize();
//            if (bucket.getNextBucket() != -1) {
//                chained++;
//            }
//            for (int j = 0; j < bucket.getRecordSize(); j++) {
//                LinearHashEntry record = bucket.getRecord(j);
//                if (record.home != i) {
//                    foreign++;
//                }
//                int oldReadCount = readCount;
//                get(record.key);
//                access += (readCount - oldReadCount);
//            }
//        }
//        buff.append(" records " + records);
//        buff.append(" utilization " + getUtilization());
//        buff.append(" access " + ((0.0 + access) / records));
//        buff.append(" chained " + chained);
//        buff.append(" foreign " + foreign);
//        if (TRACE) {
//            for (int i = 0; i < bucketCount; i++) {
//                LinearHashBucket bucket = getBucket(i);
//                int f = getForeignHome(i);
//                if (f >= 0) {
//                    buff.append(" from " + f);
//                }
//                buff.append(i);
//                buff.append(" next ");
//                buff.append(bucket.getNextBucket());
//                buff.append("{");
//                for (int j = 0; j < bucket.getRecordSize(); j++) {
//                    if (j > 0) {
//                        buff.append(", ");
//                    }
//                    LinearHashEntry r = bucket.getRecord(j);
//                    buff.append(r.key.toString());
//                    if (r.home != i && r.home != f) {
//                        throw new Exception(
//                            "MULTIPLE LINKS TO! " + buff.toString());
//                    }
//                }
//                buff.append("} ");
//            }
//        }
//        return buff.toString();
//
//    }

    /**
     * Add a key / row-position pair to the index.
     * Implements the linear hashing insert: if utilization is above the
     * split threshold, one bucket is split first. The record is then placed
     * in its home bucket if there is room; otherwise foreign records are
     * evicted to make room, or the record is stored in an overflow bucket
     * chained to the home bucket. If no free bucket for chaining exists,
     * the table is split again and the insert is retried recursively.
     *
     * @param session the session
     * @param key the key value
     * @param value the position of the row
     */
    private void add(Session session, Value key, int value) throws SQLException {
        // trace.debug("add "+key.toString() + " " + value);
        if (getUtilization() >= UTILIZATION_FOR_SPLIT) {
            split(session);
        }
        int hash = key.hashCode();
        int home = getPos(hash);
        int index = home;
        LinearHashEntry record = new LinearHashEntry();
        record.hash = hash;
        record.key = key;
        record.home = home;
        record.value = value;
        // candidate bucket for overflow chaining (must not be the home bucket)
        int free = getNextFree(session, home);
        while (true) {

            LinearHashBucket bucket = getBucket(session, index);
            if (bucket.getRecordSize() < RECORDS_PER_BUCKET) {
                // there is room in this bucket
                addRecord(session, bucket, record);
                break;
            }
            // this bucket is full
            int foreign = getForeignHome(session, index);
            if (foreign >= 0 && foreign != home) {
                // move out foreign records - add this record - add foreign
                // records again
                ObjectArray old = new ObjectArray();
                moveOut(session, foreign, old);
                addRecord(session, bucket, record);
                addAll(session, old);
                break;
            }
            // there is already a link to next
            if (bucket.getNextBucket() > 0) {
                index = bucket.getNextBucket();
                continue;
            }

            int nextFree = getNextFree(session, free);
            if (nextFree < 0) {
                // no bucket left to chain to: grow the table and retry
                // trace.debug("split because no chain " + head.bucketCount);
                split(session);
                add(session, key, value);
                break;
            }

            // it's possible that the bucket was removed from
            // the cache (if searching for a bucket with space
            // scanned many buckets), so re-read it
            bucket = getBucket(session, index);

            // link the full bucket to the free one and continue there
            bucket.setNext(session, free);
            free = nextFree;
            if (getForeignHome(session, free) >= 0) {
                throw Message.getInternalError("next already linked");
            }
            index = bucket.getNextBucket();
        }
    }

    /**
     * Find a bucket that can be used as an overflow bucket: it must have
     * room for more records, must not hold any foreign records, and must
     * not be the excluded bucket. Buckets are scanned from the end of the
     * table backwards.
     *
     * @param session the session
     * @param excluding the bucket id that must not be returned
     * @return the bucket id, or -1 if no suitable bucket exists
     */
    private int getNextFree(Session session, int excluding) throws SQLException {
        int candidate = head.bucketCount - 1;
        while (candidate >= 0) {
            LinearHashBucket b = getBucket(session, candidate);
            boolean full = b.getRecordSize() >= RECORDS_PER_BUCKET;
            if (!full && getForeignHome(session, candidate) < 0 && candidate != excluding) {
                return candidate;
            }
            candidate--;
        }
        return -1;
    }

    /**
     * Get the home bucket of the first record in the given bucket that does
     * not belong there (a "foreign" record stored here via overflow
     * chaining).
     *
     * @param session the session
     * @param bucketId the bucket to inspect
     * @return the home bucket id of the first foreign record, or -1 if the
     *         bucket contains only its own records
     */
    private int getForeignHome(Session session, int bucketId) throws SQLException {
        LinearHashBucket b = getBucket(session, bucketId);
        for (int idx = 0; idx < b.getRecordSize(); idx++) {
            int recordHome = b.getRecord(idx).home;
            if (recordHome != bucketId) {
                return recordHome;
            }
        }
        return -1;
    }

    /**
     * Calculate the bucket id for the given hash code, using the linear
     * hashing address function: positions below the current bucket count
     * use the doubled table size, all others fall back to the base size.
     *
     * @param hash the hash code of the key
     * @return the bucket id
     */
    public int getPos(int hash) {
        // supplemental scramble to spread out weak hash codes
        hash = (hash << 7) - hash + (hash >>> 9) + (hash >>> 17);
        if (hash == Integer.MIN_VALUE) {
            // Math.abs(Integer.MIN_VALUE) is still negative and would
            // produce a negative bucket id below
            hash = 0;
        }
        hash = Math.abs(hash);
        int pos = hash % (head.baseSize + head.baseSize);
        int len = head.bucketCount;
        return pos < len ? pos : (pos % head.baseSize);
    }

    /**
     * Split the next bucket in line: remove all of its records (including
     * chained ones), append one new bucket to the table, and re-insert the
     * removed records so they redistribute over the old and new buckets.
     * After a full round of splits the base size is doubled.
     *
     * @param session the session
     */
    private void split(Session session) throws SQLException {
        // trace.debug("split " + head.nextToSplit);
        ObjectArray moved = new ObjectArray();
        moveOut(session, head.nextToSplit, moved);
        head.nextToSplit++;
        if (head.nextToSplit >= head.baseSize) {
            // one full round is complete: double the table base size
            head.nextToSplit = 0;
            head.baseSize += head.baseSize;
        }
        addBucket(session);
        addAll(session, moved);
    }

    /**
     * Re-insert every entry of the given list into the index.
     *
     * @param session the session
     * @param records the list of LinearHashEntry objects to insert
     */
    private void addAll(Session session, ObjectArray records) throws SQLException {
        for (int pos = 0; pos < records.size(); pos++) {
            LinearHashEntry entry = (LinearHashEntry) records.get(pos);
            add(session, entry.key, entry.value);
        }
    }

    // moves all records of a bucket to the array (including chained)
    private void moveOut(Session session, int home, ObjectArray storeIn) throws SQLException {
        LinearHashBucket bucket = getBucket(session, home);
        int foreign = getForeignHome(session, home);
        while (true) {
            for (int i = 0; i < bucket.getRecordSize(); i++) {
                LinearHashEntry r = bucket.getRecord(i);
                if (r.home == home) {
                    storeIn.add(r);
                    removeRecord(session, bucket, i);
                    i--;
                }
            }
            if (foreign >= 0) {
                // this block contains foreign records
                // and therefore all home records have been found
                // (and it would be an error to set next to -1)
                moveOut(session, foreign, storeIn);
                if (SysProperties.CHECK && getBucket(session, foreign).getNextBucket() != -1) {
                    throw Message.getInternalError("moveOut " + foreign);
                }

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
国产精品99久久久久久似苏梦涵| 欧美日本韩国一区二区三区视频| www.久久精品| 91精品国产91久久久久久最新毛片| 国产欧美一区二区精品性色| 午夜精品久久久久久久蜜桃app| 国产 欧美在线| 欧美一级高清片在线观看| 亚洲免费视频中文字幕| 国产福利一区二区| 欧美大片日本大片免费观看| 亚洲精品中文在线影院| 成人激情黄色小说| 欧美videos中文字幕| 亚洲小说春色综合另类电影| 成人激情午夜影院| 久久人人超碰精品| 天天操天天干天天综合网| 色婷婷亚洲精品| 亚洲天堂av老司机| 成人在线一区二区三区| xnxx国产精品| 国内精品写真在线观看| 日韩精品专区在线| 蜜臀av一区二区在线免费观看| 欧美日韩免费视频| 亚洲国产精品一区二区久久恐怖片 | 欧美午夜精品免费| 国产精品区一区二区三| 粉嫩嫩av羞羞动漫久久久| 2020国产成人综合网| 美女尤物国产一区| 日韩亚洲欧美在线观看| 老司机精品视频一区二区三区| 欧美另类videos死尸| 亚洲成a人片在线不卡一二三区 | 亚洲国产成人午夜在线一区| 国产剧情在线观看一区二区| 久久影院视频免费| 国产宾馆实践打屁股91| 国产精品美女久久久久久久| 成人激情文学综合网| 日韩毛片精品高清免费| 91免费版pro下载短视频| 亚洲黄色免费电影| 欧美二区三区的天堂| 蜜乳av一区二区| 综合久久国产九一剧情麻豆| 色中色一区二区| 亚洲成av人片在线观看| 日韩欧美三级在线| 国产麻豆精品theporn| 国产日韩精品久久久| 91美女在线视频| 亚洲第一在线综合网站| www一区二区| 99久久精品免费看国产| 亚洲国产综合人成综合网站| 91精品国产入口| 国产乱人伦偷精品视频免下载| 国产精品国产三级国产普通话三级| 色婷婷狠狠综合| 视频一区欧美日韩| 久久九九全国免费| 91福利国产精品| 国内成+人亚洲+欧美+综合在线| 国产欧美久久久精品影院| 在线观看三级视频欧美| 精品一区二区三区蜜桃| 国产精品久久国产精麻豆99网站| 欧美性色黄大片| 国产精品一区专区| 亚洲一区二区三区四区五区黄 | 亚洲一二三区在线观看| 久久无码av三级| 色999日韩国产欧美一区二区| 麻豆91小视频| 亚洲一区精品在线| 久久久久国产一区二区三区四区 | 国产色91在线| 欧美电影一区二区| 成人高清视频免费观看| 天天操天天干天天综合网| 亚洲国产精品ⅴa在线观看| 欧美日韩一卡二卡| 丰满少妇在线播放bd日韩电影| 亚洲一卡二卡三卡四卡无卡久久| 国产午夜精品一区二区三区嫩草| 欧美日韩一卡二卡| 91污片在线观看| 国产成人综合亚洲网站| 视频在线观看一区二区三区| 国产精品久久夜| 久久亚洲影视婷婷| 欧美一级电影网站| 欧美综合在线视频| 成人av资源站| 国产成人精品一区二区三区网站观看| 亚洲成人免费看| 亚洲精品第一国产综合野| 国产精品久久三区| 精品国产91久久久久久久妲己| 欧美日韩你懂的| 欧美色欧美亚洲另类二区| 91丨porny丨国产| 波多野洁衣一区| 国产99久久精品| 欧美三片在线视频观看| 在线一区二区三区四区五区 | 蜜乳av一区二区三区| 日韩成人午夜电影| 三级在线观看一区二区| 亚洲一区二区三区四区五区中文| 一区二区免费在线播放| 亚洲美女视频一区| 亚洲激情欧美激情| 亚洲黄色小说网站| 亚洲电影视频在线| 亚洲电影激情视频网站| 婷婷激情综合网| 天堂精品中文字幕在线| 丝袜诱惑亚洲看片| 琪琪一区二区三区| 精品一区二区三区在线播放视频| 国精品**一区二区三区在线蜜桃| 久久99久国产精品黄毛片色诱| 久久成人免费日本黄色| 国产真实乱对白精彩久久| 国产精品一二三在| 成人不卡免费av| 在线免费观看成人短视频| 欧美日韩视频第一区| 日韩精品影音先锋| 国产亚洲女人久久久久毛片| 国产精品久久久久一区二区三区 | 久久久久久一级片| 国产精品嫩草99a| 一区二区三区中文免费| 日韩成人dvd| 国产精品66部| 一本大道久久a久久综合婷婷| 欧美日韩免费不卡视频一区二区三区| 欧美一区二区成人| 中文久久乱码一区二区| 亚洲视频一区在线| 欧美bbbbb| 国产mv日韩mv欧美| 欧美日韩一区三区| 久久综合给合久久狠狠狠97色69| 国产精品欧美久久久久无广告| 亚洲综合免费观看高清完整版在线 | 日韩国产精品91| 国产91精品免费| 欧美日韩午夜在线| 亚洲图片欧美视频| 国产精品91一区二区| 精品视频免费看| 中文字幕av资源一区| 婷婷成人综合网| 成人av在线网站| 日韩精品一区二区三区在线播放| 国产精品电影一区二区| 日韩不卡一二三区| 色哟哟欧美精品| 
国产丝袜美腿一区二区三区| 亚洲第一福利一区| 9人人澡人人爽人人精品| 欧美一二区视频| 亚洲女性喷水在线观看一区| 国产一区欧美二区| 6080yy午夜一二三区久久| 国产精品久久久久久久久免费相片 | 国产乱码精品1区2区3区| 欧美日韩亚洲国产综合| 国产精品久久久久久亚洲伦| 精品一区二区三区在线播放| 欧美日韩国产在线观看| 亚洲少妇中出一区| 国产成人午夜视频| 欧美成人一区二区三区| 亚洲电影视频在线| 91黄色免费观看| 国产精品久久久久久久久久免费看| 国内精品国产三级国产a久久| 欧美美女网站色| 一级精品视频在线观看宜春院 | 免费成人你懂的| 精品1区2区3区| 亚洲午夜久久久久久久久电影院| 99精品一区二区| 中文字幕亚洲一区二区va在线| 国产一区二区精品在线观看| 欧美xxxxxxxx| 紧缚捆绑精品一区二区| 日韩午夜小视频| 久久精品国产99国产精品| 欧美一区二区三区爱爱| 男女男精品视频网| 欧美高清精品3d| 另类小说色综合网站|