recover.java
An excellent Java database (JAVA)
                }
                storageId = s.readInt();
                if (storageId < 0) {
                    writeDataError(writer, "storageId<0", s.getBytes(), blockCount);
                    continue;
                }
                int page = block / DiskFile.BLOCKS_PER_PAGE;
                if (pageOwners[page] != 0 && pageOwners[page] != storageId) {
                    writeDataError(writer, "double allocation, previous=" + pageOwners[page] + " now=" + storageId, s
                            .getBytes(), blockCount);
                } else {
                    pageOwners[page] = storageId;
                }
                writer.println("// [" + block + "] page:" + page + " blocks:" + blockCount + " storage:" + storageId);
            }
            writer.close();
        } catch (Throwable e) {
            writeError(writer, e);
        } finally {
            IOUtils.closeSilently(writer);
            closeSilently(store);
        }
    }

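    /**
     * Dump the contents of a database data file into a SQL script.
     * For each storage id a generic table O_<storageId> with VARCHAR columns
     * is created and filled with the recovered rows; rows of storage 0 are
     * parsed as meta records so that, where possible, the original schema is
     * re-created and the data copied back into the original tables before the
     * temporary O_* tables are dropped.
     */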
    private void dumpData(String fileName) throws SQLException {
        PrintWriter writer = null;
        FileStore store = null;
        try {
            setDatabaseName(fileName.substring(0, fileName.length() - Constants.SUFFIX_DATA_FILE.length()));
            writer = getWriter(fileName, ".sql");
            writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB FOR \"" + this.getClass().getName() + ".readClob\";");
            writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB FOR \"" + this.getClass().getName() + ".readBlob\";");
            ObjectArray schema = new ObjectArray();
            HashSet objectIdSet = new HashSet();
            HashMap tableMap = new HashMap();
            textStorage = Database.isTextStorage(fileName, false);
            byte[] magic = Database.getMagic(textStorage);
            store = FileStore.open(null, fileName, "r", magic);
            long length = store.length();
            int offset = FileStore.HEADER_LENGTH;
            int blockSize = DiskFile.BLOCK_SIZE;
            int blocks = (int) (length / blockSize);
            blockCount = 1;
            int[] pageOwners = new int[blocks / DiskFile.BLOCKS_PER_PAGE];
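            // Scan the data file block by block: each allocated record starts
            // with the number of blocks it occupies, followed by the storage id,
            // the number of values (recordLength) and the serialized values.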
            for (int block = 0; block < blocks; block += blockCount) {
                store.seek(offset + (long) block * blockSize);
                byte[] buff = new byte[blockSize];
                DataPage s = DataPage.create(this, buff);
                store.readFully(buff, 0, blockSize);
                blockCount = s.readInt();
                storageId = -1;
                recordLength = -1;
                valueId = -1;
                if (blockCount == 0) {
                    // free block
                    blockCount = 1;
                    continue;
                } else if (blockCount < 0) {
                    writeDataError(writer, "blockCount<0", s.getBytes(), 1);
                    blockCount = 1;
                    continue;
                } else if ((blockCount * blockSize) >= Integer.MAX_VALUE / 4) {
                    writeDataError(writer, "blockCount=" + blockCount, s.getBytes(), 1);
                    blockCount = 1;
                    continue;
                }
                writer.println("-- block " + block + " - " + (block + blockCount - 1));
                try {
                    s.checkCapacity(blockCount * blockSize);
                } catch (OutOfMemoryError e) {
                    writeDataError(writer, "out of memory", s.getBytes(), 1);
                    blockCount = 1;
                    continue;
                }
                if (blockCount > 1) {
                    if ((blockCount * blockSize) < 0) {
                        writeDataError(writer, "wrong blockCount", s.getBytes(), 1);
                        blockCount = 1;
                    } else {
                        store.readFully(s.getBytes(), blockSize, blockCount * blockSize - blockSize);
                    }
                }
                try {
                    s.check(blockCount * blockSize);
                } catch (SQLException e) {
                    writeDataError(writer, "wrong checksum", s.getBytes(), 1);
                    blockCount = 1;
                    continue;
                }
                storageId = s.readInt();
                if (storageId < 0) {
                    writeDataError(writer, "storageId<0", s.getBytes(), blockCount);
                    continue;
                }
                int page = block / DiskFile.BLOCKS_PER_PAGE;
                if (pageOwners[page] != 0 && pageOwners[page] != storageId) {
                    writeDataError(writer, "double allocation, previous=" + pageOwners[page] + " now=" + storageId, s
                            .getBytes(), blockCount);
                } else {
                    pageOwners[page] = storageId;
                }
                recordLength = s.readInt();
                if (recordLength <= 0) {
                    writeDataError(writer, "recordLength<=0", s.getBytes(), blockCount);
                    continue;
                }
                Value[] data;
                try {
                    data = new Value[recordLength];
                } catch (OutOfMemoryError e) {
                    writeDataError(writer, "out of memory", s.getBytes(), blockCount);
                    continue;
                }
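                // The first time a storage id is seen, emit a generic
                // CREATE TABLE O_<storageId>(C0 VARCHAR, C1 VARCHAR, ...) to
                // hold the recovered rows.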
                if (!objectIdSet.contains(ObjectUtils.getInteger(storageId))) {
                    objectIdSet.add(ObjectUtils.getInteger(storageId));
                    StringBuffer sb = new StringBuffer();
                    sb.append("CREATE TABLE O_" + storageId + "(");
                    for (int i = 0; i < recordLength; i++) {
                        if (i > 0) {
                            sb.append(", ");
                        }
                        sb.append("C");
                        sb.append(i);
                        sb.append(" VARCHAR");
                    }
                    sb.append(");");
                    writer.println(sb.toString());
                    writer.flush();
                }
                StringBuffer sb = new StringBuffer();
                sb.append("INSERT INTO O_" + storageId + " VALUES(");
                for (valueId = 0; valueId < recordLength; valueId++) {
                    try {
                        Value v = s.readValue();
                        data[valueId] = v;
                        if (valueId > 0) {
                            sb.append(", ");
                        }
                        sb.append(getSQL(v));
                    } catch (Exception e) {
                        writeDataError(writer, "exception " + e, s.getBytes(), blockCount);
                        continue;
                    } catch (OutOfMemoryError e) {
                        writeDataError(writer, "out of memory", s.getBytes(), blockCount);
                        continue;
                    }
                }
                sb.append(");");
                writer.println(sb.toString());
                writer.flush();
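                // Storage 0 holds the meta data; try to parse each of its rows
                // as a MetaRecord so the original CREATE statements and table
                // names can be replayed at the end of the script.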
                if (storageId == 0) {
                    try {
                        SimpleRow r = new SimpleRow(data);
                        MetaRecord meta = new MetaRecord(r);
                        schema.add(meta);
                        if (meta.getObjectType() == DbObject.TABLE_OR_VIEW) {
                            String sql = data[3].getString();
                            int end = sql.indexOf('(');
                            if (end >= 0) {
                                int start = sql.lastIndexOf(' ', end);
                                String name = sql.substring(start, end).trim();
                                tableMap.put(ObjectUtils.getInteger(meta.getId()), name);
                            }
                        }
                    } catch (Throwable t) {
                        writeError(writer, t);
                    }
                }
            }
            MetaRecord.sort(schema);
            for (int i = 0; i < schema.size(); i++) {
                MetaRecord m = (MetaRecord) schema.get(i);
                writer.println(m.getSQL() + ";");
            }
            for (Iterator it = tableMap.entrySet().iterator(); it.hasNext();) {
                Map.Entry entry = (Entry) it.next();
                Integer objectId = (Integer) entry.getKey();
                String name = (String) entry.getValue();
                writer.println("INSERT INTO " + name + " SELECT * FROM O_" + objectId + ";");
            }
            for (Iterator it = objectIdSet.iterator(); it.hasNext();) {
                Integer objectId = (Integer) it.next();
                writer.println("DROP TABLE O_" + objectId + ";");
            }
            writer.println("DROP ALIAS READ_CLOB;");
            writer.println("DROP ALIAS READ_BLOB;");
            writer.close();
        } catch (Throwable e) {
            writeError(writer, e);
        } finally {
            IOUtils.closeSilently(writer);
            closeSilently(store);
        }
    }

    private void closeSilently(FileStore store) {
        if (store != null) {
            store.closeSilently();
            store = null;
        }
    }

    private void writeError(PrintWriter writer, Throwable e) {
        if (writer != null) {
            writer.println("// error: " + e);
        }
        logError("Error", e);
    }

    /**
     * INTERNAL
     */
    public boolean getTextStorage() {
        return textStorage;
    }

    /**
     * INTERNAL
     */
    public String getDatabasePath() {
        return databaseName;
    }

    /**
     * INTERNAL
     */
    public FileStore openFile(String name, String mode, boolean mustExist) throws SQLException {
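        // The requested mode and mustExist flag are ignored here; the file is
        // always opened read-write with the standard magic header.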
        return FileStore.open(this, name, "rw", Constants.MAGIC_FILE_HEADER.getBytes());
    }

    /**
     * INTERNAL
     */
    public int getChecksum(byte[] data, int start, int end) {
        int x = 0;
        while (start < end) {
            x += data[start++];
        }
        return x;
    }

    /**
     * INTERNAL
     */
    public void checkPowerOff() throws SQLException {
    }

    /**
     * INTERNAL
     */
    public void checkWritingAllowed() throws SQLException {
    }

    /**
     * INTERNAL
     */
    public void freeUpDiskSpace() throws SQLException {
    }

    /**
     * INTERNAL
     */
    public void handleInvalidChecksum() throws SQLException {
        throw new SQLException("Invalid Checksum");
    }

    /**
     * INTERNAL
     */
    public int compareTypeSave(Value a, Value b) throws SQLException {
        throw Message.getInternalError();
    }

    /**
     * INTERNAL
     */
    public int getMaxLengthInplaceLob() {
        throw Message.getInternalError();
    }

    /**
     * INTERNAL
     */
    public int allocateObjectId(boolean b, boolean c) {
        throw Message.getInternalError();
    }

    /**
     * INTERNAL
     */
    public String createTempFile() throws SQLException {
        throw Message.getInternalError();
    }

    /**
     * INTERNAL
     */
    public String getLobCompressionAlgorithm(int type) {
        return null;
    }

    /**
     * INTERNAL
     */
    public Object getLobSyncObject() {
        return this;
    }

    /**
     * INTERNAL
     */
    public boolean getLobFilesInDirectories() {
        return lobFilesInDirectories;
    }

    /**
     * INTERNAL
     */
    public SmallLRUCache getLobFileListCache() {
        return null;
    }

}
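
This listing appears to be a fragment of the H2 database Recover tool. The .sql script that dumpData produces can be replayed into a fresh database; the sketch below shows one way to do that with plain JDBC and H2's RUNSCRIPT command. The JDBC URL, credentials and script file name are illustrative assumptions, not part of the original code.

// Minimal sketch (assumptions: the H2 driver is on the classpath, the dump
// script is named "test.data.sql", and the target database URL is
// "jdbc:h2:~/recovered").
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ReplayDump {
    public static void main(String[] args) throws Exception {
        Class.forName("org.h2.Driver");
        try (Connection conn = DriverManager.getConnection("jdbc:h2:~/recovered", "sa", "");
             Statement stat = conn.createStatement()) {
            // The script's READ_CLOB / READ_BLOB aliases reference the recovery
            // tool class, so it must also be on the classpath when the script runs.
            stat.execute("RUNSCRIPT FROM 'test.data.sql'");
        }
    }
}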
