Package org.chaidb.db.index.btree

Source Code of org.chaidb.db.index.btree.Id2nodeBTree

/*
* Copyright (C) 2006  http://www.chaidb.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
* GNU General Public License for more details.
*
*/

package org.chaidb.db.index.btree;

import org.apache.log4j.Logger;
import org.chaidb.db.DbEnvironment;
import org.chaidb.db.KernelContext;
import org.chaidb.db.api.Converter;
import org.chaidb.db.api.keys.IntKey;
import org.chaidb.db.api.keys.NodeId;
import org.chaidb.db.exception.ChaiDBException;
import org.chaidb.db.exception.EncodingException;
import org.chaidb.db.exception.ErrorCode;
import org.chaidb.db.helper.ByteTool;
import org.chaidb.db.index.IDBIndex;
import org.chaidb.db.index.Key;
import org.chaidb.db.index.btree.bufmgr.PageNumber;
import org.chaidb.db.log.logrecord.BTreeSpecLogRecord;

import java.io.File;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.LinkedList;

/**
* User: arefool
* Date: Mar 14, 2003
* Time: 11:56:47 AM
*/
public class Id2nodeBTree extends BTree {

    private static final Logger logger = Logger.getLogger(AbstractBTree.class);

    /* Index tree: key = DocID, value = root page number of that document's data tree */
    public BTree indexBTree;
    /* In-memory cache of DocID -> data tree root page number */
    Hashtable id2root = new Hashtable();

    public static int MAX_ENTRY_NUMBERS = 2000;

    public static int KICK_NUMBERS = 1;

    /* Tracks clean id2root entries, i.e. those eligible to be evicted from the cache */
    LinkedList cleanIdQueue = new LinkedList();


    public static final String IMPLICITLY_OPEN_FILE = IDBIndex.IMPLICITLY_OPEN_FILE;
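
    /*
     * Structural note: each document is stored as its own subtree.  The helper
     * indexBTree (opened from IMPLICITLY_OPEN_FILE) persistently maps a document
     * id to the page number of that document's subtree root; id2root caches those
     * root page numbers in memory, bounded by MAX_ENTRY_NUMBERS, and cleanIdQueue
     * records which cached entries are clean and therefore evictable (up to
     * KICK_NUMBERS at a time) when the cache is full; see getDocRoot().
     */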

    public Id2nodeBTree() {
    }

    public void acquire(KernelContext kContext, int mode) throws ChaiDBException {
    }

    public void release(KernelContext kContext) {
    }

    protected AbstractBTree GenerateClonedBTree() throws ChaiDBException {
        return new Id2nodeBTree();
    }

    private byte[] lookupWithoutConverting(Key key, KernelContext kContext) throws ChaiDBException {
        if (key == null) return null;

        byte[] bytes = null;

        try {
            int docID;
            PageNumber root;

            /*Modified by ben zhang at aug, 12, 2002 assumed key type is nodeid */
            // Used only by id2node
            docID = ((NodeId) key).getDocId();
            root = getDocRoot(docID, kContext);

            bytes = lookup(key, root, kContext);
        } finally {
            doFinalJob();
        }

        return bytes;
    }

    public Object lookup(Key key, KernelContext kContext) throws ChaiDBException {
        byte[] bytes = null;
        for (int i = 0; i < 3; i++) {
            try {
                bytes = lookupWithoutConverting(key, kContext);
            } catch (ChaiDBException e) {
                if (i == 2) {
                    throw e;
                } else {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException ie) {
                        // ignore the interruption and retry
                    }
                    continue;

                }
            }
            break;
        }//end for
        return decodeFromByteArray(bytes, key);
    }

    public Object lookup(Converter localConverter, Key key, KernelContext kContext) throws ChaiDBException {
        byte[] bytes = null;
        for (int i = 0; i < 3; i++) {
            try {
                bytes = lookupWithoutConverting(key, kContext);
            } catch (ChaiDBException e) {
                if (i == 2) {
                    throw e;
                } else {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException ie) {
                        // ignore the interruption and retry
                    }
                    continue;

                }
            }
            break;
        }//end for

        if (bytes == null) return null;

        if (localConverter == null) {
            //unlock the one and ONLY one page we got in search
            return bytes;
        } else {
            Object obj;
            try {
                /* Modified by ben zhang at aug,12,2002 pending issue*/
                obj = localConverter.decodeFromByteArray(key, bytes);
                //unlock the one and ONLY one page we got in search
                return obj;
            } catch (EncodingException ee) {
                logger.error(ee);
                // details -ranjeet
                String details = "Converter failed to decode : " + ee.toString() + ".";
                throw new ChaiDBException(ErrorCode.CONVERTER_DECODING_ERROR, details);
            }
        }
    }


    public ArrayList rangeLookup(Key minKey, Key maxKey, boolean includeMinKey, boolean includeMaxKey, KernelContext kContext) throws ChaiDBException {
        return null;
    }

    public boolean delete(Key key, KernelContext kContext) throws ChaiDBException {
        if (DbEnvironment.READ_ONLY) throw new ChaiDBException(ErrorCode.DATABASE_IS_READONLY);

        OperResult result = null;
        boolean succ = false;

        this.getBTreeSpec().setModified(true);
        //attempt this operation, retrying up to MAX_RETRYS times on lock contention.
        for (int retry = 0; retry < BTreeLock.MAX_RETRYS + 1 && !succ; retry++)
            try {
                // if (islockTree) lm.getLock(MR1WLock.WRITE);
                PageNumber root;
                root = getDocRoot(((NodeId) key).getDocId(), kContext);

                result = delete(key, root, kContext);
                succ = true;

                if (result.newRoot == null) break;

                //save new root
                updateDocRoot(((NodeId) key).getDocId(), result.newRoot, kContext);


            } catch (ChaiDBException e) {
                if (e.getErrorCode() != ErrorCode.LOCK_NO_GRANT || retry == BTreeLock.MAX_RETRYS) throw e;
            } finally {
                doFinalJob();
            }

        return result.success;
    }

    public void store(Key key, Object data, short mode, KernelContext kContext) throws ChaiDBException {
        if (DbEnvironment.READ_ONLY) throw new ChaiDBException(ErrorCode.DATABASE_IS_READONLY);

        byte[] value = encodeToByteArray(data);
        if (value == null) return;
        this.getBTreeSpec().setModified(true);
        boolean succ = false;

        //attempt this operation, retrying up to MAX_RETRYS times on lock contention.
        for (int retry = 0; retry < BTreeLock.MAX_RETRYS + 1 && !succ; retry++)
            try {
                PageNumber root = getDocRoot(((NodeId) key).getDocId(), kContext);

                root = store(key, value, mode, root, kContext);
                if (root == null) {
                    succ = true;
                    return;
                }

                // save new root page number
                updateDocRoot(((NodeId) key).getDocId(), root, kContext);
                succ = true;

            } catch (ChaiDBException e) {
                if (e.getErrorCode() != ErrorCode.LOCK_NO_GRANT || retry == BTreeLock.MAX_RETRYS) throw e;
            } finally {
                doFinalJob();
            }
    }


    public Enumeration keys(KernelContext kContext) {
        Id2nodeBTreeEnumerator id2nodeBTreeEnum = null;
        for (int i = 0; i < 3; i++) {
            try {
                id2nodeBTreeEnum = new Id2nodeBTreeEnumerator(this, btreeSpec, getBuffer(), kContext);
            } catch (ChaiDBException e) {
                if (i == 2) {
                    logger.error(e);
                    return null;
                } else {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException ie) {
                        // ignore the interruption and retry
                    }
                    continue;

                }
            }
            break;
        }//end for
        return id2nodeBTreeEnum;

    }

/*    public Enumeration keys(int docId, KernelContext kContext) {
      try {
        return new Id2nodeBTreeEnumerator(this, btreeSpec, getBuffer(),docId, kContext);
      }catch(ChaiDBException e) {
        logger.error(e);
        return null;
      }
    }
*/

    public void open(File dataSource, KernelContext kContext) throws ChaiDBException {

        isReady = false;
        initId2nodeBTree(dataSource, kContext);
        super.open(dataSource, kContext);
    }

    /**
     * Load the root page number of the specified document's subtree,
     * consulting the in-memory id2root cache before the index tree.
     */
    private PageNumber getDocRoot(int docID, KernelContext kContext) throws ChaiDBException {

        PageNumber docRoot;
        docRoot = (PageNumber) id2root.get(new Integer(docID));

        if (docRoot != null) {
            return docRoot;
        }
        Key tmpKey = new IntKey(docID);
        indexBTree.acquire(kContext, READ_MODE);
        byte[] bRoot = null;
        try {
            bRoot = (byte[]) indexBTree.lookup(tmpKey, kContext);
        } finally {
            indexBTree.release(kContext);
        }
        if (bRoot != null) docRoot = new PageNumber(ByteTool.bytesToInt(bRoot, 0, btreeSpec.isMsbFirst()));
        else docRoot = new PageNumber(BTreeSpec.INVALID_PAGENO);
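        // Cache the root we just loaded.  The cache is bounded: once it holds
        // MAX_ENTRY_NUMBERS entries, up to KICK_NUMBERS of the oldest clean
        // entries are evicted (dirty roots never sit in cleanIdQueue, so they
        // are not evicted here; flushDocRoot() removes them instead).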
        synchronized (cleanIdQueue) {
            if (id2root.size() >= MAX_ENTRY_NUMBERS) {
                //need to kick out last entry.
                Integer doc_id = null;
                //TODO: if cleanIdQueue is empty nothing can be evicted, so id2root may grow past the limit (memory leak)
                for (int i = 0; i < KICK_NUMBERS && cleanIdQueue.size() > 0; i++) {
                    doc_id = (Integer) cleanIdQueue.removeLast();
                    id2root.remove(doc_id);
                }
            }

            id2root.put(new Integer(docID), docRoot);
            cleanIdQueue.addFirst(new Integer(docID));
        }


        docRoot.setTreeId(id);
        return docRoot;
    }

    /**
     * Record a document's new subtree root. Called during normal
     * (non-recovery) operations.
     */
    private void updateDocRoot(int docID, PageNumber docRoot, KernelContext kContext) throws ChaiDBException {
        int txnId = kContext.getLocker();
        boolean needLog = kContext.getNeedLog();

        //search key2root first
        Integer idocID = new Integer(docID);
        PageNumber subRoot = (PageNumber) id2root.get(idocID);
        if (subRoot == null) {
            Key tmpKey = new IntKey(docID);
            indexBTree.acquire(kContext, READ_MODE);
            byte[] bRoot = null;
            try {
                bRoot = (byte[]) indexBTree.lookup(tmpKey, kContext);
            } finally {
                indexBTree.release(kContext);
            }

            if (bRoot != null) subRoot = new PageNumber(ByteTool.bytesToInt(bRoot, 0, btreeSpec.isMsbFirst()));
            else subRoot = null;

        }

        if (needLog) {
            BTreeSpecLogRecord logRec = new BTreeSpecLogRecord(id, subRoot == null ? -1 : subRoot.getPageNumber(), docRoot.getPageNumber(), BTreeSpecLogRecord.DOC_ROOT_PAGE_NUMBER_FLAG, txnId, docID, IDBIndex.ID2NODE_BTREE);
            logRec.log();
        }

        docRoot.setTreeId(id);
        if (subRoot == null) id2root.put(idocID, docRoot);
        else subRoot.setPageNumber(docRoot);

        //The root is now dirty (e.g. after a delete or node modification that follows a getDocument), so it must no longer sit in the clean (evictable) queue.
        synchronized (cleanIdQueue) {
            cleanIdQueue.remove(idocID);
        }

        //if no txn, flush it to disk directly
        if (!needLog) flushDocRoot(idocID, kContext);
        else getBuffer().addChangedDoc(kContext.getLocker(), idocID, docRoot.getTreeId());
    }

    //Called when a txn commits, or when the btree is destroyed if there is no txn.
    public void flushDocRoot(Integer docID, KernelContext kContext) throws ChaiDBException {

        PageNumber docRoot = (PageNumber) id2root.get(docID);
        if (docRoot == null) {
            //Dirty roots never enter the clean queue, so they cannot have been evicted;
            //if there is no entry in memory, this root does not need to be flushed.
//            if (Debug.DEBUG_ID2ROOT){    //Todo: remove it
//                new Throwable("doc id " + docID.intValue()).printStackTrace(System.err);
//            }
            return;
        }
        this.getBTreeSpec().setModified(true);
        /*Appended by ben zhang at aug, 14, 2002 */
        IntKey tmpKey = new IntKey(docID.intValue());

        try {
            indexBTree.acquire(kContext, WRITE_MODE);
            if (docRoot.getPageNumber() <= 0) {
                indexBTree.delete(tmpKey, kContext);
            } else {
                indexBTree.store(tmpKey, ByteTool.intToBytes(docRoot.getPageNumber(), btreeSpec.isMsbFirst()), STORE_REPLACE, kContext);

            }
        } finally {
            indexBTree.release(kContext);
        }


        id2root.remove(docID);
    }

    /**
     * Called during rollback or recovery.
     */
    public void updateDocRootForRollBack(byte[] docID, PageNumber docRoot) {
        byte[] ba = new byte[4];
        System.arraycopy(docID, 0, ba, 1, docID.length);
        Integer idocID = new Integer(ByteTool.bytesToInt(ba, 0, btreeSpec.isMsbFirst()));

        if (docRoot.getPageNumber() == BTreeSpec.INVALID_PAGENO) {
            id2root.remove(idocID);
            //The cached entry is being invalidated during rollback (e.g. of a delete or node modification that followed a getDocument), so drop it from the clean queue as well.
            synchronized (cleanIdQueue) {
                cleanIdQueue.remove(idocID);
            }

            return;
        }

        PageNumber subRoot = (PageNumber) id2root.get(idocID);
        if (subRoot == null) id2root.put(idocID, docRoot);
        else subRoot.setPageNumber(docRoot);


        synchronized (cleanIdQueue) {
            cleanIdQueue.remove(idocID);
            cleanIdQueue.addFirst(idocID);
        }

    }

    public short getType() {
        return IDBIndex.ID2NODE_BTREE;
    }

    private void initId2nodeBTree(File dataSource, KernelContext kContext) throws ChaiDBException {
        String dir = dataSource.getAbsolutePath();
        dir = dir.substring(0, dir.lastIndexOf(File.separator));
        BTreeSpec indexSpec = getBuffer().getBTreeSpec(dir + File.separator + IMPLICITLY_OPEN_FILE, IDBIndex.BTREE, kContext);
        indexBTree = (BTree) indexSpec.btree;
        //for index tree, adopting locking the whole tree
        id2root = new Hashtable();
        cleanIdQueue = new LinkedList();
    }

    /**
     * Delete the subtree of a document.
     * Callers are expected to have locked this subtree (document) already,
     * so no additional locking is done here.
     *
     * @param docId    document id
     * @param kContext kernel context of the calling operation
     * @throws ChaiDBException
     */
    public void deleteSubTree(int docId, KernelContext kContext) throws ChaiDBException {
        this.getBTreeSpec().setModified(true);
        PageNumber subRoot = new PageNumber(getDocRoot(docId, kContext));
        updateDocRoot(docId, new PageNumber(BTreeSpec.INVALID_PAGENO), kContext);
        delSubTreeCore(subRoot, false, kContext);

    }
}
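
For orientation, here is a minimal usage sketch (not part of the original source). The signatures of open, store, lookup and delete are taken from the listing above; how a KernelContext is obtained, the file path, and the external reachability of the STORE_REPLACE constant (referenced unqualified inside the class) are assumptions made purely for illustration.

    // Hypothetical caller -- anything marked "assumed" is not confirmed by the listing above.
    void storeAndFetch(KernelContext kContext, NodeId key, byte[] nodeBytes) throws ChaiDBException {
        Id2nodeBTree tree = new Id2nodeBTree();
        tree.open(new File("/data/chaidb/id2node.btree"), kContext);  // path is illustrative
        tree.store(key, nodeBytes, STORE_REPLACE, kContext);          // mode constant as used inside the class (assumed reachable)
        Object node = tree.lookup(key, kContext);                     // decoded value, or raw bytes if no converter is set
        boolean removed = tree.delete(key, kContext);                 // true when the key was found and removed
    }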