/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id: DocumentCacheImpl.java 594588 2007-11-13 17:21:25Z vgritsenko $
*/
package org.apache.xindice.core.cache;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xindice.core.Collection;
import org.apache.xindice.core.data.Entry;
import org.apache.xindice.core.data.Key;
import org.apache.xindice.core.data.Value;
import org.apache.xindice.xml.NodeSource;
import org.apache.xindice.xml.SymbolTable;
import org.apache.xindice.xml.dom.DBDocument;
import org.apache.xindice.xml.dom.DOMParser;
import org.apache.xindice.xml.dom.DocumentImpl;
import org.w3c.dom.Document;
import java.util.Map;
import java.util.WeakHashMap;
/**
* DocumentCacheImpl implements a simple Document caching system for
* Collections, mapping collection/key pairs to cached document entries.
*
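* <p>
* Typical usage looks like the following sketch (the col, key, value and
* meta objects are assumed to come from the surrounding Collection code):
* <pre>
*   DocumentCache cache = new DocumentCacheImpl();
*   cache.putEntry(col, key, DocumentCache.COMPRESSED, value, meta);
*   Entry entry = cache.getEntry(col, key);  // null on a cache miss
* </pre>
*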
* <small>
* FIXME: Revisit cache implementation. Most probably, commons collections'
* ReferenceMap should be used instead of WeakHashMap.
* </small>
*
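* A possible replacement along the lines of that FIXME (a sketch, assuming
* the commons-collections 3.x ReferenceMap API) would hold keys hard and
* values softly, so entries survive until memory pressure rather than
* until the next garbage collection:
* <pre>
*   private final Map table = new ReferenceMap(ReferenceMap.HARD, ReferenceMap.SOFT);
* </pre>
*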
* @version $Revision: 594588 $, $Date: 2007-11-13 12:21:25 -0500 (Tue, 13 Nov 2007) $
*/
public class DocumentCacheImpl implements DocumentCache {
private static final Log log = LogFactory.getLog(DocumentCacheImpl.class);
/**
* CacheKey to CacheEntry mapping
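* <p>
* Note: WeakHashMap references its keys weakly, so a mapping can be
* discarded by the garbage collector as soon as its CacheKey is no longer
* strongly referenced elsewhere; the cache is strictly best-effort
* (see the FIXME in the class comment).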
*/
private final Map table = new WeakHashMap();
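/**
* A single cached document: its storage type (COMPRESSED, UNCOMPRESSED
* or BINARY), its key, the raw cached value, and its metadata.
*/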
private class CacheEntry {
private final int type;
private final Key key;
private final Value value;
private Map meta;
public CacheEntry(int type, Key key, Value value, Map meta) {
this.type = type;
this.key = key;
this.value = value;
this.meta = meta;
}
public int getType() {
return type;
}
public Key getKey() {
return key;
}
public Value getValue() {
return value;
}
public Map getMeta() {
return meta;
}
void setMeta(Map meta) {
this.meta = meta;
}
}
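/**
* Returns the cached document for this collection/key pair as an Entry,
* rebuilding the DOM from the cached value, or null on a cache miss
* (or when a cached uncompressed document fails to parse).
*/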
public Entry getEntry(Collection col, Key key) {
CacheEntry e;
synchronized (table) {
e = (CacheEntry) table.get(new CacheKey(col, key));
}
if (e == null) {
return null;
}
switch (e.getType()) {
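// Compressed entries hold the document's packed bytes; rebuild a
// DocumentImpl against the collection's SymbolTable and node source.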
case DocumentCache.COMPRESSED:
{
SymbolTable s = col.getSymbols();
NodeSource ns = new NodeSource(col, key);
Document doc = new DocumentImpl(e.getValue().getData(), s, ns);
return new Entry(key, doc, e.getMeta());
}
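// Uncompressed entries hold the serialized XML text; parse it back
// into a DOM document and re-attach its node source.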
case DocumentCache.UNCOMPRESSED:
try {
Document doc = DOMParser.toDocument(e.getValue());
((DBDocument) doc).setSource(new NodeSource(col, key));
return new Entry(key, doc, e.getMeta());
} catch (Exception ex) {
if (log.isWarnEnabled()) {
log.warn("ignored exception", ex);
}
}
break;
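// Binary entries need no parsing; hand the raw bytes back directly.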
case DocumentCache.BINARY:
return new Entry(Entry.BINARY, key, e.getValue().getData(), e.getMeta());
default:
throw new IllegalStateException("Invalid cache entry type: <" + e.getType() + ">");
}
return null;
}
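/**
* Returns a metadata-only Entry for this collection/key pair, or null
* if nothing is cached for it.
*/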
public Entry getEntryMeta(Collection col, Key key) {
CacheEntry e;
synchronized (table) {
e = (CacheEntry) table.get(new CacheKey(col, key));
}
if (e == null) {
return null;
}
return new Entry(key, e.getMeta());
}
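/**
* Caches a document value of the given type together with its metadata,
* replacing any previous entry for this collection/key pair.
*/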
public void putEntry(Collection col, Key key, int type, Value value, Map meta) {
CacheKey ckey = new CacheKey(col, key);
synchronized (table) {
table.put(ckey, new CacheEntry(type, key, value, meta));
}
}
public void putEntryMeta(Collection col, Key key, int type, Map meta) {
CacheKey ckey = new CacheKey(col, key);
synchronized (table) {
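// Update the metadata of an existing entry, or record a value-less
// placeholder when the document itself is not cached yet.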
CacheEntry e = (CacheEntry) table.get(ckey);
if (e == null) {
e = new CacheEntry(type, key, null, meta);
} else {
e.setMeta(meta);
}
table.put(ckey, e);
}
}
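/**
* Evicts the cached entry for this collection/key pair, if any.
*/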
public void removeEntry(Collection col, Key key) {
synchronized (table) {
table.remove(new CacheKey(col, key));
}
}
/*
* Obtains the value of the cache control processing instruction in the
* given document: a document-level PI whose target is DBDocument.CACHE_CONTROL
* and whose data is DBDocument.CACHE, DBDocument.NOCACHE, or an integer.
* Currently unused and therefore commented out; re-enabling it would also
* require importing org.w3c.dom.Node and org.w3c.dom.NodeList.
*
* @param doc document to inspect for the cache control processing instruction
* @return -1 for DBDocument.CACHE (also the default when no instruction is
*         present), 0 for DBDocument.NOCACHE, otherwise the numeric value
*         of the instruction
*
public static int getCacheControl(Document doc) {
String cache = DBDocument.CACHE;
NodeList childNodes = doc.getChildNodes();
int size = childNodes.getLength();
for (int i = 0; i < size; i++) {
Node n = childNodes.item(i);
if (n.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE && n.getNodeName().equals(DBDocument.CACHE_CONTROL)) {
cache = n.getNodeValue().trim();
break;
}
}
// cache can never be null here: it is initialized to DBDocument.CACHE
// and only ever replaced by the instruction's value
if (cache.equals(DBDocument.CACHE)) {
return -1;
} else if (cache.equals(DBDocument.NOCACHE)) {
return 0;
}
return Integer.parseInt(cache);
}
*/
}