package scigest.core;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.AndFileFilter;
import org.apache.commons.io.filefilter.DirectoryFileFilter;
import org.apache.commons.io.filefilter.HiddenFileFilter;
import org.apache.commons.io.filefilter.SuffixFileFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import scigest.configuration.ScigestConfiguration;
/**
 * The main class for generating THREDDS catalog files.
 *
 * <p>Reads the data source directory and directory-layout strategy from the
 * application configuration, maps data (sub)directories to the NetCDF
 * ({@code .nc}) files they contain, and delegates per-directory catalog
 * generation to {@link ParentDataset}.
 *
 * @author Feiyi Wang
 */
@Component
public class CatalogPublisher {

    /** Root directory containing the source data files (config key {@code data.src}). */
    private File dataSrcDir;

    /** Directory layout strategy, either {@code "flat"} or {@code "nested"} (config key {@code dir.layout}). */
    private String dirLayout;

    private Configuration config;

    /** Strategy that maps subdirectories to the data files they contain. */
    @Autowired
    @Qualifier("datasetMapping")
    private DatasetMapping dm;

    // One logger per class, not per instance (SLF4J convention).
    private static final Logger logger = LoggerFactory.getLogger(CatalogPublisher.class);

    /**
     * Accumulated catalog fragments, written out by {@link #persistCatalogs()}.
     * NOTE(review): presumably populated by {@code ParentDataset.threddsGen()} —
     * confirm against that class; not visible from this file.
     */
    protected static List<String> catalogList = new ArrayList<String>();

    CatalogPublisher() {
        this.config = ConfigReader.load();
        this.dataSrcDir = new File(config.getString("data.src"));
        this.dirLayout = config.getString("dir.layout");
    }

    /**
     * Publishes a file map as returned by the configured implementation of the
     * {@link DatasetMapping} interface, then persists the resulting catalogs.
     *
     * @throws Exception if dataset mapping, catalog generation, or persistence fails
     */
    public void publishFileMap() throws Exception {
        Map<File, ArrayList<File>> fileMap = dm.getFileMap(this.dataSrcDir);
        // entrySet() avoids the redundant keySet()+get() double lookup
        for (Map.Entry<File, ArrayList<File>> entry : fileMap.entrySet()) {
            File subDir = entry.getKey();
            String[] ncFiles = getFileNameList(entry.getValue());
            System.out.printf("Processing directory [%s] with [%d] files ... \n",
                    subDir.getName(), ncFiles.length);
            ParentDataset pds = new ParentDataset(subDir, ncFiles);
            pds.threddsGen();
        }
        // persist all accumulated catalog fragments into a single file
        persistCatalogs();
    }

    /**
     * Writes every accumulated catalog fragment to the configured catalog file,
     * replacing any previous version of that file.
     *
     * @throws Exception if the catalog file cannot be written
     */
    private void persistCatalogs() throws Exception {
        ScigestConfiguration sc = new ScigestConfiguration();
        File catalogFile = sc.getCatalogFile();
        // Remove any stale catalog first. File.delete() signals failure via its
        // return value (no exception), so check it instead of ignoring it.
        if (catalogFile.exists() && !catalogFile.delete()) {
            logger.warn("Could not delete existing catalog file: {}", catalogFile);
        }
        for (String catalog : catalogList) {
            // append mode: fragments are concatenated into one catalog file
            FileUtils.writeStringToFile(catalogFile, catalog, true);
        }
    }

    /**
     * Converts a list of File objects into an array of canonical path names.
     *
     * @param fileList the files to convert; must not be {@code null}
     * @return an array of full (canonical) path names, in the same order
     * @throws IOException if a canonical path cannot be resolved
     */
    public String[] getFileNameList(ArrayList<File> fileList) throws IOException {
        List<String> filenameList = new ArrayList<String>(fileList.size());
        for (File f : fileList) {
            filenameList.add(f.getCanonicalPath());
        }
        return filenameList.toArray(new String[filenameList.size()]);
    }

    /**
     * Based on the "flat" or "nested" directory-layout configuration, this
     * method invokes the corresponding publishing steps.
     *
     * <p>Use {@link #publishFileMap()} instead.
     *
     * @deprecated superseded by {@link #publishFileMap()}
     * @throws Exception if publishing fails or the layout is unsupported
     */
    @Deprecated
    public void publish() throws Exception {
        if (this.dirLayout.equals("nested")) {
            nestedDirectory();
        } else if (this.dirLayout.equals("flat")) {
            flatDirectory();
        } else {
            // Fail fast instead of System.exit(1): killing the JVM from a
            // library/Spring component takes the whole application down and
            // bypasses the caller's error handling.
            String msg = "Unsupported directory layout [" + this.dirLayout
                    + "]; arbitrarily deep directories are not yet supported";
            logger.error(msg);
            throw new IllegalArgumentException(msg);
        }
    }

    /**
     * Handles a flat directory structure: all NetCDF files live directly in
     * the data source directory.
     */
    private void flatDirectory() {
        // ".nc" (with the dot) so only NetCDF files match — a bare "nc" suffix
        // would also match any name merely ending in the letters "nc".
        String[] ncFiles = dataSrcDir.list(new SuffixFileFilter(".nc"));
        if (ncFiles == null) {
            // File.list() returns null when the path is not a readable directory
            logger.error("Unable to list data source directory: {}", dataSrcDir);
            return;
        }
        System.out.printf("Processing directory [%s] with [%d] files ... \n",
                dataSrcDir.getName(), ncFiles.length);
        ParentDataset pds = new ParentDataset(dataSrcDir, ncFiles);
        pds.threddsGen();
    }

    /**
     * Handles a two-level directory structure: each visible subdirectory of
     * the data source directory holds its own set of NetCDF files, and a
     * separate catalog is generated per subdirectory.
     */
    private void nestedDirectory() {
        FilenameFilter subDirFilter = new AndFileFilter(
                DirectoryFileFilter.INSTANCE, HiddenFileFilter.VISIBLE);
        String[] subDirs = dataSrcDir.list(subDirFilter);
        if (subDirs == null) {
            // File.list() returns null when the path is not a readable directory
            logger.error("Unable to list data source directory: {}", dataSrcDir);
            return;
        }
        for (String subDirName : subDirs) {
            // File(parent, child) is platform-safe; avoids manual "/" concatenation
            File curDir = new File(dataSrcDir, subDirName);
            logger.debug("Processing: {}", curDir);
            // ".nc" (with the dot) — see flatDirectory()
            String[] ncFiles = curDir.list(new SuffixFileFilter(".nc"));
            if (ncFiles == null) {
                logger.warn("Skipping unreadable subdirectory: {}", curDir);
                continue;
            }
            // pass the files on to generate a separate catalog file
            System.out.printf("Processing directory [%s] with [%d] files ... \n",
                    curDir.getName(), ncFiles.length);
            ParentDataset pds = new ParentDataset(curDir, ncFiles);
            pds.threddsGen();
        }
    }
}