package loaders;

import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.BasicConfigurator;
import org.molgenis.MolgenisOptions;
import org.molgenis.framework.db.Database;
import org.molgenis.framework.db.Database.DatabaseAction;
import org.molgenis.util.SimpleTuple;

import app.CsvImport;
import app.DatabaseFactory;
import app.JDBCDatabase;

/**
 * Loads dbGaP study downloads (as prepared by DbGapToPheno) into the
 * phenoflow database.
 */
public class LoadDbGapDownloads
{
	/**
	 * Entry point: opens the phenoflow database and imports all dbGaP study
	 * directories into it.
	 *
	 * @param args unused
	 * @throws Exception on database or import failure
	 */
	public static void main(String[] args) throws Exception
	{
		// start logging
		// BasicConfigurator.configure();

		Database db = new JDBCDatabase("apps/phenoflow/phenoflow.properties");
		loadDbGaPData(db);
	}

	/**
	 * Imports every dbGaP study directory (folders whose name starts with
	 * "phs") found under the data root into the given database.
	 *
	 * @param db open database to import into
	 * @throws Exception on import or database failure
	 */
	public static void loadDbGaPData(Database db) throws Exception
	{
		// it is assumed that DbGapToPheno has put its results here
		// This will need updating if run on a different machine
		// File rootDir = new File("D:\\Data\\dbgap");
		File rootDir = new File("../pheno_data/dbgap/");

		// listFiles() returns null when the directory does not exist or is
		// not readable; fail with a clear message instead of letting the
		// for-loop throw an uninformative NullPointerException.
		File[] investigationDirs = rootDir.listFiles(new FileFilter()
		{
			public boolean accept(File file)
			{
				return file.isDirectory() && file.getName().startsWith("phs");
			}
		});
		if (investigationDirs == null)
		{
			throw new IllegalStateException("Cannot list dbGaP data directory: " + rootDir.getAbsolutePath());
		}

		for (File investigationDir : investigationDirs)
		{
			System.out.println("Loading " + investigationDir);

			// NOTE(review): an earlier variant preloaded entity types in a
			// fixed order (investigation, ontologyterm, category, measurement)
			// to work around an incorrect autogenerated import order; with
			// measurements already present in the database, a single full
			// import (entity list = null) suffices.
			CsvImport.importAll(investigationDir, db, new SimpleTuple(), null,
					DatabaseAction.ADD_IGNORE_EXISTING, "");
		}
	}
}