Java Examples for com.opencsv.CSVWriter
The following Java examples will help you understand the usage of com.opencsv.CSVWriter. These source code samples are taken from different open-source projects.
Example 1
| Project: CloudSim-master File: HelperEx.java View source code |
/**
 * Aggregates the metrics of a finished container-datacenter simulation and appends
 * them as a single CSV row to "&lt;outputFolder&gt;/stats/&lt;experimentPrefix&gt;_stats.csv".
 * Also dumps two raw per-experiment data files: times-before-host-shutdown and
 * times-before-container-migration.
 *
 * NOTE(review): the 'outputInCsv' parameter is never read anywhere in this method -
 * confirm whether a non-CSV output mode was intended.
 *
 * @param datacenter     datacenter whose power/migration statistics are reported
 * @param broker         broker used to fetch the created VM and container lists
 * @param lastClock      final simulation clock value, reported as totalSimulationTime
 * @param experimentName experiment identifier; its trailing "_&lt;runNumber&gt;" suffix
 *                       selects the output sub-folders and triggers header writing
 * @param outputInCsv    unused (see note above)
 * @param outputFolder   root folder under which stats/ and the timing folders live
 * @throws IOException if the CSV file cannot be created or written
 */
public static void printResultsNew(PowerContainerDatacenter datacenter, ContainerDatacenterBroker broker, double lastClock, String experimentName, boolean outputInCsv, String outputFolder) throws IOException {
    List<ContainerVm> vms = broker.getVmsCreatedList();
    List<Container> containers = broker.getContainersCreatedList();
    Log.enable();
    List<ContainerHost> hosts = datacenter.getHostList();
    Map<String, Double> slaMetrics = getSlaMetrics(vms);
    // Column headers for the stats CSV; written once per experiment series
    // (see the "_1" suffix check just before the data row is written).
    String[] msg = { "ExperimentName", "hostSelectionPolicy", "vmAllocationPolicy", "OLThreshold", "ULThreshold", "VMSPolicy", "ContainerSpolicy", "ContainerPlacement", "Percentile", "numberOfHosts", "numberOfVms", "totalSimulationTime", "slaOverall", "slaAverage", "slaTimePerActiveHost", "meanTimeBeforeHostShutdown", "stDevTimeBeforeHostShutdown", "medTimeBeforeHostShutdown", "meanTimeBeforeContainerMigration", "stDevTimeBeforeContainerMigration", "medTimeBeforeContainerMigration", "meanActiveVm", "stDevActiveVm", "medActiveVm", "meanActiveHosts", "stDevActiveHosts", "medActiveHosts", "meanNumberOfContainerMigrations", "stDevNumberOfContainerMigrations", "medNumberOfContainerMigrations", "meanDatacenterEnergy", "stDevDatacenterEnergy", "medDatacenterEnergy", "totalContainerMigration", "totalVmMigration", "totalVmCreated", "numberOfOverUtilization", "energy", "CreatedContainers", "CreatedVms" };
    int numberOfHosts = hosts.size();
    int numberOfVms = vms.size();
    double totalSimulationTime = lastClock;
    double slaOverall = slaMetrics.get("overall");
    double slaAverage = slaMetrics.get("average");
    double slaTimePerActiveHost = getSlaTimePerActiveHost(hosts);
    // Each statistics group below defaults to NaN when its sample list is empty,
    // so runs with no samples are distinguishable from zero-valued ones.
    List<Double> timeBeforeHostShutdown = getTimesBeforeHostShutdown(hosts);
    // NOTE(review): numberOfHostShutdowns is computed but never appended to the CSV row.
    int numberOfHostShutdowns = timeBeforeHostShutdown.size();
    double meanTimeBeforeHostShutdown = Double.NaN;
    double stDevTimeBeforeHostShutdown = Double.NaN;
    double medTimeBeforeHostShutdown = Double.NaN;
    if (!timeBeforeHostShutdown.isEmpty()) {
        meanTimeBeforeHostShutdown = MathUtil.mean(timeBeforeHostShutdown);
        stDevTimeBeforeHostShutdown = MathUtil.stDev(timeBeforeHostShutdown);
        medTimeBeforeHostShutdown = MathUtil.median(timeBeforeHostShutdown);
    }
    List<Double> timeBeforeContainerMigration = getTimesBeforeContainerMigration(containers);
    double meanTimeBeforeContainerMigration = Double.NaN;
    double stDevTimeBeforeContainerMigration = Double.NaN;
    double medTimeBeforeContainerMigration = Double.NaN;
    if (!timeBeforeContainerMigration.isEmpty()) {
        meanTimeBeforeContainerMigration = MathUtil.mean(timeBeforeContainerMigration);
        stDevTimeBeforeContainerMigration = MathUtil.stDev(timeBeforeContainerMigration);
        medTimeBeforeContainerMigration = MathUtil.median(timeBeforeContainerMigration);
    }
    List<Double> activeVm = datacenter.getActiveVmList();
    double meanActiveVm = Double.NaN;
    double stDevActiveVm = Double.NaN;
    double medActiveVm = Double.NaN;
    if (!activeVm.isEmpty()) {
        meanActiveVm = MathUtil.mean(activeVm);
        stDevActiveVm = MathUtil.stDev(activeVm);
        medActiveVm = MathUtil.median(activeVm);
    }
    List<Double> activeHost = datacenter.getActiveHostList();
    double meanActiveHosts = Double.NaN;
    double stDevActiveHosts = Double.NaN;
    double medActiveHosts = Double.NaN;
    if (!activeHost.isEmpty()) {
        meanActiveHosts = MathUtil.mean(activeHost);
        stDevActiveHosts = MathUtil.stDev(activeHost);
        medActiveHosts = MathUtil.median(activeHost);
    }
    List<Double> numberOfContainerMigrations = datacenter.getContainerMigrationList();
    double meanNumberOfContainerMigrations = Double.NaN;
    double stDevNumberOfContainerMigrations = Double.NaN;
    double medNumberOfContainerMigrations = Double.NaN;
    if (!numberOfContainerMigrations.isEmpty()) {
        meanNumberOfContainerMigrations = MathUtil.mean(numberOfContainerMigrations);
        stDevNumberOfContainerMigrations = MathUtil.stDev(numberOfContainerMigrations);
        medNumberOfContainerMigrations = MathUtil.median(numberOfContainerMigrations);
    }
    List<Double> datacenterEnergy = datacenter.getDatacenterEnergyList();
    double meanDatacenterEnergy = Double.NaN;
    double stDevDatacenterEnergy = Double.NaN;
    double medDatacenterEnergy = Double.NaN;
    if (!datacenterEnergy.isEmpty()) {
        meanDatacenterEnergy = MathUtil.mean(datacenterEnergy);
        stDevDatacenterEnergy = MathUtil.stDev(datacenterEnergy);
        medDatacenterEnergy = MathUtil.median(datacenterEnergy);
    }
    // Migration counters are only available on the CM datacenter subtype;
    // otherwise they are reported as 0.
    int totalContainerMigration = 0;
    int totalVmMigration = 0;
    int totalVmCreated = 0;
    if (datacenter instanceof PowerContainerDatacenterCM) {
        totalContainerMigration = ((PowerContainerDatacenterCM) datacenter).getContainerMigrationCount();
        totalVmMigration = ((PowerContainerDatacenterCM) datacenter).getVmMigrationCount();
        totalVmCreated = ((PowerContainerDatacenterCM) datacenter).getNewlyCreatedVms();
    }
    PowerContainerVmAllocationPolicyMigrationAbstract vmAllocationPolicy = (PowerContainerVmAllocationPolicyMigrationAbstract) datacenter.getVmAllocationPolicy();
    int numberOfOverUtilization = getNumberofOverUtilization(hosts, vmAllocationPolicy);
    // NOTE(review): presumably converts accumulated power (watt-seconds) to kWh
    // - confirm the units returned by getPower().
    double energy = datacenter.getPower() / (3600 * 1000);
    //Now we create the log we need
    // The entire row is assembled as one pre-joined, comma-separated string.
    // Every field gets a trailing delimiter, so each row ends with a comma.
    StringBuilder data = new StringBuilder();
    String delimeter = ",";
    data.append(experimentName + delimeter);
    // parseExperimentName presumably emits its own delimited columns (the
    // hostSelectionPolicy..Percentile headers) - TODO confirm against its impl.
    data.append(parseExperimentName(experimentName));
    data.append(String.format("%d", numberOfHosts) + delimeter);
    data.append(String.format("%d", numberOfVms) + delimeter);
    data.append(String.format("%.2f", totalSimulationTime) + delimeter);
    data.append(String.format("%.10f", slaOverall) + delimeter);
    data.append(String.format("%.10f", slaAverage) + delimeter);
    data.append(String.format("%.10f", slaTimePerActiveHost) + delimeter);
    data.append(String.format("%.10f", meanTimeBeforeHostShutdown) + delimeter);
    data.append(String.format("%.10f", stDevTimeBeforeHostShutdown) + delimeter);
    data.append(String.format("%.10f", medTimeBeforeHostShutdown) + delimeter);
    data.append(String.format("%.10f", meanTimeBeforeContainerMigration) + delimeter);
    data.append(String.format("%.10f", stDevTimeBeforeContainerMigration) + delimeter);
    data.append(String.format("%.10f", medTimeBeforeContainerMigration) + delimeter);
    data.append(String.format("%.10f", meanActiveVm) + delimeter);
    data.append(String.format("%.10f", stDevActiveVm) + delimeter);
    data.append(String.format("%.10f", medActiveVm) + delimeter);
    data.append(String.format("%.10f", meanActiveHosts) + delimeter);
    data.append(String.format("%.10f", stDevActiveHosts) + delimeter);
    data.append(String.format("%.10f", medActiveHosts) + delimeter);
    data.append(String.format("%.10f", meanNumberOfContainerMigrations) + delimeter);
    data.append(String.format("%.10f", stDevNumberOfContainerMigrations) + delimeter);
    data.append(String.format("%.10f", medNumberOfContainerMigrations) + delimeter);
    data.append(String.format("%.10f", meanDatacenterEnergy) + delimeter);
    data.append(String.format("%.10f", stDevDatacenterEnergy) + delimeter);
    data.append(String.format("%.10f", medDatacenterEnergy) + delimeter);
    data.append(String.format("%d", totalContainerMigration) + delimeter);
    data.append(String.format("%d", totalVmMigration) + delimeter);
    data.append(String.format("%d", totalVmCreated) + delimeter);
    data.append(String.format("%d", numberOfOverUtilization) + delimeter);
    data.append(String.format("%.5f", energy) + delimeter);
    data.append(String.format("%d", broker.getContainersCreated()) + delimeter);
    data.append(String.format("%d", broker.getNumberOfCreatedVMs()) + delimeter);
    // data.append(String.format("%.10f", sla) + delimeter);
    // data.append(String.format("%.10f", slaDegradationDueToMigration) + delimeter);
    // The suffix after the last '_' is the run number; the prefix before it
    // names the experiment series and its output files/folders.
    int index = experimentName.lastIndexOf("_");
    // Create the stats folder (parent first, then the folder itself).
    File folder1 = new File(outputFolder + "/stats/");
    File parent1 = folder1.getParentFile();
    if (!parent1.exists() && !parent1.mkdirs()) {
        throw new IllegalStateException("Couldn't create dir: " + parent1);
    }
    if (!folder1.exists()) {
        folder1.mkdir();
    }
    // Per-series folder for the host-shutdown timing dumps.
    String beforShutDown = outputFolder + "/time_before_host_shutdown/" + experimentName.substring(0, index);
    File folder2 = new File(beforShutDown);
    File parent2 = folder2.getParentFile();
    if (!parent2.exists() && !parent2.mkdirs()) {
        throw new IllegalStateException("Couldn't create dir: " + parent2);
    }
    if (!folder2.exists()) {
        folder2.mkdir();
    }
    // Per-series folder for the migration timing dumps.
    String beforeMigrate = outputFolder + "/time_before_vm_migration/" + experimentName.substring(0, index);
    File folder3 = new File(beforeMigrate);
    File parent3 = folder3.getParentFile();
    if (!parent3.exists() && !parent3.mkdirs()) {
        throw new IllegalStateException("Couldn't create dir: " + parent3);
    }
    if (!folder3.exists()) {
        folder3.mkdir();
    }
    // int index = experimentName.lastIndexOf("_");
    String fileAddress = String.format("%s/stats/%s_stats.csv", outputFolder, experimentName.substring(0, index));
    File f = new File(fileAddress);
    // NOTE(review): the FileWriter is opened *before* the parent-directory and
    // file-existence checks below, so those checks can never rescue a missing
    // directory (the stats folder created above is what makes this work). The
    // writer is also never closed on an exception path - consider
    // try-with-resources here.
    CSVWriter writer = new CSVWriter(new FileWriter(fileAddress, true), ',', CSVWriter.NO_QUOTE_CHARACTER);
    File parent = f.getParentFile();
    if (!parent.exists() && !parent.mkdirs()) {
        throw new IllegalStateException("Couldn't create dir: " + parent);
    }
    if (!f.exists()) {
        f.createNewFile();
        // writer.writeNext("\n")
    }
    int temp = index;
    // Write the header row only for the first run of a series: the suffix must
    // be exactly "_1" (the length check rejects "_10", "_11", ...).
    if (experimentName.substring(index).startsWith("_1") && experimentName.length() - 2 == temp) {
        // CSVWriter writer1 = new CSVWriter(new FileWriter(fileAddress, true), ',',CSVWriter.NO_QUOTE_CHARACTER);
        writer.writeNext(msg);
    }
    // The pre-joined row is written as a single "cell"; this only renders
    // correctly because NO_QUOTE_CHARACTER leaves the embedded commas unquoted.
    writer.writeNext(new String[] { data.toString() });
    writer.flush();
    writer.close();
    writeDataColumn(timeBeforeHostShutdown, beforShutDown + "/" + experimentName + "_time_before_host_shutdown.csv");
    // NOTE(review): this file is named "vm_migration" but contains the
    // *container* migration times collected above - confirm intent.
    writeDataColumn(timeBeforeContainerMigration, beforeMigrate + "/" + experimentName + "_time_before_vm_migration.csv");
}Example 2
| Project: appJavou-master File: ParticipantSendTask.java View source code |
@Override
protected Boolean doInBackground(Void... params) {
    // Exports every attending participant to the CSV file at
    // Constant.PATH_FILE_JAVOU (header row first), and returns TRUE if at
    // least one participant was marked as attending.
    boolean isAttend = false;
    // try-with-resources: the original never closed the writer when a write
    // threw, leaking the file handle and any buffered output.
    try (CSVWriter writer = new CSVWriter(new FileWriter(Constant.PATH_FILE_JAVOU))) {
        List<String[]> data = new ArrayList<>();
        data.add(Constant.FILE_COLS);
        for (Participant participant : mParticipant) {
            if (participant.isAttend()) {
                isAttend = true;
                String sex = (participant.isSex() ? "F" : "M");
                data.add(new String[] { String.valueOf(participant.getCode()), participant.getName(), participant.getEmail(), participant.getPhone(), sex, participant.getCompany() });
            }
        }
        writer.writeAll(data);
    } catch (IOException e) {
        // Best-effort export: failure is signalled via the return value only.
        e.printStackTrace();
    }
    return isAttend;
}Example 3
| Project: tablesaw-master File: ObservationDataTest.java View source code |
/**
 * Generates {@code observationCount} random observation rows (concept, date,
 * value, patient) sampled from the shared pools and writes them to CSV_FILE,
 * header included. The pools are released afterwards so the large fixtures
 * can be garbage-collected.
 *
 * @param observationCount number of data rows to write
 * @param table            table whose columns are looked up (see note below)
 * @throws IOException if the CSV file cannot be written
 */
private static void generateData(int observationCount, Table table) throws IOException {
    // Top up the shared sample pools; repeated calls only fill what is missing.
    while (concepts.size() <= 100_000) {
        concepts.add(RandomStringUtils.randomAscii(30));
    }
    while (patientIds.size() <= 1_000_000) {
        patientIds.add(RandomUtils.nextInt(0, 2_000_000_000));
    }
    while (dates.size() <= size) {
        dates.add(PackedLocalDate.pack(randomDate()));
    }
    // NOTE(review): these columns are never written to below (the commented-out
    // direct-to-table path was removed); the lookups are kept in case the
    // getters validate the table's schema.
    DateColumn dateColumn = table.dateColumn("date");
    CategoryColumn conceptColumn = table.categoryColumn("concept");
    FloatColumn valueColumn = table.floatColumn("value");
    IntColumn patientColumn = table.intColumn("patient");
    // try-with-resources: the original leaked the writer if a write threw
    // mid-loop; close() also performs the final flush.
    try (CSVWriter writer = new CSVWriter(new FileWriter(CSV_FILE))) {
        String[] line = new String[4];
        String[] header = { "concept", "date", "value", "patient" };
        writer.writeNext(header);
        // Sample uniformly from the pools to build each row.
        for (int i = 0; i < observationCount; i++) {
            line[0] = concepts.get(RandomUtils.nextInt(0, concepts.size()));
            line[1] = PackedLocalDate.toDateString(dates.getInt(RandomUtils.nextInt(0, dates.size())));
            line[2] = Float.toString(RandomUtils.nextFloat(0f, 100_000f));
            line[3] = Integer.toString(patientIds.getInt(RandomUtils.nextInt(0, patientIds.size())));
            writer.writeNext(line);
        }
        writer.flush();
    }
    // Drop the pools so the test fixtures do not outlive this run.
    concepts = null;
    patientIds = null;
    dates = null;
}Example 4
| Project: ceeql-master File: CeeqlCsv.java View source code |
/**
 * Renders a query result set as CSV text: one header row built from the first
 * row's keys (sorted alphabetically), then one data row per map. Null values
 * are rendered as the literal string "null".
 *
 * @param rows query rows, each a column-name -> value map
 * @return the CSV text, or a CeeqlError payload when serialization fails
 */
public static String generate(List<Map<String, Object>> rows) {
    StringWriter writer = new StringWriter();
    boolean header = false;
    String[] keys = {};
    ArrayList<String[]> output = new ArrayList<>();
    for (Map<String, Object> row : rows) {
        if (!header) {
            // Column order is fixed by sorting the first row's key set; all
            // subsequent rows are read in that same order.
            keys = row.keySet().toArray(new String[] {});
            Arrays.sort(keys);
            output.add(keys);
            header = true;
        }
        String[] values = new String[keys.length];
        int i = 0;
        for (final String key : keys) {
            final Object value = row.get(key);
            values[i++] = (value != null) ? value.toString() : "null";
        }
        output.add(values);
    }
    // The original never flushed or closed the CSV writer before reading the
    // buffer; flush() guarantees everything has reached the StringWriter, and
    // try-with-resources closes the writer afterwards.
    try (CSVWriter csvWriter = new CSVWriter(writer)) {
        csvWriter.writeAll(output);
        csvWriter.flush();
        return writer.toString();
    } catch (Exception e) {
        return CeeqlError.errorType(e.getClass().getSimpleName(), e.getMessage());
    }
}Example 5
| Project: baleen-master File: AbstractCsvConsumer.java View source code |
@Override
public void doInitialize(UimaContext aContext) throws ResourceInitializationException {
    super.doInitialize(aContext);
    try {
        // Make sure the target directory exists, then open a tab-separated,
        // unquoted, UTF-8 CSV writer over the output file (truncating it).
        final File outputFile = new File(filename);
        outputFile.getParentFile().mkdirs();
        final FileOutputStream fileStream = new FileOutputStream(filename, false);
        final OutputStreamWriter utf8Writer = new OutputStreamWriter(fileStream, StandardCharsets.UTF_8);
        writer = new CSVWriter(utf8Writer, '\t', CSVWriter.NO_QUOTE_CHARACTER);
    } catch (final IOException e) {
        throw new ResourceInitializationException(e);
    }
}Example 6
| Project: AnyMemo-master File: CSVExporter.java View source code |
@Override
public void convert(String src, String dest) throws Exception {
    // Start from a clean destination file.
    new File(dest).delete();
    AnyMemoDBOpenHelper helper = AnyMemoDBOpenHelperManager.getHelper(src);
    final CardDao cardDao = helper.getCardDao();
    final CategoryDao categoryDao = helper.getCategoryDao();
    // Use the default comma separator unless one was explicitly configured.
    CSVWriter csvOut;
    if (separator == null) {
        csvOut = new CSVWriter(new FileWriter(dest));
    } else {
        csvOut = new CSVWriter(new FileWriter(dest), separator);
    }
    try {
        final List<Card> allCards = cardDao.queryForAll();
        // Populate every card's category field inside a single transaction.
        categoryDao.callBatchTasks(new Callable<Void>() {
            public Void call() throws Exception {
                for (Card card : allCards) {
                    categoryDao.refresh(card.getCategory());
                }
                return null;
            }
        });
        AnyMemoDBOpenHelperManager.releaseHelper(helper);
        if (allCards.size() == 0) {
            throw new IOException("Can't retrieve cards for database: " + src);
        }
        // One CSV row per card: question, answer, category name, note.
        String[] row = new String[4];
        for (Card card : allCards) {
            row[0] = card.getQuestion();
            row[1] = card.getAnswer();
            row[2] = card.getCategory().getName();
            row[3] = card.getNote();
            csvOut.writeNext(row);
        }
    } finally {
        csvOut.close();
    }
}Example 7
| Project: Java-library-master File: StaticListDownloadRequest.java View source code |
@Override
public String parse(String response) throws IOException {
    // When a destination stream was configured, mirror the raw response into
    // it as unquoted, unescaped CSV; the response itself is always returned.
    if (fileOutputStream.isPresent()) {
        try (OutputStreamWriter streamWriter = new OutputStreamWriter(fileOutputStream.get());
             CSVWriter csv = new CSVWriter(streamWriter, CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER, CSVWriter.NO_ESCAPE_CHARACTER)) {
            // One CSV record per response line, split on commas.
            for (String line : response.split("\n")) {
                csv.writeNext(line.split(","));
            }
        } finally {
            // Closing the wrappers above also closes the stream; this keeps
            // the original's explicit belt-and-braces close.
            fileOutputStream.get().close();
        }
    }
    return response;
}Example 8
| Project: ambari-master File: OpenCSVTest.java View source code |
@Test
public void testWriter() throws Exception {
    // Input uses '#' as separator and single quotes for quoting.
    String csv = "\'valu#e1\'#c#10#10.1\n" + "value2#c2#102#true";
    try (StringReader sr = new StringReader(csv);
         CSVReader csvReader = new CSVReader(sr, '#', '\'', '\\');
         StringWriter sw = new StringWriter();
         CSVWriter csvWriter = new CSVWriter(sw)) {
        // Round-trip both records through the default writer configuration.
        csvWriter.writeNext(csvReader.readNext());
        csvWriter.writeNext(csvReader.readNext());
        // The writer should emit comma-separated, double-quoted fields.
        Assert.assertEquals("CSVWriter failed.", "\"valu#e1\",\"c\",\"10\",\"10.1\"\n" + "\"value2\",\"c2\",\"102\",\"true\"\n", sw.getBuffer().toString());
    }
}Example 9
| Project: WebAPI-master File: IRAnalysisService.java View source code |
/**
 * Exports the analysis definition and results
 *
 * <p>Bundles the analysis definition JSON and three CSV reports (summary,
 * strata, distribution), each aggregated across all executed sources, into a
 * zip archive returned as a binary attachment.</p>
 *
 * @param id - the IR Analysis ID to export
 * @return Response containing binary stream of zipped data
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/export")
@Transactional
public Response export(@PathParam("id") final int id) {
    Response response = null;
    // Maps zip-entry name -> file content for the archive built at the end.
    HashMap<String, String> fileList = new HashMap<>();
    // Readable labels for the numeric dist_type column.
    HashMap<Integer, String> distTypeLookup = new HashMap<>();
    distTypeLookup.put(1, "TAR");
    distTypeLookup.put(2, "TTO");
    try {
        IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(id);
        Set<ExecutionInfo> executions = analysis.getExecutionInfoList();
        fileList.put("analysisDefinition.json", analysis.getDetails().getExpression());
        // Sequentially collect the results of the IR calculation. In Spring
        // 1.4.2, we could utilize @Async operations to do this in parallel.
        // Results for every source are accumulated into a single CSV per report.
        ArrayList<String[]> summaryLines = new ArrayList<>();
        ArrayList<String[]> strataLines = new ArrayList<>();
        ArrayList<String[]> distLines = new ArrayList<>();
        for (ExecutionInfo execution : executions) {
            Source source = execution.getSource();
            String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
            // perform this query to CDM in an isolated transaction to avoid expensive JDBC transaction synchronization
            DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
            requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
            TransactionStatus initStatus = this.getTransactionTemplateRequiresNew().getTransactionManager().getTransaction(requresNewTx);
            // get the summary data
            List<AnalysisReport.Summary> summaryList = getAnalysisSummaryList(id, source);
            // Header rows are added exactly once, before the first source's data.
            if (summaryLines.isEmpty()) {
                summaryLines.add("db_id#targetId#outcomeId#total#timeAtRisk#cases".split("#"));
            }
            for (AnalysisReport.Summary summary : summaryList) {
                summaryLines.add(new String[] { source.getSourceKey(), String.valueOf(summary.targetId), String.valueOf(summary.outcomeId), String.valueOf(summary.totalPersons), String.valueOf(summary.timeAtRisk), String.valueOf(summary.cases) });
            }
            // get the strata results
            List<AnalysisReport.StrataStatistic> strataList = getStrataStatistics(id, source);
            if (strataLines.isEmpty()) {
                strataLines.add("db_id#targetId#outcomeId#strata_id#strata_name#total#timeAtRisk#cases".split("#"));
            }
            for (AnalysisReport.StrataStatistic strata : strataList) {
                strataLines.add(new String[] { source.getSourceKey(), String.valueOf(strata.targetId), String.valueOf(strata.outcomeId), String.valueOf(strata.id), String.valueOf(strata.name), String.valueOf(strata.totalPersons), String.valueOf(strata.timeAtRisk), String.valueOf(strata.cases) });
            }
            // get the distribution data
            String distQuery = String.format("select '%s' as db_id, target_id, outcome_id, strata_sequence, dist_type, total, avg_value, std_dev, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value from %s.ir_analysis_dist where analysis_id = %d", source.getSourceKey(), resultsTableQualifier, id);
            String translatedSql = SqlTranslate.translateSql(distQuery, "sql server", source.getSourceDialect(), SessionUtils.sessionId(), resultsTableQualifier);
            SqlRowSet rs = this.getSourceJdbcTemplate(source).queryForRowSet(translatedSql);
            this.getTransactionTemplateRequiresNew().getTransactionManager().commit(initStatus);
            // The distribution header comes from the row set's own column names.
            if (distLines.isEmpty()) {
                distLines.add(rs.getMetaData().getColumnNames());
            }
            while (rs.next()) {
                ArrayList<String> columns = new ArrayList<>();
                for (int i = 1; i <= rs.getMetaData().getColumnNames().length; i++) {
                    switch(rs.getMetaData().getColumnName(i)) {
                        case "dist_type":
                            // Replace the numeric code with its TAR/TTO label.
                            columns.add(distTypeLookup.get(rs.getInt(i)));
                            break;
                        default:
                            columns.add(rs.getString(i));
                            break;
                    }
                }
                distLines.add(columns.toArray(new String[0]));
            }
        }
        // Write report lines to CSV.
        // NOTE(review): csvWriter wraps an in-memory StringWriter, so skipping
        // close() leaks nothing, but the flush() before each buffer read is
        // required for the content to be complete.
        StringWriter sw = null;
        CSVWriter csvWriter = null;
        sw = new StringWriter();
        csvWriter = new CSVWriter(sw);
        csvWriter.writeAll(summaryLines);
        csvWriter.flush();
        fileList.put("ir_summary.csv", sw.getBuffer().toString());
        sw = new StringWriter();
        csvWriter = new CSVWriter(sw);
        csvWriter.writeAll(strataLines);
        csvWriter.flush();
        fileList.put("ir_strata.csv", sw.getBuffer().toString());
        sw = new StringWriter();
        csvWriter = new CSVWriter(sw);
        csvWriter.writeAll(distLines);
        csvWriter.flush();
        fileList.put("ir_dist.csv", sw.getBuffer().toString());
        // build zip output
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ZipOutputStream zos = new ZipOutputStream(baos);
        for (String fileName : fileList.keySet()) {
            ZipEntry resultsEntry = new ZipEntry(fileName);
            // putNextEntry implicitly closes any previously started entry, so
            // the single closeEntry() after the loop is sufficient.
            zos.putNextEntry(resultsEntry);
            // NOTE(review): getBytes() uses the platform default charset -
            // consider specifying UTF-8 explicitly.
            zos.write(fileList.get(fileName).getBytes());
        }
        zos.closeEntry();
        zos.close();
        baos.flush();
        baos.close();
        response = Response.ok(baos).type(MediaType.APPLICATION_OCTET_STREAM).header("Content-Disposition", String.format("attachment; filename=\"%s\"", "ir_analysis_" + id + ".zip")).build();
    } catch (Exception ex) {
        // NOTE(review): broad catch rethrown as unchecked; the cause and stack
        // trace are preserved, but callers lose the checked exception type.
        throw new RuntimeException(ex);
    }
    return response;
}Example 10
| Project: windup-master File: ExportCSVFileRuleProvider.java View source code |
/**
 * Writes every hint and classification found in the graph to per-project CSV
 * files (one writer per root project, managed by writeCsvRecordForProject).
 * All writers are closed in the finally block regardless of failures.
 *
 * @param event   rewrite event providing the graph context
 * @param context rule evaluation context (unused here, required by the API)
 * @param config  configuration providing the output folder
 */
@Override
public void perform(GraphRewrite event, EvaluationContext context, WindupConfigurationModel config) {
    InlineHintService hintService = new InlineHintService(event.getGraphContext());
    String outputFolderPath = config.getOutputPath().getFilePath() + File.separator;
    ClassificationService classificationService = new ClassificationService(event.getGraphContext());
    final Map<String, CSVWriter> projectToFile = new HashMap<>();
    final Iterable<InlineHintModel> hints = hintService.findAll();
    final Iterable<ClassificationModel> classifications = classificationService.findAll();
    //try{} in case something bad happens, we need to close files
    try {
        for (InlineHintModel hint : hints) {
            final ProjectModel parentRootProjectModel = hint.getFile().getProjectModel().getRootProjectModel();
            String links = buildLinkString(hint.getLinks());
            String ruleId = orEmpty(hint.getRuleID());
            String title = orEmpty(hint.getTitle());
            String description = orEmpty(hint.getDescription());
            String projectNameString = "";
            String fileName = "";
            String filePath = "";
            if (hint.getFile() != null) {
                if (hint.getFile().getProjectModel() != null) {
                    projectNameString = hint.getFile().getProjectModel().getName();
                }
                fileName = hint.getFile().getFileName();
                filePath = hint.getFile().getFilePath();
            }
            String[] strings = new String[] { ruleId, "hint", title, description, links, projectNameString, fileName, filePath, String.valueOf(hint.getLineNumber()), String.valueOf(hint.getEffort()) };
            writeCsvRecordForProject(projectToFile, outputFolderPath, parentRootProjectModel, strings);
        }
        for (ClassificationModel classification : classifications) {
            for (FileModel fileModel : classification.getFileModels()) {
                final ProjectModel parentRootProjectModel = fileModel.getProjectModel().getRootProjectModel();
                String links = buildLinkString(classification.getLinks());
                String ruleId = orEmpty(classification.getRuleID());
                String classificationText = orEmpty(classification.getClassification());
                String description = orEmpty(classification.getDescription());
                String projectNameString = "";
                if (fileModel.getProjectModel() != null) {
                    projectNameString = fileModel.getProjectModel().getName();
                }
                String fileName = fileModel.getFileName();
                String filePath = fileModel.getFilePath();
                // Classifications have no line number, hence the "N/A" column.
                String[] strings = new String[] { ruleId, "classification", classificationText, description, links, projectNameString, fileName, filePath, "N/A", String.valueOf(classification.getEffort()) };
                writeCsvRecordForProject(projectToFile, outputFolderPath, parentRootProjectModel, strings);
            }
        }
    } finally {
        for (CSVWriter csvWriter : projectToFile.values()) {
            try {
                csvWriter.close();
            } catch (IOException e) {
                // Closing is best-effort: a failure on one writer must not
                // prevent the remaining writers from being closed.
                e.printStackTrace();
            }
        }
    }
}

/** Returns the given string, or "" when it is null (CSV cells must not be null). */
private static String orEmpty(String value) {
    return value != null ? value : "";
}Example 11
| Project: flume-ng-sql-source-master File: SQLSource.java View source code |
/**
 * Configure the source: load configuration properties, initialize the metric
 * counters, establish the database session and create the CSV writer used to
 * serialize result rows into channel events.
 */
@Override
public void configure(Context context) {
    // The original also called LOG.getName() here and discarded the result;
    // that dead statement has been removed.
    LOG.info("Reading and processing configuration values for source " + getName());
    /* Initialize configuration parameters */
    sqlSourceHelper = new SQLSourceHelper(context, this.getName());
    /* Initialize metric counters */
    sqlSourceCounter = new SqlSourceCounter("SOURCESQL." + this.getName());
    /* Establish connection with database */
    hibernateHelper = new HibernateHelper(sqlSourceHelper);
    hibernateHelper.establishSession();
    /* Instantiate the CSV Writer */
    csvWriter = new CSVWriter(new ChannelWriter());
}Example 12
| Project: ContractionTimer-master File: CSVTransformer.java View source code |
/**
 * Exports all stored contractions to {@code outputStream} as CSV: a header
 * row of localized labels, then one row per contraction with the start/end
 * times as both epoch millis and locale-formatted text, plus the note.
 *
 * @param context      context used to query the content provider and resources
 * @param outputStream destination stream; wrapped and closed by this method
 * @throws IOException if writing to the stream fails
 */
public static void writeContractions(Context context, OutputStream outputStream) throws IOException {
    ArrayList<String[]> contractions = new ArrayList<>();
    Cursor data = context.getContentResolver().query(ContractionContract.Contractions.CONTENT_URI, null, null, null, null);
    if (data != null) {
        try {
            // Column indices are constant for this cursor; look them up once
            // instead of once per row as the original did.
            final int startTimeColumnIndex = data.getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_START_TIME);
            final int endTimeColumnIndex = data.getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_END_TIME);
            final int noteColumnIndex = data.getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE);
            while (data.moveToNext()) {
                String[] contraction = new String[5];
                final long startTime = data.getLong(startTimeColumnIndex);
                contraction[0] = Long.toString(startTime);
                contraction[1] = DateFormat.getDateTimeInstance().format(new Date(startTime));
                if (!data.isNull(endTimeColumnIndex)) {
                    final long endTime = data.getLong(endTimeColumnIndex);
                    contraction[2] = Long.toString(endTime);
                    contraction[3] = DateFormat.getDateTimeInstance().format(new Date(endTime));
                } else {
                    // End time is absent for a contraction still in progress.
                    contraction[2] = "";
                    contraction[3] = "";
                }
                contraction[4] = data.isNull(noteColumnIndex) ? "" : data.getString(noteColumnIndex);
                contractions.add(contraction);
            }
        } finally {
            // The original leaked the cursor when a read threw mid-loop.
            data.close();
        }
    }
    // The writer wraps the caller's stream and must be closed to flush; the
    // original leaked it (and buffered output) when a write threw.
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(outputStream));
    try {
        writer.writeNext(new String[] { context.getString(R.string.detail_start_time_label), context.getString(R.string.detail_start_time_formatted_label), context.getString(R.string.detail_end_time_label), context.getString(R.string.detail_end_time_formatted_label), context.getString(R.string.detail_note_label) });
        writer.writeAll(contractions);
    } finally {
        writer.close();
    }
}Example 13
| Project: android-money-manager-ex-master File: ExportToCsvFile.java View source code |
/**
 * Exports every row of the adapter's cursor to the CSV file {@code mFileName}
 * (comma-separated, unquoted): date, payee (or account name for transfers),
 * amount, category, subcategory, transaction number, notes.
 *
 * @return true when the export completed; false when the adapter or cursor is
 *         missing, or when writing failed (logged via Timber)
 */
private boolean runTask() {
    if (mAdapter == null || mAdapter.getCursor() == null)
        return false;
    // take cursor
    Cursor data = mAdapter.getCursor();
    // try-with-resources also fixes the original's leak of the writer when an
    // exception was thrown before close().
    try (CSVWriter csvWriter = new CSVWriter(new FileWriter(mFileName), CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER)) {
        // BUG FIX: the original also called data.moveToNext() at the bottom of
        // the loop, advancing the cursor twice per iteration and silently
        // skipping every other transaction. The while condition alone advances.
        while (data.moveToNext()) {
            String[] record = new String[7];
            // compose a record
            record[0] = data.getString(data.getColumnIndex(QueryAllData.UserDate));
            if (!TextUtils.isEmpty(data.getString(data.getColumnIndex(QueryAllData.Payee)))) {
                record[1] = data.getString(data.getColumnIndex(QueryAllData.Payee));
            } else {
                // No payee (e.g. transfers): fall back to the account name.
                record[1] = data.getString(data.getColumnIndex(QueryAllData.AccountName));
            }
            record[2] = Double.toString(data.getDouble(data.getColumnIndex(QueryAllData.Amount)));
            record[3] = data.getString(data.getColumnIndex(QueryAllData.Category));
            record[4] = data.getString(data.getColumnIndex(QueryAllData.Subcategory));
            record[5] = Integer.toString(data.getInt(data.getColumnIndex(QueryAllData.TransactionNumber)));
            record[6] = data.getString(data.getColumnIndex(QueryAllData.Notes));
            csvWriter.writeNext(record);
        }
    } catch (Exception e) {
        Timber.e(e, "exporting to CSV");
        return false;
    }
    return true;
}Example 14
| Project: distiller-CORE-master File: CsvPrinter.java View source code |
/**
 * Writes the annotations contained in the printer on the specified file.
 *
 * <p>Emits the header row followed by one row per annotation in header order.
 * Missing cells default to "" for string columns and "0" for numeric ones;
 * whole-valued numbers are printed without a trailing ".0". I/O failures are
 * logged at SEVERE and not rethrown.</p>
 *
 * @param fileName the path where to write.
 */
@Override
public void writeFile(String fileName) {
    // try-with-resources: the original only closed the writer on the success
    // path, leaking the file handle when a write threw mid-loop.
    try (CSVWriter writer = new CSVWriter(new FileWriter(fileName), delimiter, CSVWriter.DEFAULT_QUOTE_CHARACTER)) {
        writer.writeNext(getHeaders().toArray(new String[getHeaders().size()]), false);
        // build the rows
        for (Map<String, Either<String, Number>> row : this.getRows()) {
            String[] rowArray = new String[getHeaders().size()];
            for (int i = 0; i < getHeaders().size(); i++) {
                String header = getHeaders().get(i);
                Either<String, Number> cell = row.get(header);
                if (cell == null) {
                    // Empty cell: type-appropriate default.
                    if (getHeaderTypes().get(i).isLeft()) {
                        rowArray[i] = "";
                    } else {
                        rowArray[i] = "0";
                    }
                } else if (cell.isLeft()) {
                    // the cell is a string
                    rowArray[i] = cell.getLeft();
                } else {
                    // the cell is a number; avoid printing ".0" for whole values
                    rowArray[i] = cell.getRight().doubleValue() == Math.floor(cell.getRight().doubleValue()) ? String.format(Locale.US, "%d", cell.getRight().intValue()) : String.format(Locale.US, "%f", cell.getRight().doubleValue());
                }
            }
            writer.writeNext(rowArray, false);
        }
    } catch (IOException ex) {
        Logger.getLogger(CsvPrinter.class.getName()).log(Level.SEVERE, "Error while writing CSV file", ex);
    }
}Example 15
| Project: easybatch-framework-master File: OpenCsvRecordMarshaller.java View source code |
/**
 * Marshals the record's payload into a single CSV line (no trailing newline:
 * lineEnd is forced to the empty string) and returns it wrapped in a
 * StringRecord carrying the original record header.
 *
 * @param record the record to marshal
 * @return a StringRecord whose payload is the CSV-encoded field list
 * @throws Exception if field extraction or writing fails
 */
@Override
public StringRecord processRecord(Record<P> record) throws Exception {
    // try-with-resources replaces the original's manual null-checked finally
    // block; both writers are closed (in reverse order) even on failure.
    try (StringWriter stringWriter = new StringWriter();
         // force lineEnd to empty string
         CSVWriter csvWriter = new CSVWriter(stringWriter, delimiter, qualifier, "")) {
        P payload = record.getPayload();
        List<String> fields = extractFields(payload);
        String[] items = fields.toArray(new String[fields.size()]);
        csvWriter.writeNext(items);
        // Flush before reading the buffer so the line is complete.
        csvWriter.flush();
        return new StringRecord(record.getHeader(), stringWriter.toString());
    }
}Example 16
| Project: Hydrograph-master File: ExportAction.java View source code |
private void writeDataInFile(List<String[]> fileDataList, String filePath) {
    // Export only when a target path is given and both the delimiter and the
    // quote character decode (from their hex form) to exactly one character.
    if (filePath == null) {
        return;
    }
    if (StringUtils.length(ConvertHexValues.parseHex(delimiter)) != 1 || StringUtils.length(ConvertHexValues.parseHex(quoteCharactor)) != 1) {
        return;
    }
    char delimiterChar = ConvertHexValues.parseHex(delimiter).toCharArray()[0];
    char quoteChar = ConvertHexValues.parseHex(quoteCharactor).toCharArray()[0];
    try (FileWriter fileWriter = new FileWriter(filePath);
         CSVWriter writer = new CSVWriter(fileWriter, delimiterChar, quoteChar)) {
        writer.writeAll(fileDataList, false);
        showMessage("Data exported to " + filePath + " successfully.", INFORMATION, SWT.ICON_INFORMATION);
    } catch (IOException e1) {
        showMessage(Messages.ERROR_MESSAGE, ERROR, SWT.ICON_ERROR);
    }
}Example 17
| Project: OpenFoodFacts-androidApp-master File: HistoryScanActivity.java View source code |
/**
 * Exports the scan history (barcode, name, brands) to a timestamped CSV file
 * in the public Downloads directory, appending when the file already exists.
 * Progress and completion are reported via toasts; I/O failures are only
 * printed to the log.
 */
public void exportCSV() {
    Toast.makeText(this, R.string.txt_exporting_history, Toast.LENGTH_LONG).show();
    String baseDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).getAbsolutePath();
    Log.d("dir", baseDir);
    String fileName = "exportHistoryOFF" + new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(new Date()) + ".csv";
    String filePath = baseDir + File.separator + fileName;
    File f = new File(filePath);
    // Append when a regular file already exists, otherwise create/truncate.
    // This collapses the original's two duplicated writer-construction branches.
    boolean append = f.exists() && !f.isDirectory();
    // try-with-resources: the original leaked the writer (and buffered rows)
    // when a write threw before close().
    try (CSVWriter writer = new CSVWriter(new FileWriter(filePath, append))) {
        String[] headers = { "Barcode", "Name", "Brands" };
        writer.writeNext(headers);
        List<HistoryProduct> listHistoryProducts = mHistoryProductDao.loadAll();
        for (HistoryProduct hp : listHistoryProducts) {
            String[] line = { hp.getBarcode(), hp.getTitle(), hp.getBrands() };
            writer.writeNext(line);
        }
        Toast.makeText(this, R.string.txt_history_exported, Toast.LENGTH_LONG).show();
    } catch (IOException e) {
        e.printStackTrace();
    }
}Example 18
| Project: Zettelkasten-master File: ExportToCsvTask.java View source code |
/**
 * Exports the selected entries ("Zettel") to a CSV file (UTF-8), one row per
 * entry and one column per part selected in the {@code exportparts} bit mask.
 * Optionally writes a BibTeX file afterwards.
 */
@Override
protected Object doInBackground() {
    // if no file was chosen, exit task
    if (null == filepath) {
        showOkMessage = false;
        return null;
    }
    // check whether file already exists
    if (filepath.exists()) {
        // file exists, ask user to overwrite it...
        int optionDocExists = JOptionPane.showConfirmDialog(null, resourceMap.getString("askForOverwriteFileMsg", "", filepath.getName()), resourceMap.getString("askForOverwriteFileTitle"), JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE);
        // if the user does *not* choose to overwrite, quit...
        if (optionDocExists != JOptionPane.YES_OPTION) {
            // don't show "export was OK" message in main frame
            showOkMessage = false;
            return null;
        }
    }
    int contentsize;
    int counter;
    // yet everything is ok...
    exportOk = true;
    CSVWriter writer = null;
    try {
        // create csv-writer; always write UTF-8 regardless of platform charset
        writer = new CSVWriter(new OutputStreamWriter(new FileOutputStream(filepath), "UTF-8"), csvseparator);
        // size of the export data, used for the progress bar
        contentsize = exportentries.size();
        // holds all values of one comma-separated line
        LinkedList<String> csvline = new LinkedList<>();
        // build the header row from the selected export parts; the order here
        // fixes the column order and must match the data rows below
        if ((exportparts & Constants.EXPORT_TITLE) != 0) {
            csvline.add(resourceMap.getString("csvHeaderTitle"));
        }
        if ((exportparts & Constants.EXPORT_CONTENT) != 0) {
            csvline.add(resourceMap.getString("csvHeaderContent"));
        }
        if ((exportparts & Constants.EXPORT_AUTHOR) != 0) {
            csvline.add(resourceMap.getString("csvHeaderAuthor"));
        }
        if ((exportparts & Constants.EXPORT_KEYWORDS) != 0) {
            csvline.add(resourceMap.getString("csvHeaderKeywords"));
        }
        if ((exportparts & Constants.EXPORT_MANLINKS) != 0) {
            csvline.add(resourceMap.getString("csvHeaderManLinks"));
        }
        if ((exportparts & Constants.EXPORT_LUHMANN) != 0) {
            csvline.add(resourceMap.getString("csvHeaderLuhmann"));
        }
        if ((exportparts & Constants.EXPORT_LINKS) != 0) {
            csvline.add(resourceMap.getString("csvHeaderLinks"));
        }
        if ((exportparts & Constants.EXPORT_REMARKS) != 0) {
            csvline.add(resourceMap.getString("csvHeaderRemarks"));
        }
        if ((exportparts & Constants.EXPORT_TIMESTAMP) != 0) {
            csvline.add(resourceMap.getString("csvHeaderTimestamp"));
        }
        // write header row to csv-file
        String[] finalline = csvline.toArray(new String[csvline.size()]);
        writer.writeNext(finalline);
        // go through all elements of the data file
        for (counter = 0; counter < exportentries.size(); counter++) {
            try {
                // retrieve zettelnumber
                int zettelnummer = Integer.parseInt(exportentries.get(counter).toString());
                // get the zettel-element
                Element zettel = dataObj.retrieveZettel(zettelnummer);
                // clear data-line
                csvline.clear();
                // export title, if requested
                if ((exportparts & Constants.EXPORT_TITLE) != 0) {
                    csvline.add(zettel.getChild("title").getText());
                }
                // export content, optionally with format tags removed
                if ((exportparts & Constants.EXPORT_CONTENT) != 0) {
                    csvline.add((removeformattags) ? dataObj.getCleanZettelContent(zettelnummer) : dataObj.getZettelContent(zettelnummer));
                }
                // export authors, one per line within the cell
                if ((exportparts & Constants.EXPORT_AUTHOR) != 0) {
                    csvline.add(joinWithLineSeparator(dataObj.getAuthors(zettelnummer)));
                }
                // export keywords, one per line within the cell
                if ((exportparts & Constants.EXPORT_KEYWORDS) != 0) {
                    csvline.add(joinWithLineSeparator(dataObj.getKeywords(zettelnummer, true)));
                }
                // export manual links
                if ((exportparts & Constants.EXPORT_MANLINKS) != 0) {
                    csvline.add(zettel.getChild(Daten.ELEMENT_MANLINKS).getText());
                }
                // export the luhmann element's text
                if ((exportparts & Constants.EXPORT_LUHMANN) != 0) {
                    csvline.add(zettel.getChild("luhmann").getText());
                }
                // export links/attachments, one per line within the cell
                if ((exportparts & Constants.EXPORT_LINKS) != 0) {
                    // attachments are stored as several sub-children, so
                    // collect their text values first
                    List<Element> attachments = dataObj.getAttachments(zettelnummer);
                    String[] links = new String[attachments.size()];
                    for (int i = 0; i < links.length; i++) {
                        links[i] = attachments.get(i).getText();
                    }
                    csvline.add(joinWithLineSeparator(links));
                }
                // export remarks
                if ((exportparts & Constants.EXPORT_REMARKS) != 0) {
                    csvline.add(zettel.getChild(Daten.ELEMENT_REMARKS).getText());
                }
                // export timestamps as "created;edited"
                if ((exportparts & Constants.EXPORT_TIMESTAMP) != 0) {
                    csvline.add(dataObj.getTimestampCreated(zettel) + ";" + dataObj.getTimestampEdited(zettel));
                }
                // write data row to csv-file
                finalline = csvline.toArray(new String[csvline.size()]);
                writer.writeNext(finalline);
                // update progress bar
                setProgress(counter, 0, contentsize);
            } catch (NumberFormatException e) {
                // entry is not a number; write its text without the first two
                // characters (presumably a marker prefix — original behavior
                // kept as-is)
                writer.writeNext(new String[] { exportentries.get(counter).toString().substring(2) });
                setProgress(counter, 0, contentsize);
            }
        }
    } catch (IOException e) {
        Constants.zknlogger.log(Level.SEVERE, e.getLocalizedMessage());
        exportOk = false;
    } finally {
        // close the output stream in any case, logging close-failures too
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (IOException e) {
            Constants.zknlogger.log(Level.SEVERE, e.getLocalizedMessage());
            exportOk = false;
        }
    }
    // if the user requested a bibtex-export, do this now
    if (exportbibtex) {
        // show status text
        msgLabel.setText(resourceMap.getString("msgBibtextExport"));
        // write bibtex file
        ExportTools.writeBibTexFile(dataObj, bibtexObj, exportentries, filepath, resourceMap);
    }
    // return your result
    return null;
}

/**
 * Joins the given values with the platform line separator, without a trailing
 * separator; a {@code null} or empty array yields an empty string. Extracted
 * because the original method repeated this logic three times (authors,
 * keywords, links). The unconditional truncation also fixes the original
 * {@code length() > 1} check, which kept a lone separator when the only value
 * was an empty string and the separator was a single character.
 */
private static String joinWithLineSeparator(String[] values) {
    if (values == null || values.length == 0) {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    for (String value : values) {
        sb.append(value).append(System.lineSeparator());
    }
    // every appended value is followed by a separator, so it is always safe
    // to cut exactly one trailing separator here
    sb.setLength(sb.length() - System.lineSeparator().length());
    return sb.toString();
}
Example 19
| Project: alf.io-master File: EventApiController.java View source code |
/**
 * Streams a CSV export of all confirmed tickets for the given event.
 * The columns are chosen by the repeated "fields" request parameter; any
 * requested name not in FIXED_FIELDS is treated as a custom ticket field
 * and looked up per ticket.
 */
@RequestMapping("/events/{eventName}/export.csv")
public void downloadAllTicketsCSV(@PathVariable("eventName") String eventName, HttpServletRequest request, HttpServletResponse response, Principal principal) throws IOException {
    // requested column names, in request order; empty list if none given
    List<String> fields = Arrays.asList(Optional.ofNullable(request.getParameterValues("fields")).orElse(new String[] {}));
    Event event = loadEvent(eventName, principal);
    // category id -> category, for resolving the "Category" column
    Map<Integer, TicketCategory> categoriesMap = eventManager.loadTicketCategories(event).stream().collect(Collectors.toMap(TicketCategory::getId, Function.identity()));
    ZoneId eventZoneId = event.getZoneId();
    // matches the well-known field names; its negation below selects the
    // custom (additional) fields
    Predicate<String> contains = FIXED_FIELDS::contains;
    response.setContentType("text/csv;charset=UTF-8");
    response.setHeader("Content-Disposition", "attachment; filename=" + eventName + "-export.csv");
    try (ServletOutputStream out = response.getOutputStream();
        CSVWriter writer = new CSVWriter(new OutputStreamWriter(out))) {
        // the BOM bytes must go out before any CSV content
        for (int marker : BOM_MARKERS) {
            //UGLY-MODE_ON: specify that the file is written in UTF-8 with BOM, thanks to alexr http://stackoverflow.com/a/4192897
            out.write(marker);
        }
        // header row: exactly the requested fields, in request order
        writer.writeNext(fields.toArray(new String[fields.size()]));
        // NOTE(review): the row cells below follow this hard-coded if-order
        // while the header follows the request order — columns only line up
        // if the client sends "fields" in this same order; confirm callers do.
        eventManager.findAllConfirmedTicketsForCSV(eventName, principal.getName()).stream().map( t -> {
            List<String> line = new ArrayList<>();
            if (fields.contains("ID")) {
                line.add(t.getUuid());
            }
            // date-times are rendered in the event's time zone
            if (fields.contains("Creation")) {
                line.add(t.getCreation().withZoneSameInstant(eventZoneId).toString());
            }
            if (fields.contains("Category")) {
                line.add(categoriesMap.get(t.getCategoryId()).getName());
            }
            if (fields.contains("Event")) {
                line.add(eventName);
            }
            if (fields.contains("Status")) {
                line.add(t.getStatus().toString());
            }
            // monetary values are stored in cents and converted to units here
            if (fields.contains("OriginalPrice")) {
                line.add(MonetaryUtil.centsToUnit(t.getSrcPriceCts()).toString());
            }
            if (fields.contains("PaidPrice")) {
                line.add(MonetaryUtil.centsToUnit(t.getFinalPriceCts()).toString());
            }
            if (fields.contains("Discount")) {
                line.add(MonetaryUtil.centsToUnit(t.getDiscountCts()).toString());
            }
            if (fields.contains("VAT")) {
                line.add(MonetaryUtil.centsToUnit(t.getVatCts()).toString());
            }
            if (fields.contains("ReservationID")) {
                line.add(t.getTicketsReservationId());
            }
            if (fields.contains("Full Name")) {
                line.add(t.getFullName());
            }
            if (fields.contains("First Name")) {
                line.add(t.getFirstName());
            }
            if (fields.contains("Last Name")) {
                line.add(t.getLastName());
            }
            if (fields.contains("E-Mail")) {
                line.add(t.getEmail());
            }
            if (fields.contains("Locked")) {
                line.add(String.valueOf(t.getLockedAssignment()));
            }
            if (fields.contains("Language")) {
                line.add(String.valueOf(t.getUserLanguage()));
            }
            if (fields.contains("Confirmation")) {
                line.add(t.getTicketReservation().getConfirmationTimestamp().withZoneSameInstant(eventZoneId).toString());
            }
            if (fields.contains("Billing Address")) {
                line.add(t.getTicketReservation().getBillingAddress());
            }
            // custom fields: appended after the fixed ones, in request order;
            // double quotes are stripped from the stored values
            Map<String, String> additionalValues = ticketFieldRepository.findAllValuesForTicketId(t.getId());
            fields.stream().filter(contains.negate()).forEachOrdered( field -> {
                line.add(additionalValues.getOrDefault(field, "").replaceAll("\"", ""));
            });
            return line.toArray(new String[line.size()]);
        }).forEachOrdered(writer::writeNext);
        // push any buffered CSV content to the client before the try block
        // closes the streams
        writer.flush();
        out.flush();
    }
}