Commit 13c3fd1e authored by Christopher Bohn

Added unit tests.

parent ddc67053
CSVReaderWriter.java

@@ -11,8 +11,6 @@ import java.util.*;
 public class CSVReaderWriter {
     // this does not (yet) handle data elements that include commas and/or are surrounded by quotation marks
     // use of ClassLoader.getResourceAsStream inspired by https://www.mkyong.com/java/java-read-a-file-from-resources-folder/
     // use of Map inspired by Python's csv module
     private static final String DELIMTER = ",";
     private static final char BYTE_ORDER_MARK = '\ufeff';
@@ -34,26 +32,7 @@ public class CSVReaderWriter {
         if (resource != null) {
             File file = new File(resource.getPath());
             try(OutputStream outputStream = new FileOutputStream(file)) {
-                PrintStream writer = new PrintStream(outputStream);
-                Set<String> fieldNames = null;
-                int number_of_fields = 0;
-                for (Map<String, String> row: data) {
-                    int field_number = 0;
-                    if (fieldNames == null) {
-                        fieldNames = row.keySet();
-                        number_of_fields = fieldNames.size();
-                        for (String field: fieldNames) {
-                            writer.print(field);
-                            writer.print(++field_number<number_of_fields ? "," : "\n");
-                        }
-                    }
-                    field_number = 0;
-                    for (String field: fieldNames) {
-                        String value = row.get(field);
-                        writer.print(value != null ? value : "");
-                        writer.print(++field_number<number_of_fields ? "," : "\n");
-                    }
-                }
+                placeCSVonStream(data, outputStream);
             } catch (FileNotFoundException fileNotFoundException) {
                 System.err.println("Could not open " + filename + "; probably due to a bad pathname. " + fileNotFoundException);
                 wroteFile = false;
@@ -68,7 +47,7 @@ public class CSVReaderWriter {
         return wroteFile;
     }
-    private static Set<Map<String, String>> parseCSV(InputStream inputStream) throws IOException {
+    static Set<Map<String, String>> parseCSV(InputStream inputStream) throws IOException {
         Set<Map<String, String>> csvSet = new HashSet<>();
         BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
         String line;
@@ -94,22 +73,34 @@ public class CSVReaderWriter {
         return csvSet;
     }
-    public static void main(String[] args) {
-        Set<Map<String, String>> demo = readCSV("demo.csv");
-        boolean success = writeCSV("out.csv", demo);
-        System.out.println(success ? "Wrote file!" : "Didn't write file");
-        /*
+    static void placeCSVonStream(Set<Map<String, String>> data, OutputStream outputStream) {
+        PrintStream writer = new PrintStream(outputStream);
         Set<String> fieldNames = null;
-        for (Map<String, String> row: demo) {
+        int number_of_fields = 0;
+        for (Map<String, String> row: data) {
+            int field_number = 0;
             if (fieldNames == null) {
                 fieldNames = row.keySet();
+                number_of_fields = fieldNames.size();
+                for (String field: fieldNames) {
+                    writer.print(field);
+                    writer.print(++field_number<number_of_fields ? "," : "\n");
+                }
             }
+            field_number = 0;
             for (String field: fieldNames) {
                 String value = row.get(field);
-                System.out.print(field + ":" + value + "\t");
+                writer.print(value != null ? value : "");
+                writer.print(++field_number<number_of_fields ? "," : "\n");
             }
-            System.out.println();
         }
-        */
     }
+    /*
+    public static void main(String[] args) {
+        Set<Map<String, String>> demo = readCSV("demo.csv");
+        boolean success = writeCSV("out.csv", demo);
+        System.out.println(success ? "Wrote file!" : "Didn't write file");
+    }
+    */
 }
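Aside (not part of this commit): the comment at the top of CSVReaderWriter notes that data elements containing commas and/or surrounded by quotation marks are not yet handled. A minimal, hypothetical sketch of one way a line could be split on commas that fall outside double quotes (class and method names are illustrative only, not from the repository):

// Hypothetical sketch -- not part of this commit.
// Splits one CSV line on commas that are not inside double quotes;
// escaped quotes ("") are deliberately not handled here.
import java.util.ArrayList;
import java.util.List;

class QuotedFieldSplitter {
    static List<String> split(String line) {
        List<String> fields = new ArrayList<>();
        StringBuilder currentField = new StringBuilder();
        boolean insideQuotes = false;
        for (char character : line.toCharArray()) {
            if (character == '"') {
                insideQuotes = !insideQuotes;        // toggle quoted state; the quote itself is dropped
            } else if (character == ',' && !insideQuotes) {
                fields.add(currentField.toString()); // a comma outside quotes ends the field
                currentField.setLength(0);
            } else {
                currentField.append(character);
            }
        }
        fields.add(currentField.toString());         // the last field has no trailing comma
        return fields;
    }
}

For example, split("a,\"b,c\",d") returns the three fields a, b,c, and d.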
CSVReaderWriterTest.java

package edu.unl.cse.csv_io;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.util.*;

import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;

public class CSVReaderWriterTest {
    private static InputStream inputStream;
    private static OutputStream outputStream;

    private static void placeCSVStringOnInputStream(String[][] data) {
        String CSVString = createCSVString(data);
        inputStream = new ByteArrayInputStream(CSVString.getBytes());
    }

    private static String createCSVString(String[][] data) {
        StringBuilder stringBuilder = new StringBuilder();
        for (String[] row : data) {
            for (String element : row) {
                stringBuilder.append(element).append(",");
            }
            stringBuilder.deleteCharAt(stringBuilder.lastIndexOf(","));
            stringBuilder.append("\n");
        }
        return stringBuilder.toString();
    }

    @Before
    public void setUp() {
        outputStream = new ByteArrayOutputStream();
    }

    @After
    public void tearDown() {
    }

    @Test
    public void testParsing2x2CSV() {
        // Input
        String[] headers = {"header1", "header2"};
        String[] row = {"datum1", "datum2"};
        String[][] data = {headers, row};
        // Output
        placeCSVStringOnInputStream(data);
        Set<Map<String, String>> result = null;
        try {
            result = CSVReaderWriter.parseCSV(inputStream);
        } catch (IOException ignored) {
            fail();
        }
        // Oracle -- Header
        Set<String> expectedKeys = new HashSet<>(Arrays.asList(headers));
        // Oracle -- Rows
        Map<String, String> expectedRow = new HashMap<>();
        for (int i=0; i<headers.length; i++) {
            expectedRow.put(headers[i], row[i]);
        }
        // Compare
        Map<String,String> aRow = (Map<String,String>)result.toArray()[0];
        assertEquals(expectedKeys, aRow.keySet());
        assertTrue(result.contains(expectedRow));
    }

    @Test
    public void testParsing2x3CSV() {
        // Input
        String[] headers = {"header1", "header2"};
        String[][] rows = {{"datum1", "datum2"}, {"datum3", "datum4"}};
        String[][] data = {headers, rows[0], rows[1]};
        // Output
        placeCSVStringOnInputStream(data);
        Set<Map<String, String>> result = null;
        try {
            result = CSVReaderWriter.parseCSV(inputStream);
        } catch (IOException ignored) {
            fail();
        }
        // Oracle -- Header
        Set<String> expectedKeys = new HashSet<>(Arrays.asList(headers));
        // Oracle -- Rows
        Map<String, String>[] expectedRows = new Map[2];
        for (int i = 0; i < expectedRows.length; i++) {
            expectedRows[i] = new HashMap<String, String>();
            for (int j = 0; j < headers.length; j++) {
                expectedRows[i].put(headers[j], rows[i][j]);
            }
        }
        // Compare
        Map<String, String> aRow = (Map<String, String>) result.toArray()[0];
        assertEquals(expectedKeys, aRow.keySet());
        for (Map<String, String> expectedRow: expectedRows) {
            assertTrue(result.contains(expectedRow));
        }
    }

    @Test
    public void testParsingCSVwithOnlyHeaderRow() {
        // Input
        String[] headers = {"header1", "header2"};
        String[][] data = {headers};
        // Output
        placeCSVStringOnInputStream(data);
        Set<Map<String, String>> result = null;
        try {
            result = CSVReaderWriter.parseCSV(inputStream);
        } catch (IOException ignored) {
            fail();
        }
        // Oracle
        int expectedResultSize = 0;
        // Compare
        assertNotNull(result);
        assertEquals(expectedResultSize, result.size());
    }

    @Test
    public void testParsingEmptyCSV() {
        // Input
        String[][] data = {};
        // Output
        placeCSVStringOnInputStream(data);
        Set<Map<String, String>> result = null;
        try {
            result = CSVReaderWriter.parseCSV(inputStream);
        } catch (IOException ignored) {
            fail();
        }
        // Oracle
        int expectedResultSize = 0;
        // Compare
        assertNotNull(result);
        assertEquals(expectedResultSize, result.size());
    }

    // We probably should test malformed CSVs, but this is good enough for students' starter code

    @Test
    public void testWriting2x3CSV() {
        // Input
        String[] headers = {"header1", "header2"};
        String[][] rows = {{"datum1", "datum2"}, {"datum3", "datum4"}};
        Map<String, String>[] inputRows = new Map[2];
        for (int i=0; i<inputRows.length; i++) {
            inputRows[i] = new HashMap<>(2);
            inputRows[i].put(headers[0], rows[i][0]);
            inputRows[i].put(headers[1], rows[i][1]);
        }
        Set<Map<String, String>> input = new HashSet<>(Arrays.asList(inputRows));
        // Oracle
        String[][] data = {headers, rows[0], rows[1]};
        String expectedCSVStringOption1 = createCSVString(data);
        Collections.reverse(Arrays.asList(headers));
        Collections.reverse(Arrays.asList(rows[0]));
        Collections.reverse(Arrays.asList(rows[1]));
        String expectedCSVStringOption2 = createCSVString(data);
        // Output
        CSVReaderWriter.placeCSVonStream(input, outputStream);
        String output = outputStream.toString();
        // Compare
        assertTrue(expectedCSVStringOption1.equals(output) || expectedCSVStringOption2.equals(output));
        assertThat(output, either(is(expectedCSVStringOption1)).or(is(expectedCSVStringOption2)));
    }
}
\ No newline at end of file
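Aside (not part of this commit): the comment above testWriting2x3CSV notes that malformed CSVs are not yet tested. A further round-trip test could also be added; the sketch below is hypothetical and assumes only what the tests above already exercise, namely that placeCSVonStream writes a header row followed by data rows and that parseCSV reads them back into a set of maps.

package edu.unl.cse.csv_io;

import org.junit.Test;

import java.io.*;
import java.util.*;

import static org.junit.Assert.*;

public class CSVRoundTripTest {
    // Sketch only: writes simple, comma-free data and expects parsing to recover the original set.
    @Test
    public void testWriteThenParseRecoversSimpleData() throws IOException {
        Map<String, String> row = new HashMap<>();
        row.put("header1", "datum1");
        row.put("header2", "datum2");
        Set<Map<String, String>> original = new HashSet<>();
        original.add(row);

        // Write the set to an in-memory stream...
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        CSVReaderWriter.placeCSVonStream(original, outputStream);

        // ...then parse the same bytes back.
        InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
        Set<Map<String, String>> reread = CSVReaderWriter.parseCSV(inputStream);

        assertEquals(original, reread);
    }
}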