Commit 740fea64 authored by Daniel Lehmberg

WIP: introduce headerProcSep and raise an error in the PostVis when multiple required column names are detected

parent ec80f092
OutputFile.java (package org.vadere.simulator.projects.dataprocessing.outputfile)

@@ -2,6 +2,7 @@
 package org.vadere.simulator.projects.dataprocessing.outputfile;
+import org.vadere.simulator.projects.dataprocessing.DataProcessingJsonManager;
 import org.vadere.simulator.projects.dataprocessing.datakey.DataKey;
 import org.vadere.simulator.projects.dataprocessing.processor.DataProcessor;
 import org.vadere.simulator.projects.dataprocessing.writer.VadereWriter;
@@ -54,7 +55,11 @@ public abstract class OutputFile<K extends DataKey<K>> {
     private boolean isWriteMetaData;
     private String separator;
-    public final static String nameConflictAdd = "-PID?"; // the ? is replaced with the processor id
+    // Check also the PostVis, which depends on this separator
+    public final static String headerProcSep = "-";
+    public final static String headerNameAdd = headerProcSep + "PID?"; // the ? is replaced with the processor id
     private VadereWriterFactory writerFactory;
     private VadereWriter writer;
@@ -129,7 +134,7 @@ public abstract class OutputFile<K extends DataKey<K>> {
         // characters
         String md = "#IDXCOL=" + dataIndices.length +
                 ",DATACOL="+(getEntireHeader().size()-dataIndices.length)+","+
-                "SEP=\'"+ this.separator+"\'";
+                "SEP=\'"+ DataProcessingJsonManager.DEFAULT_SEPARATOR +"\'";
         //Make a list with one element to reuse 'writeLine' function
         List<String> line = new LinkedList<>();
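For orientation, a minimal, self-contained sketch of the meta-data row that this hunk changes. It is not part of the changeset; the column counts are hypothetical and the single space merely stands in for DataProcessingJsonManager.DEFAULT_SEPARATOR:

    public class MetaDataLineSketch {
        public static void main(String[] args) {
            // hypothetical values: 2 index columns, 3 data columns, space as separator
            int idxCols = 2;
            int dataCols = 3;
            String separator = " "; // stand-in for DataProcessingJsonManager.DEFAULT_SEPARATOR
            String md = "#IDXCOL=" + idxCols + ",DATACOL=" + dataCols + "," + "SEP=\'" + separator + "\'";
            System.out.println(md); // prints: #IDXCOL=2,DATACOL=3,SEP=' '
        }
    }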
@@ -166,16 +171,37 @@ public abstract class OutputFile<K extends DataKey<K>> {
     }
     public String getHeaderLine() {
-        return String.join(this.separator, this.getEntireHeader());
+        return String.join(DataProcessingJsonManager.DEFAULT_SEPARATOR, this.getEntireHeader());
     }
     public String getIndicesLine() {
-        return String.join(this.separator, this.getIndices());
+        return String.join(DataProcessingJsonManager.DEFAULT_SEPARATOR, this.getIndices());
+    }
+    private List<String> uniqueHeaderNames(){
+        // This function appends "headerNameAdd" to EVERY header, every time
+        // (cmp. headersWithNameMangling)
+        LinkedList<String> headers = new LinkedList<>();
+        for (DataProcessor l: dataProcessors) {
+            List<String> allProcHeaders = Arrays.asList(l.getHeaders());
+            for (String singleHeader: allProcHeaders) {
+                // add the processor id to make the header unique
+                headers.addLast(singleHeader + headerNameAdd.replace("?", "" + l.getId()));
+            }
+        }
+        return headers;
     }
     private List<String> headersWithNameMangling(){
+        // This function appends "headerNameAdd" to every header ONLY if a name conflict is detected
+        // (cmp. uniqueHeaderNames)
         LinkedList<String> headers = new LinkedList<>();
-        boolean isNameMangle = false; // assume there is no naming conflict
+        boolean isNameMangleDetected = false; // assume there is no naming conflict
         mainloop:
         for (DataProcessor l: dataProcessors) {
@@ -183,7 +209,7 @@ public abstract class OutputFile<K extends DataKey<K>> {
             for(String el: list) {
                 if(headers.contains(el)){
-                    isNameMangle = true; // conflict found: stop collecting headers
+                    isNameMangleDetected = true; // conflict found: stop collecting and make every header unique
                     break mainloop;
                 }else{
                     headers.addLast(el);
@@ -191,24 +217,16 @@ public abstract class OutputFile<K extends DataKey<K>> {
             }
         }
-        if(isNameMangle){
-            headers.clear(); //start from new...
-            for (DataProcessor l: dataProcessors) {
-                List<String> list = Arrays.asList(l.getHeaders());
-                for (String h: list) {
-                    // ... but now add the processor id
-                    headers.addLast(h +
-                            nameConflictAdd.replace("?", "" + l.getId()+'0'));
-                }
-            }
+        if(isNameMangleDetected){
+            headers = (LinkedList<String>) uniqueHeaderNames();
         }
         return headers;
     }
     private List<String> composeHeaderLine(){
         final List<String> allHeaders = new LinkedList<>(Arrays.asList(dataIndices));
-        List<String> procHeaders = this.headersWithNameMangling();
+        List<String> procHeaders = this.uniqueHeaderNames();
         allHeaders.addAll(procHeaders);
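A minimal, self-contained sketch of what the new mangling produces and how a consumer such as the PostVis can strip the suffix again. It is not part of the changeset; the processor id and header name are made up:

    public class HeaderManglingSketch {
        public static void main(String[] args) {
            final String headerProcSep = "-";
            final String headerNameAdd = headerProcSep + "PID?"; // the ? is replaced with the processor id

            int processorId = 5; // hypothetical processor id
            String header = "x"; // hypothetical processor header

            // what uniqueHeaderNames() appends to every header
            String unique = header + headerNameAdd.replace("?", "" + processorId);
            System.out.println(unique); // prints: x-PID5

            // what a reader can do to recover the plain header name (cmp. TrajectoryReader below)
            String plain = unique.split(headerProcSep)[0];
            System.out.println(plain);  // prints: x
        }
    }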
TrajectoryReader.java (package org.vadere.simulator.projects.io)

@@ -3,6 +3,7 @@ package org.vadere.simulator.projects.io;
 import org.apache.commons.math3.util.Pair;
 import org.jetbrains.annotations.NotNull;
 import org.vadere.simulator.projects.Scenario;
+import org.vadere.simulator.projects.dataprocessing.outputfile.OutputFile;
 import org.vadere.simulator.projects.dataprocessing.processor.PedestrianPositionProcessor;
 import org.vadere.state.attributes.scenario.AttributesAgent;
 import org.vadere.state.scenario.Agent;
@@ -65,6 +66,8 @@ public class TrajectoryReader {
     private int groupSizeIndex;
     private int stridesIndex;
+    private static final int notSetColumnIndexIdentifier = -1;
     public TrajectoryReader(final Path trajectoryFilePath, final Scenario scenario) {
         this(trajectoryFilePath, scenario.getAttributesPedestrian());
     }
@@ -98,26 +101,29 @@
         stridesKeys.add("strides");
         stridesKeys.add("footSteps");
-        pedIdIndex = -1;
-        stepIndex = -1;
-        xIndex = -1;
-        yIndex = -1;
-        targetIdIndex = -1;
-        groupIdIndex = -1;
-        groupSizeIndex = -1;
-        stridesIndex = -1;
+        pedIdIndex = notSetColumnIndexIdentifier;
+        stepIndex = notSetColumnIndexIdentifier;
+        xIndex = notSetColumnIndexIdentifier;
+        yIndex = notSetColumnIndexIdentifier;
+        targetIdIndex = notSetColumnIndexIdentifier;
+        groupIdIndex = notSetColumnIndexIdentifier;
+        groupSizeIndex = notSetColumnIndexIdentifier;
+        stridesIndex = notSetColumnIndexIdentifier;
     }
     public Map<Step, List<Agent>> readFile() throws IOException {
-        if (checkFile()){
-            return readStandardTrajectoryFile();
-        } else {
-            throw new IOException("could not read trajectory file, some colums are missing.");
+        checkFile();
+        return readStandardTrajectoryFile();
+    }
+    private void errorWhenNotUniqueColumn(int currentValue, String columnName) throws IOException{
+        if(currentValue != notSetColumnIndexIdentifier){
+            throw new IOException("The header " + columnName + " is not unique in the file. This is likely to have " +
+                    "unwanted side effects");
         }
     }
-    public boolean checkFile () throws IOException {
+    public void checkFile () throws IOException {
         // 1. Get the correct column
         String header;
         //read only first line.
@@ -127,50 +133,58 @@
         String[] columns = header.split(SPLITTER);
         for (int index = 0; index < columns.length; index++) {
-            if (pedestrianIdKeys.contains(columns[index])) {
+            // header name without the processor ID suffix
+            String headerName = columns[index].split(OutputFile.headerProcSep)[0];
+            if (pedestrianIdKeys.contains(headerName)) {
+                errorWhenNotUniqueColumn(pedIdIndex, headerName);
                 pedIdIndex = index;
-            } else if (stepKeys.contains(columns[index])) {
+            } else if (stepKeys.contains(headerName)) {
+                errorWhenNotUniqueColumn(stepIndex, headerName);
                 stepIndex = index;
-            } else if (xKeys.contains(columns[index])) {
+            } else if (xKeys.contains(headerName)) {
+                errorWhenNotUniqueColumn(xIndex, headerName);
                 xIndex = index;
-            } else if (yKeys.contains(columns[index])) {
+            } else if (yKeys.contains(headerName)) {
+                errorWhenNotUniqueColumn(yIndex, headerName);
                 yIndex = index;
-            } else if (targetIdKeys.contains(columns[index])) {
+            } else if (targetIdKeys.contains(headerName)) {
+                errorWhenNotUniqueColumn(targetIdIndex, headerName);
                 targetIdIndex = index;
-            } else if (groupIdKeys.contains(columns[index])){
+            } else if (groupIdKeys.contains(headerName)){
+                errorWhenNotUniqueColumn(groupIdIndex, headerName);
                 groupIdIndex = index;
             }
-            else if (groupSizeKeys.contains(columns[index])){
+            else if (groupSizeKeys.contains(headerName)){
+                errorWhenNotUniqueColumn(groupSizeIndex, headerName);
                 groupSizeIndex = index;
             }
-            else if(stridesKeys.contains(columns[index])) {
+            else if(stridesKeys.contains(headerName)) {
+                errorWhenNotUniqueColumn(stridesIndex, headerName);
                 stridesIndex = index;
             }
         }
-        try {
-            if (pedIdIndex != -1 && xIndex != -1 && yIndex != -1 && stepIndex != -1) {
-                // load default values with no groups
-                return true;
-            } else {
-                return false;
-            }
-        } catch (Exception e) {
-            logger.warn("could not read trajectory file. The file format might not be compatible or it is missing.");
-            throw e;
-        }
+        if (! (pedIdIndex != notSetColumnIndexIdentifier && xIndex != notSetColumnIndexIdentifier &&
+                yIndex != notSetColumnIndexIdentifier && stepIndex != notSetColumnIndexIdentifier)) {
+            // the required columns (pedestrian id, x, y and simulation step) must all be present
+            throw new IOException(String.format("Not all required columns could be found in the trajectory file " +
+                    "(an index of " + notSetColumnIndexIdentifier + " marks a missing column): " +
+                    "pedIdIndex=%d, xIndex=%d, yIndex=%d, stepIndex=%d", pedIdIndex, xIndex, yIndex, stepIndex));
+        }
     }
     private Map<Step, List<Agent>> readStandardTrajectoryFile() throws IOException {
         try (BufferedReader in = IOUtils.defaultBufferedReader(this.trajectoryFilePath)) {
             return in.lines() // a stream of lines
                     .skip(1) // skip the first line i.e. the header
                     .map(line -> split(line)) // split the line into string tokens
                     .map(rowTokens -> parseRowTokens(rowTokens)) // transform those tokens into a pair of java objects (step, agent)
                     .collect(Collectors.groupingBy(Pair::getKey, // group all agent objects by the step.
                             Collectors.mapping(Pair::getValue, Collectors.toList())));
         } catch (Exception e){
-            logger.warn("could not read trajectory file. The file format might not be compatible or it is missing.");
+            logger.warn("Could not read trajectory file. The file format might not be compatible or it is missing.");
             throw e;
         }
     }
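Finally, a minimal, self-contained sketch (not part of the changeset) of the duplicate-column detection that checkFile() now performs: each header is first stripped of its "-PID<id>" suffix, and a second column mapping to the same required key raises an IOException. The header line and the key set below are made up, and -1 plays the role of notSetColumnIndexIdentifier, so running the sketch ends in the exception on purpose:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class DuplicateColumnSketch {
        public static void main(String[] args) throws IOException {
            final int notSet = -1; // stands in for notSetColumnIndexIdentifier
            // hypothetical key set; the real keys live in TrajectoryReader
            final Set<String> pedestrianIdKeys = new HashSet<>(Arrays.asList("id", "pedestrianId"));
            // two processors both wrote a pedestrianId column, so two mangled headers appear
            String[] columns = "timeStep pedestrianId-PID1 x-PID1 y-PID1 pedestrianId-PID2".split(" ");

            int pedIdIndex = notSet;
            for (int index = 0; index < columns.length; index++) {
                String headerName = columns[index].split("-")[0]; // OutputFile.headerProcSep
                if (pedestrianIdKeys.contains(headerName)) {
                    if (pedIdIndex != notSet) { // cmp. errorWhenNotUniqueColumn
                        throw new IOException("The header " + headerName + " is not unique in the file.");
                    }
                    pedIdIndex = index;
                }
            }
        }
    }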