
Commit 9849909

wangcj05 authored and alfoa committed
Dump HistorySet MetaData into the correct output csv files (#987)
* enable historyset to handle both scalar and vector meta data, and add check for NAN from code collections
* fix
* fix HistorySet
* fix reading vector meta data
* update tests
* add unit test for meta data load, print, reload
* fix
* modify test to test only the mechanics
1 parent 69f066f commit 9849909

File tree

12 files changed: +498, -100 lines changed

framework/DataObjects/DataSet.py

Lines changed: 9 additions & 4 deletions
@@ -112,20 +112,21 @@ def addExpectedMeta(self,keys, params={}):
       @ In, keys, set(str), keys to register
       @ In, params, dict, optional, {key:[indexes]}, keys of the dictionary are the variable names,
         values of the dictionary are lists of the corresponding indexes/coordinates of given variable
-      @ Out, None
+      @ Out, keys, list(str), extra keys that has been registered
     """
     # TODO add option to skip parts of meta if user wants to
     # remove already existing keys
     keys = list(key for key in keys if key not in self.getVars()+self.indexes)
     # if no new meta, move along
     if len(keys) == 0:
-      return
+      return keys
     # CANNOT add expected meta after samples are started
     assert(self._data is None)
     assert(self._collector is None or len(self._collector) == 0)
     self._metavars.extend(keys)
     self._orderedVars.extend(keys)
     self.setPivotParams(params)
+    return keys

   def addMeta(self, tag, xmlDict = None, node = None):
     """
@@ -972,7 +973,7 @@ def _convertArrayListToDataset(self,array,action='return'):
     # determine dimensions for each variable
     dimsMeta = {}
     for name, var in new.variables.items():
-      if name not in self._inputs + self._outputs:
+      if name not in self._inputs + self._outputs + self._metavars:
         continue
       dims = list(var.dims)
       # don't list if only entry is sampleTag
@@ -1554,8 +1555,12 @@ def _loadCsvMeta(self,fileName):
     dims = meta.get('pivotParams',{})
     if len(dims)>0:
       self.setPivotParams(dims)
+    # vector metavars is also stored in 'DataSet/dims' node
+    metavars = meta.get('metavars',[])
+    # get dict of vector metavars
+    params = {key:val for key, val in dims.items() if key in metavars}
     # add metadata, so we get probability weights and etc
-    self.addExpectedMeta(meta.get('metavars',[]))
+    self.addExpectedMeta(metavars,params)
     # check all variables desired are available
     provided = set(meta.get('inputs',[])+meta.get('outputs',[])+meta.get('metavars',[]))
     # otherwise, if we have no meta XML to load from, infer what we can from the CSV, which is only the available variables.
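The _loadCsvMeta change above rebuilds the index information for vector meta variables from the 'DataSet/dims' metadata node before re-registering them. A minimal sketch of that lookup, using variable names taken from the gold metadata file added later in this commit (values shown are illustrative, not the RAVEN API):

  # dims as read from the 'DataSet/dims' metadata node: pivot coordinates per variable
  dims = {'avg_EXP_PCT_a': ['Time_aligned'], 'avg_ste_EXP_PCT_a': ['Time_aligned']}
  # registered pointwise meta variables; scalar ones (e.g. avg_ste_VCool) have no entry in dims
  metavars = ['avg_ste_EXP_PCT_a', 'avg_ste_VCool']
  # keep only the meta variables that carry index coordinates, i.e. the vector ones
  params = {key: val for key, val in dims.items() if key in metavars}
  # params == {'avg_ste_EXP_PCT_a': ['Time_aligned']}; addExpectedMeta(metavars, params)
  # can now register scalar and vector meta differently and report back the newly added keys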

framework/DataObjects/HistorySet.py

Lines changed: 22 additions & 6 deletions
@@ -81,6 +81,8 @@ def __init__(self):
     self.printTag = self.name
     self._tempPivotParam = None
     self._neededForReload = [] # HistorySet doesn't need anything special to load, since it's written in cluster-by-sample CSV format
+    self._inputMetaVars = [] # meta vars belong to the input of HistorySet, i.e. scalar
+    self._outputMetaVars = [] # meta vara belong to the output of HistorySet, i.e. vector

   def _readMoreXML(self,xmlNode):
     """
@@ -126,7 +128,7 @@ def _fromCSV(self,fileName,**kwargs):
     main = self._readPandasCSV(fileName+'.csv')
     nSamples = len(main.index)
     ## collect input space data
-    for inp in self._inputs + self._metavars:
+    for inp in self._inputs + self._inputMetaVars:
       data[inp] = main[inp].values
     ## get the sampleTag values if they're present, in case it's not just range
     if self.sampleTag in main:
@@ -136,7 +138,7 @@ def _fromCSV(self,fileName,**kwargs):
     # load subfiles for output spaces
     subFiles = main['filename'].values
     # pre-build realization spots
-    for out in self._outputs + self.indexes:
+    for out in self._outputs + self.indexes + self._outputMetaVars:
       data[out] = np.zeros(nSamples,dtype=object)
     # read in secondary CSVs
     for i,sub in enumerate(subFiles):
@@ -150,7 +152,7 @@ def _fromCSV(self,fileName,**kwargs):
       if len(set(subDat.keys()).intersection(self.indexes)) != len(self.indexes):
         self.raiseAnError(IOError,'Importing HistorySet from .csv: the pivot parameters "'+', '.join(self.indexes)+'" have not been found in the .csv file. Check that the '
                                   'correct <pivotParameter> has been specified in the dataObject or make sure the <pivotParameter> is included in the .csv files')
-      for out in self._outputs+self.indexes:
+      for out in self._outputs + self.indexes + self._outputMetaVars:
         data[out][i] = subDat[out].values
     # construct final data object
     self.load(data,style='dict',dims=self.getDimensions())
@@ -190,7 +192,7 @@ def _selectiveRealization(self,rlz):
       if not utils.isSingleValued(val):
         # treat inputs, outputs differently TODO this should extend to per-variable someday
         ## inputs
-        if var in self._inputs:
+        if var in self._inputs + self._inputMetaVars:
           method,indic = self._selectInput
         # pivot variables are included here in "else"; remove them after they're used in operators
         else:
@@ -256,7 +258,7 @@ def _toCSV(self,fileName,start=0,**kwargs):
     # specific implementation
     ## write input space CSV with pointers to history CSVs
     ### get list of input variables to keep
-    ordered = list(i for i in itertools.chain(self._inputs,self._metavars) if i in keep)
+    ordered = list(i for i in itertools.chain(self._inputs,self._inputMetaVars) if i in keep)
     ### select input part of dataset
     inpData = data[ordered]
     ### add column for realization information, pointing to the appropriate CSV
@@ -271,7 +273,7 @@ def _toCSV(self,fileName,start=0,**kwargs):
     ### write CSV
     self._usePandasWriteCSV(fileName,inpData,ordered,keepSampleTag = self.sampleTag in keep,mode=mode)
     ## obtain slices to write subset CSVs
-    ordered = list(o for o in self.getVars('output') if o in keep)
+    ordered = list(o for o in itertools.chain(self._outputs,self._outputMetaVars) if o in keep)

     if len(ordered):
       # hierarchical flag controls the printing/plotting of the dataobject in case it is an hierarchical one.
@@ -300,3 +302,17 @@ def _toCSV(self,fileName,start=0,**kwargs):
         self._usePandasWriteCSV(filename,rlz,ordered,keepIndex=True)
       else:
         self.raiseAWarning('No output space variables have been requested for DataObject "{}"! No history files will be printed!'.format(self.name))
+
+  def addExpectedMeta(self,keys, params={}):
+    """
+      Registers meta to look for in realizations.
+      @ In, keys, set(str), keys to register
+      @ In, params, dict, optional, {key:[indexes]}, keys of the dictionary are the variable names,
+        values of the dictionary are lists of the corresponding indexes/coordinates of given variable
+      @ Out, None
+    """
+    extraKeys = DataSet.addExpectedMeta(self, keys, params)
+    self._inputMetaVars.extend(list(key for key in extraKeys if key not in params))
+    if params:
+      self._outputMetaVars.extend(list(key for key in extraKeys if key in params))
+    return extraKeys
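The overridden addExpectedMeta above is what routes each meta variable to the right CSV: keys that come with index coordinates in params are vector (output-side) meta, everything else is scalar (input-side) meta. A small standalone illustration of that split, with hypothetical values (not RAVEN code):

  extraKeys = ['ProbabilityWeight', 'avg_ste_EXP_PCT_a']   # keys actually registered by the base class
  params = {'avg_ste_EXP_PCT_a': ['Time_aligned']}         # only vector meta carries index coordinates

  inputMetaVars  = [key for key in extraKeys if key not in params]  # ['ProbabilityWeight'], written with the input CSV
  outputMetaVars = [key for key in extraKeys if key in params]      # ['avg_ste_EXP_PCT_a'], written with each history CSV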

framework/Models/Code.py

Lines changed: 2 additions & 0 deletions
@@ -615,6 +615,8 @@ def evaluateSample(self, myInput, samplerType, kwargs):

     csvLoader = CsvLoader.CsvLoader(self.messageHandler)
     csvData = csvLoader.loadCsvFile(outFile)
+    if np.isnan(csvData).all():
+      self.raiseAnError(IOError, 'The data collected from', outputFile+'.csv', 'only contain "NAN"')
     headers = csvLoader.getAllFieldNames()

     ## Numpy by default iterates over rows, thus we transpose the data and
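The added guard fails the run when every value loaded from the code's output CSV is NaN, instead of silently collecting an empty result. A standalone sketch of the same check in plain numpy (outside RAVEN):

  import numpy as np

  # np.isnan(...).all() is True only if every entry of the loaded array is NaN
  csvData = np.array([[np.nan, np.nan], [np.nan, np.nan]])
  if np.isnan(csvData).all():
    raise IOError('The data collected from the output csv only contain "NAN"')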

framework/PostProcessors/BasicStatistics.py

Lines changed: 17 additions & 5 deletions
@@ -254,20 +254,32 @@ def initialize(self, runInfo, inputs, initDict):
     inputObj = inputs[-1] if type(inputs) == list else inputs
     if inputObj.type == 'HistorySet':
       self.dynamic = True
-    metaKeys = []
+    inputMetaKeys = []
+    outputMetaKeys = []
     for metric, infos in self.toDo.items():
       steMetric = metric + '_ste'
       if steMetric in self.steVals:
         for info in infos:
           prefix = info['prefix']
           for target in info['targets']:
             metaVar = prefix + '_ste_' + target if not self.outputDataset else metric + '_ste'
-            metaKeys.append(metaVar)
+            metaDim = inputObj.getDimensions(target)
+            if len(metaDim[target]) == 0:
+              inputMetaKeys.append(metaVar)
+            else:
+              outputMetaKeys.append(metaVar)
+    metaParams = {}
     if not self.outputDataset:
-      metaParams = {key:[self.pivotParameter] for key in metaKeys} if self.dynamic else {}
+      if len(outputMetaKeys) > 0:
+        metaParams = {key:[self.pivotParameter] for key in outputMetaKeys}
     else:
-      metaParams = {key:[self.pivotParameter,self.steMetaIndex] for key in metaKeys} if self.dynamic else {key:[self.steMetaIndex]}
-
+      if len(outputMetaKeys) > 0:
+        params = {key:[self.pivotParameter,self.steMetaIndex] for key in outputMetaKeys + inputMetaKeys}
+        metaParams.update(params)
+      elif len(inputMetaKeys) > 0:
+        params = {key:[self.steMetaIndex] for key in inputMetaKeys}
+        metaParams.update(params)
+    metaKeys = inputMetaKeys + outputMetaKeys
     self.addMetaKeys(metaKeys,metaParams)

   def _localReadMoreXML(self, xmlNode):
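The initialize change routes each standard-error key by the dimensionality of its target: ste keys for time-dependent targets become vector meta indexed by the pivot parameter, while ste keys for scalar targets stay scalar. A compact sketch of the resulting metaParams for the CSV (non-dataset) output path, using names from the test added below (illustrative only):

  pivotParameter = 'Time_aligned'
  inputMetaKeys  = ['avg_ste_VCool']       # VCool is a scalar feature, so its ste is scalar
  outputMetaKeys = ['avg_ste_EXP_PCT_a']   # EXP_PCT_a depends on Time_aligned, so its ste is a vector

  # only vector ste keys receive index coordinates
  metaParams = {key: [pivotParameter] for key in outputMetaKeys}
  # -> {'avg_ste_EXP_PCT_a': ['Time_aligned']}; avg_ste_VCool is registered without indexes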
Lines changed: 66 additions & 0 deletions (new file)
<DataObjectMetadata name="SET_Back_to_MASTER">
  <DataSet type="Static">
    <dims>
      <avg_EXP_MaxCtoF_a>Time_aligned</avg_EXP_MaxCtoF_a>
      <avg_EXP_PCT_a>Time_aligned</avg_EXP_PCT_a>
      <avg_EXP_P_a>Time_aligned</avg_EXP_P_a>
      <avg_REP_MaxCtoF_a>Time_aligned</avg_REP_MaxCtoF_a>
      <avg_REP_PCT_a>Time_aligned</avg_REP_PCT_a>
      <avg_REP_P_a>Time_aligned</avg_REP_P_a>
      <avg_ste_EXP_MaxCtoF_a>Time_aligned</avg_ste_EXP_MaxCtoF_a>
      <avg_ste_EXP_PCT_a>Time_aligned</avg_ste_EXP_PCT_a>
      <avg_ste_EXP_P_a>Time_aligned</avg_ste_EXP_P_a>
      <avg_ste_REP_MaxCtoF_a>Time_aligned</avg_ste_REP_MaxCtoF_a>
      <avg_ste_REP_PCT_a>Time_aligned</avg_ste_REP_PCT_a>
      <avg_ste_REP_P_a>Time_aligned</avg_ste_REP_P_a>
      <sen_EXP_MaxCtoF_a_HTcoeff>Time_aligned</sen_EXP_MaxCtoF_a_HTcoeff>
      <sen_EXP_MaxCtoF_a_LinPow>Time_aligned</sen_EXP_MaxCtoF_a_LinPow>
      <sen_EXP_MaxCtoF_a_RodPitch>Time_aligned</sen_EXP_MaxCtoF_a_RodPitch>
      <sen_EXP_MaxCtoF_a_TCool>Time_aligned</sen_EXP_MaxCtoF_a_TCool>
      <sen_EXP_MaxCtoF_a_VCool>Time_aligned</sen_EXP_MaxCtoF_a_VCool>
      <sen_EXP_PCT_a_HTcoeff>Time_aligned</sen_EXP_PCT_a_HTcoeff>
      <sen_EXP_PCT_a_LinPow>Time_aligned</sen_EXP_PCT_a_LinPow>
      <sen_EXP_PCT_a_RodPitch>Time_aligned</sen_EXP_PCT_a_RodPitch>
      <sen_EXP_PCT_a_TCool>Time_aligned</sen_EXP_PCT_a_TCool>
      <sen_EXP_PCT_a_VCool>Time_aligned</sen_EXP_PCT_a_VCool>
      <sen_EXP_P_a_HTcoeff>Time_aligned</sen_EXP_P_a_HTcoeff>
      <sen_EXP_P_a_LinPow>Time_aligned</sen_EXP_P_a_LinPow>
      <sen_EXP_P_a_RodPitch>Time_aligned</sen_EXP_P_a_RodPitch>
      <sen_EXP_P_a_TCool>Time_aligned</sen_EXP_P_a_TCool>
      <sen_EXP_P_a_VCool>Time_aligned</sen_EXP_P_a_VCool>
      <sen_REP_MaxCtoF_a_HTcoeff>Time_aligned</sen_REP_MaxCtoF_a_HTcoeff>
      <sen_REP_MaxCtoF_a_LinPow>Time_aligned</sen_REP_MaxCtoF_a_LinPow>
      <sen_REP_MaxCtoF_a_RodPitch>Time_aligned</sen_REP_MaxCtoF_a_RodPitch>
      <sen_REP_MaxCtoF_a_TCool>Time_aligned</sen_REP_MaxCtoF_a_TCool>
      <sen_REP_MaxCtoF_a_VCool>Time_aligned</sen_REP_MaxCtoF_a_VCool>
      <sen_REP_PCT_a_HTcoeff>Time_aligned</sen_REP_PCT_a_HTcoeff>
      <sen_REP_PCT_a_LinPow>Time_aligned</sen_REP_PCT_a_LinPow>
      <sen_REP_PCT_a_RodPitch>Time_aligned</sen_REP_PCT_a_RodPitch>
      <sen_REP_PCT_a_TCool>Time_aligned</sen_REP_PCT_a_TCool>
      <sen_REP_PCT_a_VCool>Time_aligned</sen_REP_PCT_a_VCool>
      <sen_REP_P_a_HTcoeff>Time_aligned</sen_REP_P_a_HTcoeff>
      <sen_REP_P_a_LinPow>Time_aligned</sen_REP_P_a_LinPow>
      <sen_REP_P_a_TCool>Time_aligned</sen_REP_P_a_TCool>
      <sen_REP_P_a_VCool>Time_aligned</sen_REP_P_a_VCool>
      <var_EXP_MaxCtoF_a>Time_aligned</var_EXP_MaxCtoF_a>
      <var_EXP_PCT_a>Time_aligned</var_EXP_PCT_a>
      <var_EXP_P_a>Time_aligned</var_EXP_P_a>
      <var_REP_MaxCtoF_a>Time_aligned</var_REP_MaxCtoF_a>
      <var_REP_PCT_a>Time_aligned</var_REP_PCT_a>
      <var_REP_P_a>Time_aligned</var_REP_P_a>
      <var_ste_EXP_MaxCtoF_a>Time_aligned</var_ste_EXP_MaxCtoF_a>
      <var_ste_EXP_PCT_a>Time_aligned</var_ste_EXP_PCT_a>
      <var_ste_EXP_P_a>Time_aligned</var_ste_EXP_P_a>
      <var_ste_REP_MaxCtoF_a>Time_aligned</var_ste_REP_MaxCtoF_a>
      <var_ste_REP_PCT_a>Time_aligned</var_ste_REP_PCT_a>
      <var_ste_REP_P_a>Time_aligned</var_ste_REP_P_a>
    </dims>
    <general>
      <inputs>avg_VCool,avg_TCool,avg_LinPow,avg_HTcoeff,avg_RodPitch,var_VCool,var_TCool,var_LinPow,var_HTcoeff,var_RodPitch</inputs>
      <outputs>sen_EXP_PCT_a_VCool,sen_EXP_MaxCtoF_a_VCool,sen_EXP_P_a_VCool,sen_EXP_PCT_a_TCool,sen_EXP_MaxCtoF_a_TCool,sen_EXP_P_a_TCool,sen_EXP_PCT_a_LinPow,sen_EXP_MaxCtoF_a_LinPow,sen_EXP_P_a_LinPow,sen_EXP_PCT_a_HTcoeff,sen_EXP_MaxCtoF_a_HTcoeff,sen_EXP_P_a_HTcoeff,sen_EXP_PCT_a_RodPitch,sen_EXP_MaxCtoF_a_RodPitch,sen_EXP_P_a_RodPitch,sen_REP_PCT_a_VCool,sen_REP_MaxCtoF_a_VCool,sen_REP_P_a_VCool,sen_REP_PCT_a_TCool,sen_REP_MaxCtoF_a_TCool,sen_REP_P_a_TCool,sen_REP_PCT_a_LinPow,sen_REP_MaxCtoF_a_LinPow,sen_REP_P_a_LinPow,sen_REP_PCT_a_HTcoeff,sen_REP_MaxCtoF_a_HTcoeff,sen_REP_P_a_HTcoeff,sen_REP_PCT_a_RodPitch,sen_REP_MaxCtoF_a_RodPitch,avg_EXP_PCT_a,avg_EXP_MaxCtoF_a,avg_EXP_P_a,avg_REP_PCT_a,avg_REP_MaxCtoF_a,avg_REP_P_a,var_EXP_PCT_a,var_EXP_MaxCtoF_a,var_EXP_P_a,var_REP_PCT_a,var_REP_MaxCtoF_a,var_REP_P_a</outputs>
      <pointwise_meta>avg_ste_EXP_MaxCtoF_a,avg_ste_EXP_PCT_a,avg_ste_EXP_P_a,avg_ste_HTcoeff,avg_ste_LinPow,avg_ste_REP_MaxCtoF_a,avg_ste_REP_PCT_a,avg_ste_REP_P_a,avg_ste_RodPitch,avg_ste_TCool,avg_ste_VCool,var_ste_EXP_MaxCtoF_a,var_ste_EXP_PCT_a,var_ste_EXP_P_a,var_ste_HTcoeff,var_ste_LinPow,var_ste_REP_MaxCtoF_a,var_ste_REP_PCT_a,var_ste_REP_P_a,var_ste_RodPitch,var_ste_TCool,var_ste_VCool</pointwise_meta>
      <sampleTag>RAVEN_sample_ID</sampleTag>
    </general>
  </DataSet>

</DataObjectMetadata>
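In this gold file, the vector standard-error variables (e.g. avg_ste_EXP_PCT_a) appear under <dims> keyed to Time_aligned, while the scalar ones (e.g. avg_ste_VCool) are listed only in <pointwise_meta>. A hedged ElementTree sketch of how that split can be recovered from such a file (the path is hypothetical; this is not the RAVEN loader itself):

  import xml.etree.ElementTree as ET

  root = ET.parse('SET_Back_to_MASTER.xml').getroot()       # <DataObjectMetadata>
  dataSet = root.find('DataSet')
  # index coordinates per variable, from the <dims> node
  dims = {node.tag: [d.strip() for d in node.text.split(',')] for node in dataSet.find('dims')}
  # all pointwise meta variables, from <general>/<pointwise_meta>
  metavars = [v.strip() for v in dataSet.find('general/pointwise_meta').text.split(',')]
  vectorMeta = {key: val for key, val in dims.items() if key in metavars}  # e.g. avg_ste_EXP_PCT_a
  scalarMeta = [key for key in metavars if key not in dims]                # e.g. avg_ste_VCool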
Lines changed: 157 additions & 0 deletions (new file)
<?xml version="1.0" ?>
<Simulation verbosity="debug">

  <TestInfo>
    <name>framework/PostProcessors/BasicStatistics.timeDepMeta</name>
    <author>wangc</author>
    <created>2019-05-10</created>
    <classesTested>PostProcessors.BasicStatistics, DataObjects.HistorySet</classesTested>
    <description>
      This test checks the metadata printing of history set, the scalar meta data should be printed with
      the input of HistorySet, while vector meta data should be printed with output of HistorySet. See issue #986.
      With the fix of #986, The HistorySet will handle scalar and vector meta data separately.
    </description>
  </TestInfo>

  <RunInfo>
    <WorkingDir>timeDepMeta</WorkingDir>
    <Sequence>Read_raw, SensPost_1, Back_to_MASTER</Sequence>
    <internalParallel>True</internalParallel>
    <deleteOutExtension>o,plt,rst,csv,i</deleteOutExtension>
  </RunInfo>

  <Files>
    <Input name="myinputfile" type="">RawData</Input>
  </Files>

  <VariableGroups>
    <Group name="GRO_SensPost_in_features_scalar">
      VCool,
      TCool,
      LinPow,
      HTcoeff,
      RodPitch
    </Group>
    <Group name="GRO_SensPost_in_targets_Timealigned">
      EXP_PCT_a,
      EXP_MaxCtoF_a,
      EXP_P_a,
      REP_PCT_a,
      REP_MaxCtoF_a,
      REP_P_a
    </Group>
    <Group name="GRO_SensPost_out_scalar">
      avg_VCool,
      avg_TCool,
      avg_LinPow,
      avg_HTcoeff,
      avg_RodPitch,
      var_VCool,
      var_TCool,
      var_LinPow,
      var_HTcoeff,
      var_RodPitch
    </Group>
    <Group name="GRO_SensPost_out_Timealigned">
      sen_EXP_PCT_a_VCool,
      sen_EXP_MaxCtoF_a_VCool,
      sen_EXP_P_a_VCool,
      sen_EXP_PCT_a_TCool,
      sen_EXP_MaxCtoF_a_TCool,
      sen_EXP_P_a_TCool,
      sen_EXP_PCT_a_LinPow,
      sen_EXP_MaxCtoF_a_LinPow,
      sen_EXP_P_a_LinPow,
      sen_EXP_PCT_a_HTcoeff,
      sen_EXP_MaxCtoF_a_HTcoeff,
      sen_EXP_P_a_HTcoeff,
      sen_EXP_PCT_a_RodPitch,
      sen_EXP_MaxCtoF_a_RodPitch,
      sen_EXP_P_a_RodPitch,
      sen_REP_PCT_a_VCool,
      sen_REP_MaxCtoF_a_VCool,
      sen_REP_P_a_VCool,
      sen_REP_PCT_a_TCool,
      sen_REP_MaxCtoF_a_TCool,
      sen_REP_P_a_TCool,
      sen_REP_PCT_a_LinPow,
      sen_REP_MaxCtoF_a_LinPow,
      sen_REP_P_a_LinPow,
      sen_REP_PCT_a_HTcoeff,
      sen_REP_MaxCtoF_a_HTcoeff,
      sen_REP_P_a_HTcoeff,
      sen_REP_PCT_a_RodPitch,
      sen_REP_MaxCtoF_a_RodPitch,
      sen_REP_P_a_HTcoeff,
      avg_EXP_PCT_a,
      avg_EXP_MaxCtoF_a,
      avg_EXP_P_a,
      avg_REP_PCT_a,
      avg_REP_MaxCtoF_a,
      avg_REP_P_a,
      var_EXP_PCT_a,
      var_EXP_MaxCtoF_a,
      var_EXP_P_a,
      var_REP_PCT_a,
      var_REP_MaxCtoF_a,
      var_REP_P_a
    </Group>
  </VariableGroups>

  <Models>
    <PostProcessor name="SensPost_1" subType="BasicStatistics" verbosity="debug">
      <pivotParameter>Time_aligned</pivotParameter>
      <sensitivity prefix="sen">
        <targets>GRO_SensPost_in_targets_Timealigned</targets>
        <features>GRO_SensPost_in_features_scalar</features>
      </sensitivity>
      <expectedValue prefix="avg">
        GRO_SensPost_in_targets_Timealigned, GRO_SensPost_in_features_scalar
      </expectedValue>
      <variance prefix="var">
        GRO_SensPost_in_targets_Timealigned, GRO_SensPost_in_features_scalar
      </variance>
    </PostProcessor>
  </Models>

  <DataObjects>
    <HistorySet name="SET_SensPost_aligned">
      <Input>GRO_SensPost_in_features_scalar</Input>
      <Output>GRO_SensPost_in_targets_Timealigned</Output>
      <options>
        <pivotParameter>Time_aligned</pivotParameter>
      </options>
    </HistorySet>
    <HistorySet name="SET_Back_to_MASTER">
      <Input>GRO_SensPost_out_scalar</Input>
      <Output>GRO_SensPost_out_Timealigned</Output>
      <options>
        <pivotParameter>Time_aligned</pivotParameter>
      </options>
    </HistorySet>
  </DataObjects>

  <Steps>
    <IOStep name="Read_raw">
      <Input class="Files" type="csv">myinputfile</Input>
      <Output class="DataObjects" type="HiostorySet">SET_SensPost_aligned</Output>
    </IOStep>

    <PostProcess name="SensPost_1">
      <Input class="DataObjects" type="HistorySet">SET_SensPost_aligned</Input>
      <Model class="Models" type="PostProcessor">SensPost_1</Model>
      <Output class="DataObjects" type="HistorySet">SET_Back_to_MASTER</Output>
    </PostProcess>

    <IOStep name="Back_to_MASTER">
      <Input class="DataObjects" type="HistorySet">SET_Back_to_MASTER</Input>
      <Output class="OutStreams" type="Print">Back_to_MASTER</Output>
    </IOStep>
  </Steps>

  <OutStreams>
    <Print name="Back_to_MASTER">
      <type>csv</type>
      <source>SET_Back_to_MASTER</source>
    </Print>
  </OutStreams>
</Simulation>
