Skip to content

Commit

Permalink
updates sampling-integration module to use powsybl-core v2.1.0 computation APIs
Browse files Browse the repository at this point in the history
  • Loading branch information
CBiasuzzi authored and sylvlecl committed Nov 2, 2018
1 parent 543cef9 commit bf5954f
Show file tree
Hide file tree
Showing 3 changed files with 105 additions and 104 deletions.
3 changes: 1 addition & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,7 @@
<module>online-workflow</module>
<module>pclfsim-integration</module>
<module>py-powsybl</module>
<!-- to be aligned to powsybl-core v2.1.0 -->
<!--module>sampling-integration</module-->
<module>sampling-integration</module>
<module>security-analysis-ws</module>
<module>uncertainties-analysis</module>
<module>wca-integration</module>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@ private Command createMatm2Cmd() {
.program(wp41cM2)
.args("MOD1_${EXEC_NUM}.mat",
"MOD2_${EXEC_NUM}.mat",
Command.EXECUTION_NUMBER_PATTERN,
CommandConstants.EXECUTION_NUMBER_PATTERN,
"" + config.getIr(),
"" + config.getTflag())
.inputFiles(new InputFile("MOD1_${EXEC_NUM}.mat"))
Expand All @@ -300,56 +300,60 @@ private Command createMatm2Cmd() {

}



private void computeMod1AndMod2(DataMiningFacadeParams dmParams, Path cacheDir) throws Exception {
try (CommandExecutor executor = computationManager.newCommandExecutor(createEnv(), WORKING_DIR_PREFIX, config.isDebug())) {
Path workingDir = executor.getWorkingDir();
LOGGER.info("Retrieving historical data for network {}", network.getId());
Wp41HistoData histoData = getHistoDBData(dmParams, workingDir);
int parK = config.getPar_k() == -1 ? (int) Math.round(Math.sqrt(histoData.getHdTable().rowKeyList().size() / 2))
: config.getPar_k();
LOGGER.info(" IR: {}, tflag: {}, number of clusters: {} ", config.getIr(), config.getTflag(), parK);
double[][] dataMatrix = Utils.histoDataAsDoubleMatrixNew(histoData.getHdTable());
Utils.writeWp41ContModule1Mat(workingDir.resolve(M1INPUTFILENAME), dataMatrix);

if (config.getValidationDir() != null) {
// store input file, for validation purposes
computationManager.execute(new ExecutionEnvironment(createEnv(), WORKING_DIR_PREFIX, config.isDebug()), new AbstractExecutionHandler<Object>() {
int parK;
@Override
public List<CommandExecution> before(Path workingDir) throws IOException {
LOGGER.info("Retrieving historical data for network {}", network.getId());
Wp41HistoData histoData = null;
try {
Files.copy(workingDir.resolve(M1INPUTFILENAME), config.getValidationDir().resolve(M1INPUTFILENAME), REPLACE_EXISTING);
Utils.dumpWp41HistoDataColumns(histoData, config.getValidationDir());
} catch (Throwable t) {
LOGGER.error(t.getMessage(), t);
histoData = getHistoDBData(dmParams, workingDir);
} catch (Exception e) {
throw new RuntimeException(e);
}
parK = config.getPar_k() == -1 ? (int) Math.round(Math.sqrt(histoData.getHdTable().rowKeyList().size() / 2))
: config.getPar_k();
LOGGER.info(" IR: {}, tflag: {}, number of clusters: {} ", config.getIr(), config.getTflag(), parK);
double[][] dataMatrix = Utils.histoDataAsDoubleMatrixNew(histoData.getHdTable());
Utils.writeWp41ContModule1Mat(workingDir.resolve(M1INPUTFILENAME), dataMatrix);

if (config.getValidationDir() != null) {
// store input file, for validation purposes
try {
Files.copy(workingDir.resolve(M1INPUTFILENAME), config.getValidationDir().resolve(M1INPUTFILENAME), REPLACE_EXISTING);
Utils.dumpWp41HistoDataColumns(histoData, config.getValidationDir());
} catch (Throwable t) {
LOGGER.error(t.getMessage(), t);
}
}
}

LOGGER.info("Executing wp41 module1(once) module2 ({} times)", parK);

ExecutionReport report = executor.start(new CommandExecution(createMatm1Cmd(parK), 1, priority));
report.log();
if (report.getErrors().size() > 0) {
throw new RuntimeException("Module 1 failed");
}
//1 brings all module1 output files (one per cluster plus the statvar file) back to the cache
for (int i = 0; i < parK; i++) {
Path srcPath = workingDir.resolve("MOD1_" + i + ".mat");
Path destPath = cacheDir.resolve("MOD1_" + i + ".mat");
Files.copy(srcPath, destPath, REPLACE_EXISTING);
LOGGER.info("Executing wp41 module1(once), module2 ({} times)", parK);
return Arrays.asList(new CommandExecution(createMatm1Cmd(parK), 1, priority),
new CommandExecution(createMatm2Cmd(), parK, priority));
}
Files.copy(workingDir.resolve(M1STATVARSFILENAME), cacheDir.resolve(M1STATVARSFILENAME), REPLACE_EXISTING);

//2 execute module2
report = executor.start(new CommandExecution(createMatm2Cmd(), parK, priority));
report.log();
if (report.getErrors().size() > 0) {
throw new RuntimeException("Module 2 failed");
}
for (int i = 0; i < parK; i++) {
Path srcPath = workingDir.resolve("MOD2_" + i + ".mat");
Path destPath = cacheDir.resolve("MOD2_" + i + ".mat");
Files.copy(srcPath, destPath, REPLACE_EXISTING);
@Override
public Object after(Path workingDir, ExecutionReport report) throws IOException {
report.log();
if (report.getErrors().size() > 0) {
throw new RuntimeException("Module 1 failed");
}
//1 brings all module1 output files (one per cluster plus the statvar file) back to the cache
for (int i = 0; i < parK; i++) {
Path srcPath = workingDir.resolve("MOD1_" + i + ".mat");
Path destPath = cacheDir.resolve("MOD1_" + i + ".mat");
Files.copy(srcPath, destPath, REPLACE_EXISTING);
}
Files.copy(workingDir.resolve(M1STATVARSFILENAME), cacheDir.resolve(M1STATVARSFILENAME), REPLACE_EXISTING);
for (int i = 0; i < parK; i++) {
Path srcPath = workingDir.resolve("MOD2_" + i + ".mat");
Path destPath = cacheDir.resolve("MOD2_" + i + ".mat");
Files.copy(srcPath, destPath, REPLACE_EXISTING);
}
return null;
}
}
}).join();
}

private Command createMatm3PreCmd(int clustNums, int samplesSize) {
Expand Down Expand Up @@ -381,14 +385,13 @@ private Command createMatm3PreCmd(int clustNums, int samplesSize) {
.build();
}


private Command createMatm3Cmd() {
List<String> args1 = new ArrayList<>();
args1.add("MOD1_${EXEC_NUM}.mat");
args1.add("MOD2_${EXEC_NUM}.mat");
args1.add(M3NSAMCFILENAME);
args1.add("MOD3_${EXEC_NUM}.mat");
args1.add(Command.EXECUTION_NUMBER_PATTERN);
args1.add(CommandConstants.EXECUTION_NUMBER_PATTERN);
if (config.getRngSeed() != null) {
args1.add(Integer.toString(config.getRngSeed()));
} else {
Expand All @@ -415,10 +418,6 @@ private Command createMatm3Cmd() {

}





private Command createMatm3reduceCmd(int clustNums) {
String wp41cM3Reduce;
if (config.getBinariesDir() != null) {
Expand All @@ -444,22 +443,21 @@ private Command createMatm3reduceCmd(int clustNums) {
.build();
}


private double[][] computeModule3(int nSamples) throws Exception {
LOGGER.info("Executing wp41 module3 (IR: {}, tflag: {}, number of clusters: {}), getting {} samples", config.getIr(), config.getTflag(), nClusters, nSamples);
try (CommandExecutor executor = computationManager.newCommandExecutor(createEnv(), WORKING_DIR_PREFIX, config.isDebug())) {

Path workingDir = executor.getWorkingDir();
Command cmd = createMatm3PreCmd(nClusters, nSamples);
ExecutionReport report = executor.start(new CommandExecution(cmd, 1, priority));
report.log();
if (report.getErrors().isEmpty()) {
report = executor.start(new CommandExecution(createMatm3Cmd(), nClusters, priority));

return computationManager.execute(new ExecutionEnvironment(createEnv(), WORKING_DIR_PREFIX, config.isDebug()), new AbstractExecutionHandler<double[][]>() {
@Override
public List<CommandExecution> before(Path workingDir) throws IOException {
return Arrays.asList(new CommandExecution(createMatm3PreCmd(nClusters, nSamples), 1, priority),
new CommandExecution(createMatm3Cmd(), nClusters, priority),
new CommandExecution(createMatm3reduceCmd(nClusters), 1, priority)
);
}
@Override
public double[][] after(Path workingDir, ExecutionReport report) throws IOException {
report.log();
if (report.getErrors().isEmpty()) {
report = executor.start(new CommandExecution(createMatm3reduceCmd(nClusters), 1, priority));
report.log();

LOGGER.debug("Retrieving module3 results from file {}", M3OUTPUTFILENAME);
MatFileReader mfr = new MatFileReader();
Map<String, MLArray> content = mfr.read(workingDir.resolve(M3OUTPUTFILENAME).toFile());
Expand All @@ -479,16 +477,14 @@ private double[][] computeModule3(int nSamples) throws Exception {
LOGGER.error(t.getMessage(), t);
}
}

return xNewMat;
} else {
return null;
}
}
return null;
}

}).join();
}


private Command createBinSamplerCmd(Path iFilePath, int nSamples) {
String wp41bIs;
if (config.getBinariesDir() != null) {
Expand All @@ -515,33 +511,39 @@ public double[][] computeBinSampling(double[] marginalExpectations, int nSamples
if (nSamples <= 0) {
throw new IllegalArgumentException("number of samples must be positive");
}
try (CommandExecutor executor = computationManager.newCommandExecutor(createEnv(), WORKING_DIR_PREFIX, config.isDebug())) {

Path workingDir = executor.getWorkingDir();
Utils.writeWP41BinaryIndependentSamplingInputFile(workingDir.resolve(B1INPUTFILENAME), marginalExpectations);

LOGGER.info("binsampler, asking for {} samples", nSamples);
return computationManager.execute(new ExecutionEnvironment(createEnv(), WORKING_DIR_PREFIX, config.isDebug()), new AbstractExecutionHandler<double[][]>() {
@Override
public List<CommandExecution> before(Path workingDir) throws IOException {
Utils.writeWP41BinaryIndependentSamplingInputFile(workingDir.resolve(B1INPUTFILENAME), marginalExpectations);

Command cmd = createBinSamplerCmd(workingDir.resolve(B1INPUTFILENAME), nSamples);
ExecutionReport report = executor.start(new CommandExecution(cmd, 1, priority));
report.log();
LOGGER.info("binsampler, asking for {} samples", nSamples);

LOGGER.debug("Retrieving binsampler results from file {}", B1OUTPUTFILENAME);
MatFileReader mfr = new MatFileReader();
Map<String, MLArray> content;
content = mfr.read(workingDir.resolve(B1OUTPUTFILENAME).toFile());
String errMsg = Utils.MLCharToString((MLChar) content.get("errmsg"));
if (!("Ok".equalsIgnoreCase(errMsg))) {
throw new MatlabException(errMsg);
Command cmd = createBinSamplerCmd(workingDir.resolve(B1INPUTFILENAME), nSamples);
return Collections.singletonList(new CommandExecution(createBinSamplerCmd(workingDir.resolve(B1INPUTFILENAME), nSamples), 1, priority));
}
MLArray xNew = content.get("STATUS");
Objects.requireNonNull(xNew);
MLDouble mld = (MLDouble) xNew;
double[][] retMat = mld.getArray();
return retMat;
}


@Override
public double[][] after(Path workingDir, ExecutionReport report) throws IOException {
report.log();
if (report.getErrors().isEmpty()) {
LOGGER.debug("Retrieving binsampler results from file {}", B1OUTPUTFILENAME);
MatFileReader mfr = new MatFileReader();
Map<String, MLArray> content;
content = mfr.read(workingDir.resolve(B1OUTPUTFILENAME).toFile());
String errMsg = Utils.MLCharToString((MLChar) content.get("errmsg"));
if (!("Ok".equalsIgnoreCase(errMsg))) {
throw new MatlabException(errMsg);
}
MLArray xNew = content.get("STATUS");
Objects.requireNonNull(xNew);
MLDouble mld = (MLDouble) xNew;
return mld.getArray();
} else {
return null;
}
}
}).join();
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;

import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;

Expand Down Expand Up @@ -129,19 +126,22 @@ public void run(CommandLine line, ToolRunningContext context) throws Exception {
throw new RuntimeException("either specify both set1 and set2 parameters, or none of them");
}

try (CommandExecutor executor = context.getComputationManager().newCommandExecutor(createEnv(config), WORKING_DIR_PREFIX, config.isDebug())) {
Path workingDir = executor.getWorkingDir();
com.powsybl.computation.Command cmd = createConcatMatFilesCmd(config.getValidationDir(), MOD3FILES_PATTERN, config.getValidationDir().resolve(CONCATSAMPLESFILENAME), config);
int priority = 1;
ExecutionReport report = executor.start(new CommandExecution(cmd, 1, priority));
report.log();
if (report.getErrors().isEmpty()) {
report = executor.start(new CommandExecution(createDataComparatorCmd(config.getValidationDir().resolve(M1INPUTFILENAME).toFile().getAbsolutePath(), config.getValidationDir().resolve(CONCATSAMPLESFILENAME).toFile().getAbsolutePath(), set1, set2, config), 1, priority));
context.getShortTimeExecutionComputationManager().execute(new ExecutionEnvironment(createEnv(config), WORKING_DIR_PREFIX, config.isDebug()), new AbstractExecutionHandler<Object>() {
@Override
public List<CommandExecution> before(Path workingDir) throws IOException {
int priority = 1;
return Arrays.asList(new CommandExecution(createConcatMatFilesCmd(config.getValidationDir(), MOD3FILES_PATTERN, config.getValidationDir().resolve(CONCATSAMPLESFILENAME), config), 1, priority),
new CommandExecution(createDataComparatorCmd(config.getValidationDir().resolve(M1INPUTFILENAME).toFile().getAbsolutePath(), config.getValidationDir().resolve(CONCATSAMPLESFILENAME).toFile().getAbsolutePath(), set1, set2, config), 1, priority));
}
@Override
public Object after(Path workingDir, ExecutionReport report) throws IOException {
report.log();
Files.copy(workingDir.resolve(DATA_COMPARATOR_OUT_FIG), Paths.get(oFilePrefix + ".fig"), REPLACE_EXISTING);
Files.copy(workingDir.resolve(DATA_COMPARATOR_OUT_PNG), Paths.get(oFilePrefix + ".png"), REPLACE_EXISTING);
return null;
}
}
}).join();

}


Expand Down

0 comments on commit bf5954f

Please sign in to comment.