From 085ec5a0e2cfb59b56e3478d910981f513a4734c Mon Sep 17 00:00:00 2001
From: L-Nafaryus
Date: Tue, 26 Oct 2021 13:47:10 +0500
Subject: [PATCH] Mod: -

---
 anisotropy/__init__.py    |  3 ++
 anisotropy/core/cli.py    | 16 +++++++--
 anisotropy/core/config.py |  4 ++-
 anisotropy/core/runner.py | 75 ++++++++++++++++++++++++++++++++++++---
 4 files changed, 90 insertions(+), 8 deletions(-)

diff --git a/anisotropy/__init__.py b/anisotropy/__init__.py
index 0baa3ab..fab208c 100644
--- a/anisotropy/__init__.py
+++ b/anisotropy/__init__.py
@@ -23,6 +23,9 @@ import os
 env = dict(
     ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
 )
+env.update(
+    CLI = os.path.join(env["ROOT"], "anisotropy/core/cli.py")
+)
 env.update(
     BUILD = os.path.join(env["ROOT"], "build"),
     LOG = os.path.join(env["ROOT"], "logs"),
diff --git a/anisotropy/core/cli.py b/anisotropy/core/cli.py
index 2d67c54..22c0ec4 100644
--- a/anisotropy/core/cli.py
+++ b/anisotropy/core/cli.py
@@ -43,7 +43,8 @@ import logging
 )
 def compute(path, configFile, nprocs, stage, overwrite):
     from anisotropy.core.runner import UltimateRunner
-    from anisotropy.core.config import Config, DefaultConfig
+    from anisotropy.core.config import DefaultConfig
+    from copy import deepcopy
 
     config = DefaultConfig()
 
@@ -57,7 +58,18 @@ def compute(path, configFile, nprocs, stage, overwrite):
     )
     config.expand()
 
-    runner = UltimateRunner()
+    baseRunner = UltimateRunner(config = config, exec_id = True)
+    queue = []
+
+    for case in config.cases:
+        caseConfig = deepcopy(config)
+        caseConfig.cases = [ case ]
+
+        caseRunner = UltimateRunner(config = caseConfig)
+        caseRunner._exec_id = baseRunner._exec_id
+        queue.append(caseRunner)
+
+    baseRunner.parallel(queue)
 
 
 class LiteralOption(click.Option):
diff --git a/anisotropy/core/config.py b/anisotropy/core/config.py
index c8f282f..33aca09 100644
--- a/anisotropy/core/config.py
+++ b/anisotropy/core/config.py
@@ -87,7 +87,9 @@ class DefaultConfig(Config):
                 "nprocs": 1,
                 "stage": "all",
                 "overwrite": False,
-
+                "database": "anisotropy.db",
+                "build": "build",
+                "logs": "logs"
             },
             "structures": []
         }
diff --git a/anisotropy/core/runner.py b/anisotropy/core/runner.py
index 864cb77..b9a59f2 100644
--- a/anisotropy/core/runner.py
+++ b/anisotropy/core/runner.py
@@ -4,19 +4,55 @@
 
 from datetime import datetime
 
+from anisotropy.core.config import DefaultConfig
+from anisotropy.database import *
+from anisotropy.salomepl.runner import SalomeRunner
 import anisotropy.samples as samples
 
 
 class UltimateRunner(object):
-    def __init__(self):
+    def __init__(self, config = None, exec_id = False):
 
-        self.database = Database(..)
+        self.config = config or DefaultConfig()
+
+        self.database = Database(self.config["database"])
         self.datebase.setup()
 
-        self._exec = Execution(date = datetime.now())
-        self._exec.save()
+        if exec_id:
+            self._exec_id = Execution(date = datetime.now())
+            self._exec_id.save()
+
+    def casePath(self):
+        case = self.config.cases[0]
+
+        return os.path.join(
+            self.config["build"],
+            case["label"],
+            "direction-{}".format(str(case["direction"]).replace(" ", "")),
+            "theta-{}".format(case["theta"])
+        )
 
     def computeMesh(self):
-        pass
+
+        case = self.config.cases[0]
+        runner = SalomeRunner()
+        cliArgs = [
+            "computemesh",
+            case["label"],
+            case["direction"],
+            case["theta"],
+            path
+        ]
+
+        out, err, returncode = runner.execute(
+            env["CLI"],
+            *cliArgs,
+            timeout = self.config["salome_timeout"],
+            root = env["ROOT"],
+            logpath = self.casePath()
+        )
+
+        return out, err, returncode
+
     def _computeMesh(self):
         """Function for Salome
 
@@ -24,6 +60,7 @@ class UltimateRunner(object):
 
         Resolution pipeline: cli(UR -> computeMesh) -> salomeRunner(salome -> cli) -> computemesh(UR -> _computeMesh)
         """
+        # TODO: add logger configuration here
 
         sample = samples.__dict__[..]
 
@@ -48,3 +85,31 @@ class UltimateRunner(object):
 
         flow = sample.onephaseflow(..)
         flow.build()
+
+    def pipeline(self, stage: str = None):
+        stage = stage or config["stage"]
+
+        match stage:
+            case "mesh" | "all":
+                with self.database.atomic():
+                    Shape.create(self._exec_id, **self.config.cases[0])
+                    Mesh.create(self._exec_id)
+
+                self.computeMesh(..)
+
+            case "flow" | "all":
+                with self.database.atomic():
+                    Flow.create(self._exec_id)
+
+                self.computeFlow(..)
+
+            case "postProcess" | "all":
+                self.postProcess(..)
+
+
+
+    def parallel(queue: list, nprocs = None):
+        nprocs = nprocs or config["nprocs"]
+
+        parallel(nprocs, [()] * len(queue), [ runner.pipeline for runner in queue ])
+
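
Note: the parallel(nprocs, [()] * len(queue), [ runner.pipeline for runner in queue ]) call at the
end of UltimateRunner.parallel relies on a helper that is not included in this patch. A minimal
sketch of such a fan-out, assuming it pairs each callable with one argument tuple and runs them on
a fixed-size worker pool; the name fanOut and the use of multiprocessing.Pool are assumptions for
illustration, not the project's actual helper:

# Sketch only: run funcs[i](*args[i]) for every queued runner, at most nprocs at a time.
from multiprocessing import Pool


def fanOut(nprocs, args, funcs):
    with Pool(processes = nprocs) as pool:
        # Submit every pipeline call to the pool without blocking.
        results = [ pool.apply_async(func, arg) for func, arg in zip(funcs, args) ]

        # Collect return values in submission order.
        return [ result.get() for result in results ]

Called as fanOut(nprocs, [()] * len(queue), [ runner.pipeline for runner in queue ]), this mirrors
the queue built in cli.py: one single-case runner per config case, all sharing the same _exec_id.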