Mod: working shape stage

parent c35d5cfe3c
commit 7454714d70
@@ -67,11 +67,46 @@ class CliListOption(click.Option):
         else:

             return self._convert(ctx, value)


+
+def verboseLevel(level: int):
+    return {
+        0: logging.ERROR,
+        1: logging.INFO,
+        2: logging.DEBUG
+    }.get(level, logging.ERROR)
+
+
 @click.group()
 def anisotropy():
     pass

+
+@anisotropy.command(
+    help = "Initialize new anisotropy project."
+)
+@click.option(
+    "-P", "--path", "path",
+    default = os.getcwd(),
+    help = "Specify directory to use (instead of cwd)"
+)
+@click.option(
+    "-v", "--verbose", "verbose",
+    count = True,
+    help = "Increase verbose level"
+)
+def init(path, verbose):
+    from anisotropy.core.config import DefaultConfig
+    from anisotropy.core.utils import setupLogger
+
+    setupLogger(verboseLevel(verbose))
+    logger = logging.getLogger(__name__)
+
+    config = DefaultConfig()
+    filepath = os.path.abspath(os.path.join(path, "anisotropy.toml"))
+
+    logger.info(f"Saving file at { filepath }")
+    config.dump(filepath)
+
+
 @anisotropy.command()
 @click.option(
     "-P", "--path", "path",
@@ -106,13 +141,24 @@ def anisotropy():
     cls = KeyValueOption,
     help = "Overwrite existing parameter (except control variables)"
 )
-def compute(path, configFile, nprocs, stage, overwrite, params):
+@click.option(
+    "-v", "--verbose", "verbose",
+    count = True,
+    help = "Increase verbose level"
+)
+def compute(path, configFile, nprocs, stage, overwrite, params, verbose):
     from anisotropy.core.runner import UltimateRunner
     from anisotropy.core.config import DefaultConfig
+    from anisotropy.core.utils import setupLogger
+
+    setupLogger(verboseLevel(verbose))
+    logger = logging.getLogger(__name__)

     config = DefaultConfig()

     if configFile:
+        filepath = os.path.abspath(configFile)
+        logger.info(f"Loading file from { filepath }")
         config.load(configFile)

     config.update(
@@ -3,40 +3,46 @@
 # License: GNU GPL version 3, see the file "LICENSE" for details.

 from datetime import datetime
+import os
 from os import path
-import logging

 from anisotropy.core.config import DefaultConfig

+import logging
 from anisotropy.core.utils import parallel, ParallelRunner, setupLogger
-from anisotropy.database import *
+
+logger = logging.getLogger(__name__)
+
+from anisotropy.database import database, tables
+
+T = tables
+
 from anisotropy.shaping import Simple, BodyCentered, FaceCentered
 from anisotropy.meshing import Mesh
 from anisotropy.openfoam.presets import CreatePatchDict
 from anisotropy.solving.onephase import OnePhaseFlow

-logger = logging.getLogger("anisotropy")
-setupLogger(logger, logging.INFO)

 class UltimateRunner(object):
-    def __init__(self, config = None, exec_id = None, m_shape = None):
+    def __init__(self, config = None, exec_id = None, t_shape = None):

         self.config = config or DefaultConfig()

-        if not m_shape:
-            self.database = Database(self.config["database"])
-            self.database.setup()
+        self.type = "master" if not exec_id else "worker"
+
+        if self.type == "master":
+            self.prepareDatabase()

-        if not exec_id:
-            with self.database.database:
-                self.exec_id = Execution(date = datetime.now())
+        if self.type == "master":
+            with self.database:
+                self.exec_id = T.Execution(date = datetime.now())
                 self.exec_id.save()
-            self.type = "master"
-            self.m_shape = None
+            self.t_shape = None

         else:
             self.exec_id = exec_id
-            self.type = "worker"
-            self.m_shape = m_shape
+            self.t_shape = t_shape

         self.shape = None
         self.mesh = None
@@ -44,28 +50,34 @@ class UltimateRunner(object):

         self.queue = []


+    def prepareDatabase(self):
+        # NOTE: separate function in cause of unpicklability of connections
+        self.database = database
+        self.database.setup(self.config["database"])
+
     def fill(self):
         self.config.expand()
+        logger.info(f"Preparing queue: { len(self.config.cases) }")

         for case in self.config.cases:
-            with self.database.database:
-                m_shape = Shape(
+            with self.database:
+                t_shape = T.Shape(
                     exec_id = self.exec_id,
                     **case
                 )
-                m_shape.save()
+                t_shape.save()

             self.queue.append(UltimateRunner(
                 config = self.config,
                 exec_id = self.exec_id,
-                m_shape = m_shape
+                t_shape = t_shape
             ))


     def start(self, queue: list = None, nprocs: int = None):
         nprocs = nprocs or self.config["nprocs"]

+        logger.info(f"Starting subprocesses: { nprocs }")
         parallel = ParallelRunner(nprocs = nprocs)
         parallel.start()

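The commit does not show the compute command driving these methods end-to-end, so the following is a hypothetical driver sketch of how the master/worker split introduced here is meant to be exercised, using only names that appear in this commit (UltimateRunner, DefaultConfig, fill, start):

from anisotropy.core.runner import UltimateRunner
from anisotropy.core.config import DefaultConfig

config = DefaultConfig()                   # config["nprocs"], config["stage"], config["build"] are read by the runner

runner = UltimateRunner(config = config)   # no exec_id -> type == "master": prepareDatabase() + new Execution row
runner.fill()                              # one T.Shape row and one worker UltimateRunner per expanded case
runner.start()                             # ParallelRunner fans runner.pipeline out over config["nprocs"] processes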
@@ -73,54 +85,66 @@ class UltimateRunner(object):
             parallel.append(runner.pipeline, args = [self.config["stage"]])

         parallel.wait()
-        #parallel(nprocs, args, runners)
         # TODO: if runner done - remove from queue; results from parallel function

     def casepath(self):
-        with self.database.database:
-            params = Shape.get(
-                Shape.exec_id == self.exec_id,
-                Shape.shape_id == self.m_shape.shape_id
+        with self.database:
+            params = T.Shape.get(
+                T.Shape.exec_id == self.exec_id,
+                T.Shape.shape_id == self.t_shape.shape_id
             )

-        return path.abspath(path.join(
-            self.config["build"],
-            params.label,
-            "direction-[{},{},{}]".format(*[ str(d) for d in params.direction ]),
-            "theta-{}".format(params.theta)
-        ))
+        direction = "direction-[{},{},{}]".format(*[ str(d) for d in params.direction ])
+        theta = "theta-{}".format(params.theta)
+        dirpath = path.join(self.config["build"], params.label, direction, theta)
+
+        return path.abspath(dirpath)

     def computeShape(self):
         if not self.type == "worker":
             return
-        self.database = Database(self.config["database"])
-        self.database.setup()
-        with self.database.database:
-            params = Shape.get(
-                Shape.exec_id == self.exec_id,
-                Shape.shape_id == self.m_shape.shape_id
+
+        with self.database:
+            params = T.Shape.get(
+                T.Shape.exec_id == self.exec_id,
+                T.Shape.shape_id == self.t_shape.shape_id
             )

+        logger.info("Computing shape for {} with direction = {} and theta = {}".format(params.label, params.direction, params.theta))
         filename = "shape.step"
-        logger.info([params.label, params.direction, params.theta])
         self.shape = {
             "simple": Simple,
             "bodyCentered": BodyCentered,
             "faceCentered": FaceCentered
-        }[params.label](params.direction)
+        }[params.label]
+
+        self.shape(params.direction)
         self.shape.build()

         os.makedirs(self.casepath(), exist_ok = True)
         self.shape.export(path.join(self.casepath(), filename))

-        with self.database.database:
+        with self.database:
             params.shapeStatus = "Done"
             params.save()

     def computeMesh(self):
-        params = self.config.cases[0]
+        if not self.type == "worker":
+            return
+
+        with self.database:
+            params = (T.Mesh.select(T.Shape, T.Mesh)
+                .join(
+                    T.Mesh,
+                    JOIN.INNER,
+                    on = (T.Mesh.shape_id == T.Shape.shape_id)
+                ).where(
+                    T.Shape.exec_id == self.exec_id,
+                    T.Shape.shape_id == self.t_shape.shape_id
+                ))
+
+        logger.info("Computing mesh for {} with direction = {} and theta = {}".format(params.label, params.direction, params.theta))
         filename = "mesh.mesh"

         self.mesh = Mesh(self.shape.shape)
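As a concrete (hypothetical) example of the refactored casepath(): for a Shape row with label "simple", direction [1.0, 0.0, 0.0] and theta 0.01, both the old and the new code resolve to

    <config["build"]>/simple/direction-[1.0,0.0,0.0]/theta-0.01

only the construction now goes through the named intermediates direction, theta and dirpath before path.abspath().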
@@ -185,6 +209,8 @@ class UltimateRunner(object):


     def pipeline(self, stage: str = None):
+        self.prepareDatabase()
+
         stage = stage or self.config["stage"]

         if stage in ["shape", "all"]:
@@ -19,27 +19,29 @@ class CustomFormatter(logging.Formatter):
         red = "\x1b[31;21m"
         bold_red = "\x1b[31;1m"
         reset = "\x1b[0m"
-        format = "[ %(asctime)s ] [ %(processName)s ] [ %(levelname)s ] %(message)s"
+
+        info = "[%(levelname)s %(processName)s %(asctime)s %(funcName)s]" # [ %(processName)s ]
+        msg = " %(message)s"

         formats = {
-            logging.DEBUG: grey + format + reset,
-            logging.INFO: grey + format + reset,
-            logging.WARNING: yellow + format + reset,
-            logging.ERROR: red + format + reset,
-            logging.CRITICAL: bold_red + format + reset
+            logging.DEBUG: grey + info + reset + msg,
+            logging.INFO: grey + info + reset + msg,
+            logging.WARNING: yellow + info + reset + msg,
+            logging.ERROR: red + info + reset + msg,
+            logging.CRITICAL: bold_red + info + reset + msg
         }

         return formats.get(level)

     def format(self, record):
         log_fmt = self._getFormat(record.levelno)
-        time_fmt = "%H:%M:%S %d-%m-%y"
+        time_fmt = "%d-%m-%y %H:%M:%S"
         formatter = logging.Formatter(log_fmt, time_fmt)

         return formatter.format(record)


-def setupLogger(logger, level: int, filepath: str = None):
+def setupLogger(level: int, filepath: str = None):
     """Applies settings to logger

     :param logger:
@@ -51,14 +53,22 @@ def setupLogger(logger, level: int, filepath: str = None):
     :param filepath:
         Path to directory
     """
-    logger.handlers = []
-    logger.setLevel(level)
+    #logger.handlers = []
+    #logger.setLevel(level)
+
+    logging.addLevelName(logging.INFO, "II")
+    logging.addLevelName(logging.WARNING, "WW")
+    logging.addLevelName(logging.ERROR, "EE")
+    logging.addLevelName(logging.CRITICAL, "CC")

     streamhandler = logging.StreamHandler()
     streamhandler.setLevel(level)
     streamhandler.setFormatter(CustomFormatter())
-    logger.addHandler(streamhandler)
+    #logger.addHandler(streamhandler)
+
+    logging.root.setLevel(level)
+    logging.root.addHandler(streamhandler)

     if filepath:
         if not os.path.exists(filepath):
             os.makedirs(filepath, exist_ok = True)
@@ -68,7 +78,9 @@ def setupLogger(logger, level: int, filepath: str = None):
         )
         filehandler.setLevel(level)
         filehandler.setFormatter(CustomFormatter())
-        logger.addHandler(filehandler)
+        #logger.addHandler(filehandler)
+
+        logging.root.addHandler(filehandler)


 class struct:
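Since setupLogger() now attaches the stream and file handlers to logging.root instead of a passed-in logger, module-level loggers need no wiring of their own. A minimal sketch of the intended call pattern (names taken from this commit, standard logging semantics otherwise):

import logging
from anisotropy.core.utils import setupLogger

setupLogger(logging.INFO)               # level + CustomFormatter handler installed on logging.root
logger = logging.getLogger(__name__)    # plain module logger, no handlers attached here
logger.info("message")                  # propagates to the root handler; level name renders as "II"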
@@ -313,7 +325,8 @@ class ParallelRunner(object):
         for n in range(self.nprocs):
             self.processes.append(Process(
                 target = self.queueRelease,
-                args = (self.queueInput, self.queueOutput)
+                args = (self.queueInput, self.queueOutput),
+                name = f"PP-{ n + 1 }"
             ))

         for proc in self.processes:
@@ -4,4 +4,9 @@
 from .models import __database__, __models__

 database = __database__
-tables = __models__
+
+class tables:
+    pass
+
+for model in __models__:
+    setattr(tables, model.__name__, model)
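A short sketch of how the rebuilt tables namespace is consumed elsewhere in this commit (the runner aliases it as T; Shape and its exec_id field are taken from the runner hunks above, the query itself is illustrative):

from anisotropy.database import tables as T

# every model in __models__ is attached by class name, so peewee models are
# reachable as attributes instead of entries in a flat list
ShapeModel = T.Shape
query = ShapeModel.select().where(ShapeModel.exec_id == 1)   # illustrative peewee query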
@@ -34,7 +34,6 @@ class Database(SqliteDatabase):
             #autoconnect = self.autoconnect_
         )

-        print(self.tables)
         self.connect()
         self.create_tables(self.tables)
         self.close()
@@ -10,8 +10,8 @@ from peewee import (
     IntegerField, BooleanField,
     TimeField, DateTimeField
 )
-from anisotropy.database.utils import JSONField
-from .database import Database
+from .utils import JSONField
+from .db import Database


 __database__ = Database()
@@ -9,7 +9,6 @@ from numpy import pi, sqrt
 from .occExtended import *
 from . import Periodic

-
 class Simple(Periodic):
     def __init__(
         self,
@@ -1,4 +1,4 @@
-import os
+import os, shutil
 import unittest

 unittest.TestLoader.sortTestMethodsUsing = None
@@ -28,7 +28,7 @@ class TestDatabase(unittest.TestCase):
         self.assertTrue(table.table_exists())

     def tearDown(self):
-        os.removedirs(os.outputPath)
+        shutil.rmtree(self.outputPath)

 if __name__ == "__main__":
     unittest.main()