Mod: working shape stage
parent c35d5cfe3c
commit 7454714d70
@@ -67,11 +67,46 @@ class CliListOption(click.Option):
        else:
            return self._convert(ctx, value)


def verboseLevel(level: int):
    return {
        0: logging.ERROR,
        1: logging.INFO,
        2: logging.DEBUG
    }.get(level, logging.ERROR)


@click.group()
def anisotropy():
    pass


@anisotropy.command(
    help = "Initialize new anisotropy project."
)
@click.option(
    "-P", "--path", "path",
    default = os.getcwd(),
    help = "Specify directory to use (instead of cwd)"
)
@click.option(
    "-v", "--verbose", "verbose",
    count = True,
    help = "Increase verbose level"
)
def init(path, verbose):
    from anisotropy.core.config import DefaultConfig
    from anisotropy.core.utils import setupLogger

    setupLogger(verboseLevel(verbose))
    logger = logging.getLogger(__name__)

    config = DefaultConfig()
    filepath = os.path.abspath(os.path.join(path, "anisotropy.toml"))

    logger.info(f"Saving file at { filepath }")
    config.dump(filepath)


@anisotropy.command()
@click.option(
    "-P", "--path", "path",
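For reference, a minimal sketch of the mapping that the new verboseLevel helper applies to the counted -v flag; the `anisotropy init -v` style invocations in the comments assume the click group is installed as an `anisotropy` console script:

import logging

levels = {0: logging.ERROR, 1: logging.INFO, 2: logging.DEBUG}   # same table as verboseLevel
assert levels.get(1, logging.ERROR) == logging.INFO              # anisotropy init -v
assert levels.get(2, logging.ERROR) == logging.DEBUG             # anisotropy init -vv
assert levels.get(5, logging.ERROR) == logging.ERROR             # higher counts fall back to the default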
@@ -106,13 +141,24 @@ def anisotropy():
    cls = KeyValueOption,
    help = "Overwrite existing parameter (except control variables)"
)
def compute(path, configFile, nprocs, stage, overwrite, params):
@click.option(
    "-v", "--verbose", "verbose",
    count = True,
    help = "Increase verbose level"
)
def compute(path, configFile, nprocs, stage, overwrite, params, verbose):
    from anisotropy.core.runner import UltimateRunner
    from anisotropy.core.config import DefaultConfig

    from anisotropy.core.utils import setupLogger

    setupLogger(verboseLevel(verbose))
    logger = logging.getLogger(__name__)

    config = DefaultConfig()

    if configFile:
        filepath = os.path.abspath(configFile)
        logger.info(f"Loading file from { filepath }")
        config.load(configFile)

    config.update(
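The practical effect of the new option is that compute configures logging before touching the config. A minimal sketch of the equivalent steps, assuming the package is importable; the TOML path is hypothetical and only applies when a config file is passed:

import logging
from anisotropy.core.config import DefaultConfig
from anisotropy.core.utils import setupLogger

setupLogger(logging.INFO)                    # what a single -v resolves to via verboseLevel
config = DefaultConfig()
config.load("/tmp/project/anisotropy.toml")  # hypothetical path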
@@ -3,40 +3,46 @@
# License: GNU GPL version 3, see the file "LICENSE" for details.

from datetime import datetime
import os
from os import path
import logging

from anisotropy.core.config import DefaultConfig

import logging
from anisotropy.core.utils import parallel, ParallelRunner, setupLogger
from anisotropy.database import *

logger = logging.getLogger(__name__)

from anisotropy.database import database, tables

T = tables

from anisotropy.shaping import Simple, BodyCentered, FaceCentered
from anisotropy.meshing import Mesh
from anisotropy.openfoam.presets import CreatePatchDict
from anisotropy.solving.onephase import OnePhaseFlow

logger = logging.getLogger("anisotropy")
setupLogger(logger, logging.INFO)

class UltimateRunner(object):
    def __init__(self, config = None, exec_id = None, m_shape = None):
    def __init__(self, config = None, exec_id = None, t_shape = None):

        self.config = config or DefaultConfig()

        if not m_shape:
            self.database = Database(self.config["database"])
            self.database.setup()
        self.type = "master" if not exec_id else "worker"

        if self.type == "master":
            self.prepareDatabase()

        if not exec_id:
            with self.database.database:
                self.exec_id = Execution(date = datetime.now())
        if self.type == "master":
            with self.database:
                self.exec_id = T.Execution(date = datetime.now())
                self.exec_id.save()
            self.type = "master"
            self.m_shape = None

            self.t_shape = None

        else:
            self.exec_id = exec_id
            self.type = "worker"
            self.m_shape = m_shape
            self.t_shape = t_shape

        self.shape = None
        self.mesh = None
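The T = tables alias is what replaces the former wildcard import: models are now reached as attributes of the registry exposed by anisotropy.database. A minimal sketch of the pattern, where the database path passed to setup is hypothetical and Execution is assumed to be one of the registered peewee models:

from datetime import datetime
from anisotropy.database import database, tables as T

database.setup("anisotropy.db")          # hypothetical database path, mirrors config["database"]
with database:
    run = T.Execution(date = datetime.now())
    run.save()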
@@ -44,28 +50,34 @@ class UltimateRunner(object):

        self.queue = []


    def prepareDatabase(self):
        # NOTE: separate function because of the unpicklability of connections
        self.database = database
        self.database.setup(self.config["database"])

    def fill(self):
        self.config.expand()
        logger.info(f"Preparing queue: { len(self.config.cases) }")

        for case in self.config.cases:
            with self.database.database:
                m_shape = Shape(
            with self.database:
                t_shape = T.Shape(
                    exec_id = self.exec_id,
                    **case
                )
                m_shape.save()
                t_shape.save()

            self.queue.append(UltimateRunner(
                config = self.config,
                exec_id = self.exec_id,
                m_shape = m_shape
                t_shape = t_shape
            ))


    def start(self, queue: list = None, nprocs: int = None):
        nprocs = nprocs or self.config["nprocs"]

        logger.info(f"Starting subprocesses: { nprocs }")
        parallel = ParallelRunner(nprocs = nprocs)
        parallel.start()
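Taken together, fill and start mean the master process expands the config, writes one T.Shape row per case, and then fans worker pipelines out over ParallelRunner. A minimal driver sketch under those assumptions (the explicit nprocs value is illustrative):

from anisotropy.core.config import DefaultConfig
from anisotropy.core.runner import UltimateRunner

config = DefaultConfig()
runner = UltimateRunner(config = config)   # no exec_id / t_shape, so this runner is the master
runner.fill()                              # one T.Shape row and one worker runner per expanded case
runner.start(nprocs = 2)                   # ParallelRunner dispatches each worker's pipeline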
@@ -73,54 +85,66 @@ class UltimateRunner(object):
            parallel.append(runner.pipeline, args = [self.config["stage"]])

        parallel.wait()
        #parallel(nprocs, args, runners)
        # TODO: if runner done - remove from queue; results from parallel function

    def casepath(self):
        with self.database.database:
            params = Shape.get(
                Shape.exec_id == self.exec_id,
                Shape.shape_id == self.m_shape.shape_id
        with self.database:
            params = T.Shape.get(
                T.Shape.exec_id == self.exec_id,
                T.Shape.shape_id == self.t_shape.shape_id
            )

        return path.abspath(path.join(
            self.config["build"],
            params.label,
            "direction-[{},{},{}]".format(*[ str(d) for d in params.direction ]),
            "theta-{}".format(params.theta)
        ))
        direction = "direction-[{},{},{}]".format(*[ str(d) for d in params.direction ])
        theta = "theta-{}".format(params.theta)
        dirpath = path.join(self.config["build"], params.label, direction, theta)

        return path.abspath(dirpath)
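The refactored casepath builds the same layout as before, just in named steps. A minimal sketch with hypothetical values for config["build"] and the shape parameters, mirroring the formatting used in the diff:

from os import path

build, label = "build", "simple"                     # hypothetical config["build"] and params.label
direction, theta = [1.0, 0.0, 0.0], 0.01             # hypothetical params
d = "direction-[{},{},{}]".format(*[ str(x) for x in direction ])
t = "theta-{}".format(theta)
print(path.abspath(path.join(build, label, d, t)))   # .../build/simple/direction-[1.0,0.0,0.0]/theta-0.01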
    def computeShape(self):
        if not self.type == "worker":
            return
        self.database = Database(self.config["database"])
        self.database.setup()
        with self.database.database:
            params = Shape.get(
                Shape.exec_id == self.exec_id,
                Shape.shape_id == self.m_shape.shape_id

        with self.database:
            params = T.Shape.get(
                T.Shape.exec_id == self.exec_id,
                T.Shape.shape_id == self.t_shape.shape_id
            )

        logger.info("Computing shape for {} with direction = {} and theta = {}".format(params.label, params.direction, params.theta))
        filename = "shape.step"

        logger.info([params.label, params.direction, params.theta])
        self.shape = {
            "simple": Simple,
            "bodyCentered": BodyCentered,
            "faceCentered": FaceCentered
        }[params.label](params.direction)
        }[params.label]

        self.shape(params.direction)
        self.shape.build()

        os.makedirs(self.casepath(), exist_ok = True)
        self.shape.export(path.join(self.casepath(), filename))

        with self.database.database:
        with self.database:
            params.shapeStatus = "Done"
            params.save()

    def computeMesh(self):
        params = self.config.cases[0]
        if not self.type == "worker":
            return

        with self.database:
            params = (T.Mesh.select(T.Shape, T.Mesh)
                .join(
                    T.Mesh,
                    JOIN.INNER,
                    on = (T.Mesh.shape_id == T.Shape.shape_id)
                ).where(
                    T.Shape.exec_id == self.exec_id,
                    T.Shape.shape_id == self.t_shape.shape_id
                ))

        logger.info("Computing mesh for {} with direction = {} and theta = {}".format(params.label, params.direction, params.theta))
        filename = "mesh.mesh"

        self.mesh = Mesh(self.shape.shape)
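The label-to-class dictionary is the dispatch point between the stored shape row and the shaping backends. A minimal standalone sketch of that lookup, keeping the constructed instance; the label, direction and case directory are hypothetical, while build() and export() follow the calls shown in the diff:

import os
from os import path
from anisotropy.shaping import Simple, BodyCentered, FaceCentered

shapes = { "simple": Simple, "bodyCentered": BodyCentered, "faceCentered": FaceCentered }

shape = shapes["simple"]([1.0, 0.0, 0.0])                       # hypothetical label and direction
shape.build()

casepath = "build/simple/direction-[1.0,0.0,0.0]/theta-0.01"    # hypothetical, see casepath() above
os.makedirs(casepath, exist_ok = True)
shape.export(path.join(casepath, "shape.step"))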
@@ -185,6 +209,8 @@ class UltimateRunner(object):

    def pipeline(self, stage: str = None):
        self.prepareDatabase()

        stage = stage or self.config["stage"]

        if stage in ["shape", "all"]:
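pipeline now re-attaches the database connection inside the worker before checking the requested stage. A minimal sketch of the stage gate; only the "shape"/"all" branch is visible in this hunk, so the follow-up stages are assumptions:

def pipeline(self, stage: str = None):
    self.prepareDatabase()                 # fresh connection per worker (connections are not picklable)
    stage = stage or self.config["stage"]

    if stage in ["shape", "all"]:
        self.computeShape()
    if stage in ["mesh", "all"]:           # assumed follow-up stage, not shown in this hunk
        self.computeMesh()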
@@ -19,27 +19,29 @@ class CustomFormatter(logging.Formatter):
    red = "\x1b[31;21m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"
    format = "[ %(asctime)s ] [ %(processName)s ] [ %(levelname)s ] %(message)s"

    info = "[%(levelname)s %(processName)s %(asctime)s %(funcName)s]" # [ %(processName)s ]
    msg = " %(message)s"

        formats = {
            logging.DEBUG: grey + format + reset,
            logging.INFO: grey + format + reset,
            logging.WARNING: yellow + format + reset,
            logging.ERROR: red + format + reset,
            logging.CRITICAL: bold_red + format + reset
            logging.DEBUG: grey + info + reset + msg,
            logging.INFO: grey + info + reset + msg,
            logging.WARNING: yellow + info + reset + msg,
            logging.ERROR: red + info + reset + msg,
            logging.CRITICAL: bold_red + info + reset + msg
        }

        return formats.get(level)

    def format(self, record):
        log_fmt = self._getFormat(record.levelno)
        time_fmt = "%H:%M:%S %d-%m-%y"
        time_fmt = "%d-%m-%y %H:%M:%S"
        formatter = logging.Formatter(log_fmt, time_fmt)

        return formatter.format(record)


def setupLogger(logger, level: int, filepath: str = None):
def setupLogger(level: int, filepath: str = None):
    """Applies settings to logger

    :param logger:
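With the split info/msg format, the renamed level names ("II", "WW", ...) and the reordered timestamp, an INFO record now renders along these lines; the process name comes from the Process(name = ...) change further down, and the concrete values here are illustrative (ANSI colour codes omitted):

# [II PP-1 21-10-21 14:03:17 computeShape] Computing shape for simple with direction = [1.0, 0.0, 0.0] and theta = 0.01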
@@ -51,14 +53,22 @@ def setupLogger(logger, level: int, filepath: str = None):
    :param filepath:
        Path to directory
    """
    logger.handlers = []
    logger.setLevel(level)
    #logger.handlers = []
    #logger.setLevel(level)

    logging.addLevelName(logging.INFO, "II")
    logging.addLevelName(logging.WARNING, "WW")
    logging.addLevelName(logging.ERROR, "EE")
    logging.addLevelName(logging.CRITICAL, "CC")

    streamhandler = logging.StreamHandler()
    streamhandler.setLevel(level)
    streamhandler.setFormatter(CustomFormatter())
    logger.addHandler(streamhandler)

    #logger.addHandler(streamhandler)

    logging.root.setLevel(level)
    logging.root.addHandler(streamhandler)

    if filepath:
        if not os.path.exists(filepath):
            os.makedirs(filepath, exist_ok = True)
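setupLogger no longer receives a logger instance; it attaches the handlers to the root logger, so module loggers obtained with getLogger(__name__) inherit them through propagation. A minimal sketch of the new call convention:

import logging
from anisotropy.core.utils import setupLogger

setupLogger(logging.INFO)                  # stream handler and level are set on logging.root
logger = logging.getLogger(__name__)
logger.info("propagates to the root handlers")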
@@ -68,7 +78,9 @@ def setupLogger(logger, level: int, filepath: str = None):
        )
        filehandler.setLevel(level)
        filehandler.setFormatter(CustomFormatter())
        logger.addHandler(filehandler)
        #logger.addHandler(filehandler)

        logging.root.addHandler(filehandler)


class struct:
@@ -313,7 +325,8 @@ class ParallelRunner(object):
        for n in range(self.nprocs):
            self.processes.append(Process(
                target = self.queueRelease,
                args = (self.queueInput, self.queueOutput)
                args = (self.queueInput, self.queueOutput),
                name = f"PP-{ n + 1 }"
            ))

        for proc in self.processes:
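Naming each worker process makes %(processName)s in the formatter render as PP-1, PP-2, ... instead of the default Process-N. A standalone sketch of the same multiprocessing pattern, independent of ParallelRunner:

from multiprocessing import Process, current_process

def work():
    # inside the worker, current_process().name is what %(processName)s reports
    print(current_process().name)

if __name__ == "__main__":
    procs = [Process(target = work, name = f"PP-{ n + 1 }") for n in range(2)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()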
@@ -4,4 +4,9 @@
from .models import __database__, __models__

database = __database__
tables = __models__

class tables:
    pass

for model in __models__:
    setattr(tables, model.__name__, model)
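Replacing the plain __models__ list with a namespace class lets call sites spell models by name, which is what the T.Shape / T.Execution usage in the runner relies on. A minimal sketch of the difference; the database path is hypothetical and Shape is assumed to be among the registered models:

from anisotropy.database import database, tables

ShapeModel = tables.Shape                  # attribute access instead of indexing a list
database.setup("anisotropy.db")            # hypothetical database file
with database:
    print(ShapeModel.table_exists())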
@@ -34,7 +34,6 @@ class Database(SqliteDatabase):
            #autoconnect = self.autoconnect_
        )

        print(self.tables)
        self.connect()
        self.create_tables(self.tables)
        self.close()
@@ -10,8 +10,8 @@ from peewee import (
    IntegerField, BooleanField,
    TimeField, DateTimeField
)
from anisotropy.database.utils import JSONField
from .database import Database
from .utils import JSONField
from .db import Database


__database__ = Database()
@@ -9,7 +9,6 @@ from numpy import pi, sqrt
from .occExtended import *
from . import Periodic


class Simple(Periodic):
    def __init__(
        self,
@@ -1,4 +1,4 @@
import os
import os, shutil
import unittest

unittest.TestLoader.sortTestMethodsUsing = None
@@ -28,7 +28,7 @@ class TestDatabase(unittest.TestCase):
        self.assertTrue(table.table_exists())

    def tearDown(self):
        os.removedirs(os.outputPath)
        shutil.rmtree(self.outputPath)


if __name__ == "__main__":
    unittest.main()
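The tearDown fix swaps os.removedirs, which only removes empty directories and here referenced a non-existent os.outputPath, for shutil.rmtree on the test's own outputPath. A minimal standalone illustration of the difference, using a temporary directory:

import os, shutil, tempfile

outputPath = tempfile.mkdtemp()
open(os.path.join(outputPath, "test.db"), "w").close()

# os.removedirs(outputPath) would raise OSError here because the directory is not empty
shutil.rmtree(outputPath)                   # removes the directory tree regardless of contents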