Mod: working mesh

parent 4efb13954c
commit 6abbce9eb7
@@ -10,4 +10,24 @@ __version__ = "1.1.0"
__author__ = __maintainer__ = "George Kusayko"
__email__ = "gkusayko@gmail.com"

#from anisotropy.core.main import main

###
# Environment
##
import os

env = dict(
    ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
)
env.update(
    BUILD = os.path.join(env["ROOT"], "build"),
    LOG = os.path.join(env["ROOT"], "logs"),
    CONFIG = os.path.join(env["ROOT"], "anisotropy/config/default.toml")
)
env.update(
    logger_name = "anisotropy",
    db_path = env["BUILD"],
    salome_port = 2810,
    openfoam_template = os.path.join(env["ROOT"], "anisotropy/openfoam/template")
)

del os
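For orientation (not part of this diff): the env dictionary defined above is what the rest of the package imports for its paths and logger settings, for example anisotropy.core.database further down in this commit. A minimal sketch:

from anisotropy import env

print(env["ROOT"])         # repository root
print(env["db_path"])      # build directory that holds anisotropy.db
print(env["logger_name"])  # "anisotropy"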
@@ -25,44 +25,51 @@ def compute(stage, params):
    from anisotropy.core.main import Anisotropy

    model = Anisotropy()
    model.setupDB()
    model.db.setup()

    if model.isEmptyDB():
    if model.db.isempty():
        paramsAll = model.loadFromScratch()

        for entry in paramsAll:
            model.updateDB(entry)
            model.db.update(entry)

    model.loadDB(type, direction, theta)
    (type, direction, theta) = ("simple", [1.0, 0.0, 0.0], 0.01)

    model.load(type, direction, theta)
    # TODO: merge cli params with db params here
    model.evalParams()
    model.updateDB()
    model.update()

    # TODO: do smth with output
    if stage == "all" or stage == "mesh":
        ((out, err, code), elapsed) = model.computeMesh(type, direction, theta)

        model.load(type, direction, theta)
        model.params["meshresult"]["calculationTime"] = elapsed
        model.update()

    if stage == "all" or stage == "flow":
        ((out, err, code), elapsed) = model.computeFlow(type, direction, theta)

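Read on its own, the new code path through compute() above boils down to roughly this sketch (not part of this diff; the hard-coded type/direction/theta are the tuple shown above, and stage handling is omitted):

from anisotropy.core.main import Anisotropy

model = Anisotropy()
model.db.setup()

if model.db.isempty():                       # first run: seed the database from the default config
    for entry in model.loadFromScratch():
        model.db.update(entry)

model.load("simple", [1.0, 0.0, 0.0], 0.01)  # pull stored params for one structure
model.evalParams()
model.update()                               # write the evaluated params back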
@anisotropy.command()
@click.argument("root")
@click.argument("name")
@click.argument("type")
@click.argument("direction")
@click.argument("theta", type = click.FLOAT)
def _compute_mesh(root, name, direction, theta):
@click.argument("theta")
def computemesh(root, type, direction, theta):
    # [Salome Environment]

    ###
    # Args
    ##
    direction = list(map(lambda num: float(num), direction[1:-1].split(",")))
    direction = [ float(num) for num in direction[1:-1].split(" ") if num ]
    theta = float(theta)

    ###
    # Modules
    ##
    import salome
    import os, sys

    sys.path.extend([
        root,
@@ -73,8 +80,7 @@ def _compute_mesh(root, name, direction, theta):

    ###
    model = Anisotropy()
    model.setupDB()
    model.loadDB(type, direction, theta)
    model.load(type, direction, theta)

    model.genmesh()

@@ -82,4 +88,5 @@ def _compute_mesh(root, name, direction, theta):
###
# CLI entry
##
#anisotropy()
if __name__ == "__main__":
    anisotropy()
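The same Click group is what the new tests/anisotropy-cli.py (added later in this commit) drives; a minimal sketch of invoking it programmatically (assuming the package root is on sys.path):

from anisotropy.core.cli import anisotropy

anisotropy()  # dispatches to the compute / computemesh commands defined above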
202  anisotropy/core/database.py  Normal file

@@ -0,0 +1,202 @@
# -*- coding: utf-8 -*-
# This file is part of anisotropy.
# License: GNU GPL version 3, see the file "LICENSE" for details.

import os
import logging
from copy import deepcopy

from anisotropy import env
from anisotropy.core.utils import setupLogger
from anisotropy.core.models import db, JOIN, Structure, Mesh, SubMesh, MeshResult

logger = logging.getLogger(env["logger_name"])
setupLogger(logger, logging.INFO, env["LOG"])


class Database(object):
    def __init__(self, name: str, filepath: str):
        self.name = name
        self.filepath = filepath
        self.__db = db


    def setup(self):
        os.makedirs(self.filepath, exist_ok = True)

        fullpath = os.path.join(self.filepath, "{}.db".format(self.name))
        self.__db.init(fullpath)

        if not os.path.exists(fullpath):
            self.__db.create_tables([
                Structure,
                Mesh,
                SubMesh,
                MeshResult
            ])


    def isempty(self) -> bool:
        query = Structure.select()

        return not query.exists()


    def load(self, structure_type: str, structure_direction: list, structure_theta: float) -> dict:
        structureQuery = (
            Structure
            .select()
            .where(
                Structure.type == structure_type,
                Structure.direction == str(structure_direction),
                Structure.theta == structure_theta
            )
        )

        params = {}

        with self.__db.atomic():
            if structureQuery.exists():
                params["structure"] = structureQuery.dicts().get()

                meshQuery = structureQuery.get().meshes

                if meshQuery.exists():
                    params["mesh"] = meshQuery.dicts().get()

                    submeshQuery = meshQuery.get().submeshes

                    if submeshQuery.exists():
                        params["submesh"] = [ entry for entry in submeshQuery.dicts() ]

                    meshresultQuery = meshQuery.get().meshresults

                    if meshresultQuery.exists():
                        params["meshresult"] = meshresultQuery.dicts().get()

        return params


    def update(self, params: dict):
        if not params:
            logger.error("Trying to update db from empty parameters")
            return

        query = (
            Structure
            .select(Structure, Mesh)
            .join(
                Mesh,
                JOIN.INNER,
                on = (Mesh.structure_id == Structure.structure_id)
            )
            .where(
                Structure.type == params["structure"]["type"],
                Structure.direction == str(params["structure"]["direction"]),
                Structure.theta == params["structure"]["theta"]
            )
        )

        structureID = self._updateStructure(params["structure"], query)

        meshID = self._updateMesh(params["mesh"], query, structureID)

        for submeshParams in params.get("submesh", []):
            self._updateSubMesh(submeshParams, query, meshID)

        self._updateMeshResult(params.get("meshresult", {}), query, meshID)


    def _updateStructure(self, src: dict, queryMain) -> int:
        raw = deepcopy(src)

        with self.__db.atomic():
            if not queryMain.exists():
                tabID = Structure.create(**raw)

            else:
                req = queryMain.dicts().get()
                tabID = req["structure_id"]

                query = (
                    Structure.update(**raw)
                    .where(
                        Structure.type == req["type"],
                        Structure.direction == str(req["direction"]),
                        Structure.theta == req["theta"]
                    )
                )
                query.execute()

        return tabID

    def _updateMesh(self, src: dict, queryMain, structureID) -> int:
        raw = deepcopy(src)

        with self.__db.atomic():
            if not queryMain.exists():
                tabID = Mesh.create(
                    structure_id = structureID,
                    **raw
                )

            else:
                req = queryMain.dicts().get()
                tabID = req["mesh_id"]

                query = (
                    Mesh.update(**raw)
                    .where(
                        Mesh.structure_id == structureID
                    )
                )
                query.execute()

        return tabID

    def _updateSubMesh(self, src: dict, queryMain, meshID):
        if not src:
            return

        raw = deepcopy(src)

        with self.__db.atomic():
            if not SubMesh.select().where(SubMesh.mesh_id == meshID).exists():
                tabID = SubMesh.create(
                    mesh_id = meshID,
                    **raw
                )
                logger.debug(f"[ DB ] Created SubMesh entry { tabID }")

            else:
                query = (
                    SubMesh.update(**raw)
                    .where(
                        SubMesh.mesh_id == meshID,
                        SubMesh.name == src["name"]
                    )
                )
                query.execute()

    def _updateMeshResult(self, src: dict, queryMain, meshID):
        if not src:
            return

        raw = deepcopy(src)

        with self.__db.atomic():
            if not MeshResult.select().where(MeshResult.mesh_id == meshID).exists():
                tabID = MeshResult.create(
                    mesh_id = meshID,
                    **raw
                )
                logger.debug(f"[ DB ] Created MeshResult entry { tabID }")

            else:
                query = (
                    MeshResult.update(**raw)
                    .where(
                        MeshResult.mesh_id == meshID
                    )
                )
                query.execute()
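A minimal usage sketch of the new Database wrapper, mirroring how Anisotropy.load() and Anisotropy.update() call it further down (not part of this diff; the concrete values are assumptions taken from the CLI defaults earlier in the commit):

from anisotropy import env
from anisotropy.core.database import Database

db = Database("anisotropy", env["db_path"])
db.setup()                                          # creates the build directory, anisotropy.db and the tables on first run

params = db.load("simple", [1.0, 0.0, 0.0], 0.01)   # empty dict if this structure is not stored yet
if params:
    params["meshresult"]["calculationTime"] = 42.0  # hypothetical edit
    db.update(params)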
@@ -1,65 +1,35 @@
# -*- coding: utf-8 -*-
# This file is part of anisotropy.
# License: GNU GPL version 3, see the file "LICENSE" for details.

import os, sys
import time
from datetime import timedelta, datetime
import shutil
import logging
from copy import deepcopy
from math import sqrt

import toml
from copy import deepcopy

from anisotropy.core.models import db, Structure, Mesh, SubMesh, MeshResult
from anisotropy.core.utils import struct, deepupdate


###
# Environment variables and config
##
env = { "ROOT": os.path.abspath(".") }
env.update(
    BUILD = os.path.join(env["ROOT"], "build"),
    LOG = os.path.join(env["ROOT"], "logs"),
    DEFAULT_CONFIG = os.path.join(env["ROOT"], "anisotropy/config/default.toml"),
    CONFIG = os.path.join(env["ROOT"], "conf/config.toml")
from anisotropy import (
    __version__, env,
    openfoam
)
env["db_path"] = env["BUILD"]
env["salome_port"] = 2810
from anisotropy.core.utils import setupLogger, timer
from anisotropy.core.database import Database
from anisotropy import salomepl
import anisotropy.salomepl.utils
import anisotropy.salomepl.geometry
import anisotropy.salomepl.mesh
from anisotropy.samples import Simple, FaceCentered, BodyCentered

#if os.path.exists(env["CONFIG"]):
# config = toml.load(env["CONFIG"])

# for restricted in ["ROOT", "BUILD", "LOG", "CONFIG"]:
# if config.get(restricted):
# config.pop(restricted)

# TODO: not working if custom config empty and etc
# for m, structure in enumerate(config["structures"]):
# for n, estructure in enumerate(env["structures"]):
# if estructure["name"] == structure["name"]:
# deepupdate(env["structures"][n], config["structures"][m])

# config.pop("structures")
# deepupdate(env, config)


###
# Logger
##
from anisotropy.core.utils import setupLogger
logger_env = env.get("logger", {})

logger = logging.getLogger(logger_env.get("name", "anisotropy"))
setupLogger(logger, logging.INFO)
logger = logging.getLogger(env["logger_name"])
setupLogger(logger, logging.INFO, env["LOG"])

peeweeLogger = logging.getLogger("peewee")
peeweeLogger.setLevel(logging.INFO)

from anisotropy.core.utils import timer
from anisotropy import __version__
from anisotropy import salomepl
from anisotropy import openfoam
from anisotropy.samples import Simple, FaceCentered, BodyCentered
from math import sqrt
from peewee import JOIN

class Anisotropy(object):
    """Ultimate class that organize whole working process"""
@@ -68,10 +38,19 @@ class Anisotropy(object):
        """Constructor method"""

        self.env = env
        self.db = None
        self.db = Database("anisotropy", env["db_path"])
        self.params = []


    def load(self, structure_type: str, structure_direction: list, structure_theta: float):
        self.db.setup()
        self.params = self.db.load(structure_type, structure_direction, structure_theta)

    def update(self, params: dict = None):
        self.db.setup()
        self.db.update(self.params if not params else params)


    @staticmethod
    def version():
        """Returns versions of all used main programs
@@ -103,11 +82,11 @@ class Anisotropy(object):
        :rtype: list
        """

        if not os.path.exists(self.env["DEFAULT_CONFIG"]):
        if not os.path.exists(self.env["CONFIG"]):
            logger.error("Missed default configuration file")
            return

        buf = toml.load(self.env["DEFAULT_CONFIG"]).get("structures")
        buf = toml.load(self.env["CONFIG"]).get("structures")
        paramsAll = []

        # TODO: custom config and merge
@@ -224,237 +203,22 @@ class Anisotropy(object):
        )



    def getParams(self, structure: str, direction: list, theta: float):
        for entry in self.params:
            if entry["name"] == structure and \
                entry["geometry"]["direction"] == direction and \
                entry["geometry"]["theta"] == theta:
                return entry


    def setupDB(self):
        os.makedirs(self.env["db_path"], exist_ok = True)

        dbname = os.path.join(self.env["db_path"], "anisotropy.db")
        self.db = db
        self.db.init(dbname)

        if not os.path.exists(dbname):
            self.db.create_tables([
                Structure,
                Mesh,
                SubMesh,
                MeshResult
            ])


    def _updateStructure(self, src: dict, queryMain) -> int:
        raw = deepcopy(src)

        with self.db.atomic():
            if not queryMain.exists():
                tabID = Structure.create(**raw)

            else:
                req = queryMain.dicts().get()
                tabID = req["structure_id"]

                query = (
                    Structure.update(**raw)
                    .where(
                        Structure.type == req["type"],
                        Structure.direction == str(req["direction"]),
                        Structure.theta == req["theta"]
                    )
                )
                query.execute()

        return tabID

    def _updateMesh(self, src: dict, queryMain, structureID) -> int:
        raw = deepcopy(src)

        with self.db.atomic():
            if not queryMain.exists():
                tabID = Mesh.create(
                    structure_id = structureID,
                    **raw
                )

            else:
                req = queryMain.dicts().get()
                tabID = req["mesh_id"]

                query = (
                    Mesh.update(**raw)
                    .where(
                        Mesh.structure_id == structureID #req["structure_id"]
                    )
                )
                query.execute()

        return tabID

    def _updateSubMesh(self, src: dict, queryMain, meshID) -> None:
        if not src:
            return

        raw = deepcopy(src)

        with self.db.atomic():
            if not SubMesh.select().where(SubMesh.mesh_id == meshID).exists():
                tabID = SubMesh.create(
                    mesh_id = meshID,
                    **raw
                )
                logger.debug(f"[ DB ] Created SubMesh entry { tabID }")

            else:
                #req = queryMain.dicts().get()
                #tabID = req["mesh_id"]

                query = (
                    SubMesh.update(**raw)
                    .where(
                        SubMesh.mesh_id == meshID, #req["mesh_id"],
                        SubMesh.name == src["name"]
                    )
                )
                query.execute()

    def _updateMeshResult(self, src: dict, queryMain, meshID) -> None:
        if not src:
            return

        raw = deepcopy(src)

        with self.db.atomic():
            if not MeshResult.select().where(MeshResult.mesh_id == meshID).exists():
                tabID = MeshResult.create(
                    mesh_id = meshID,
                    **raw
                )
                logger.debug(f"[ DB ] Created MeshResult entry { tabID }")

            else:
                #req = queryMain.dicts().get()
                #tabID = req["mesh_id"]

                query = (
                    MeshResult.update(**raw)
                    .where(
                        MeshResult.mesh_id == meshID #req["mesh_id"]
                    )
                )
                query.execute()

    @timer
    def updateDB(self, src: dict = None):
        if src:
            params = src

        elif self.params:
            params = self.params

        else:
            logger.error("Trying to update db from empty parameters")
            return

        query = (
            Structure
            .select(Structure, Mesh)
            .join(
                Mesh,
                JOIN.INNER,
                on = (Mesh.structure_id == Structure.structure_id)
            )
            .where(
                Structure.type == params["structure"]["type"],
                Structure.direction == str(params["structure"]["direction"]),
                Structure.theta == params["structure"]["theta"]
            )
        )

        structureID = self._updateStructure(params["structure"], query)

        meshID = self._updateMesh(params["mesh"], query, structureID)

        for submeshParams in params.get("submesh", []):
            self._updateSubMesh(submeshParams, query, meshID)

        self._updateMeshResult(params.get("meshresults", {}), query, meshID)


    def loadDB(self, structure_type: str, structure_direction: list, structure_theta: float):
        structureQuery = (
            Structure
            .select()
            .where(
                Structure.type == structure_type,
                Structure.direction == str(structure_direction),
                Structure.theta == structure_theta
            )
        )

        self.params = {}

        with self.db.atomic():
            if structureQuery.exists():
                self.params["structure"] = structureQuery.dicts().get()

                meshQuery = structureQuery.get().meshes

                if meshQuery.exists():
                    self.params["mesh"] = meshQuery.dicts().get()

                    submeshQuery = meshQuery.get().submeshes

                    if submeshQuery.exists():
                        self.params["submesh"] = [ entry for entry in submeshQuery.dicts() ]

                    meshresultQuery = meshQuery.get().meshresults

                    if meshresultQuery.exists():
                        self.params["meshresult"] = meshresultQuery.dicts().get()



    # TODO: loadDB (one model), loadsDB (all models)
    @timer
    def updateFromDB(self):
        squery = Structure.select().order_by(Structure.structure_id)
        mquery = Mesh.select().order_by(Mesh.structure_id)
        smquery = SubMesh.select()
        mrquery = MeshResult.select().order_dy(MeshResult.mesh_id)
        self.params = []

        for s, m, mr in zip(squery.dicts(), mquery.dicts(), mrquery.dicts()):
            name = s.pop("name")
            path = s.pop("path")

            self.params.append(dict(
                name = name,
                path = path,
                geometry = s,
                mesh = m,
                submesh = [ d for d in smquery.dicts() if d["mesh_id"] == m["mesh_id"] ],
                meshresults = mr
            ))

        self.params = sorted(self.params, key = lambda entry: f"{ entry['name'] } { entry['geometry']['direction'] } { entry['geometry']['theta'] }")

    @timer
    def computeMesh(self, type, direction, theta):
        scriptpath = os.path.join(self.env["ROOT"], "anisotropy/__main__.py")
        scriptpath = os.path.join(self.env["ROOT"], "anisotropy/core/cli.py")
        port = 2900

        return salomepl.utils.runSalome(port, scriptpath, self.env["ROOT"], "_compute_mesh", type, direction, theta)
        return salomepl.utils.runSalome(
            self.env["salome_port"],
            scriptpath,
            self.env["ROOT"],
            "computemesh", type, direction, theta,
            logpath = os.path.join(self.env["LOG"], "salome.log")
        )

    def genmesh(self):
        # ISSUE: double logger output

        import salome

        p = self.params
@@ -530,24 +294,24 @@ class Anisotropy(object):
            mesh.Triangle(subshape, **submesh)


        model.updateDB()
        self.update()
        returncode, errors = mesh.compute()

        if not returncode:
            mesh.removePyramids()
            mesh.assignGroups()

            casePath = model.getCasePath()
            casePath = self.getCasePath()
            os.makedirs(casePath, exist_ok = True)
            mesh.exportUNV(os.path.join(casePath, "mesh.unv"))

            meshStats = mesh.stats()
            p["meshresults"] = dict(
            p["meshresult"] = dict(
                surfaceArea = surfaceArea,
                volume = volume,
                **meshStats
            )
            model.updateDB()
            self.update()

            logger.info("mesh stats:\n{}".format(
                "\n".join(map(lambda v: f"{ v[0] }:\t{ v[1] }", meshStats.items()))
@@ -556,11 +320,11 @@ class Anisotropy(object):
        else:
            logger.error(errors)

            p["meshresults"] = dict(
            p["meshresult"] = dict(
                surfaceArea = surfaceArea,
                volume = volume
            )
            model.updateDB()
            self.update()

        salome.salome_close()

@@ -571,13 +335,15 @@ class Anisotropy(object):
        ##
        foamCase = [ "0", "constant", "system" ]

        flow = self.params["flow"]

        # ISSUE: ideasUnvToFoam cannot import mesh with '-case' flag so 'os.chdir' for that
        os.chdir(self.getCasePath())
        openfoam.foamClean()

        for d in foamCase:
            shutil.copytree(
                os.path.join(ROOT, "anisotropy/openfoam/template", d),
                os.path.join(self.env["openfoam_template"], d),
                os.path.join(case, d)
            )

@@ -613,7 +379,7 @@ class Anisotropy(object):
        if out:
            logger.info(out)
        # TODO: replace all task variables
        openfoam.transformPoints(task.flow.scale)
        openfoam.transformPoints(flow["scale"])

        ###
        # Decomposition and initial approximation
@@ -621,15 +387,15 @@ class Anisotropy(object):
        openfoam.foamDictionary(
            "constant/transportProperties",
            "nu",
            str(task.flow.constant.nu)
            str(flow["constant"]["nu"])
        )

        openfoam.decomposePar()

        openfoam.renumberMesh()

        pressureBF = task.flow.approx.pressure.boundaryField
        velocityBF = task.flow.approx.velocity.boundaryField
        pressureBF = flow["approx"]["pressure"]["boundaryField"]
        velocityBF = flow["approx"]["velocity"]["boundaryField"]
        direction = {
            "[1, 0, 0]": 0,
            "[0, 0, 1]": 1,
@@ -639,14 +405,14 @@ class Anisotropy(object):
        openfoam.foamDictionary(
            "0/p",
            "boundaryField.inlet.value",
            openfoam.uniform(pressureBF.inlet.value)
            openfoam.uniform(pressureBF["inlet"]["value"])
        )
        openfoam.foamDictionary(
            "0/p",
            "boundaryField.outlet.value",
            openfoam.uniform(pressureBF.outlet.value)
            openfoam.uniform(pressureBF["outlet"]["value"])
        )

        # TODO: flow variable
        openfoam.foamDictionary(
            "0/U",
            "boundaryField.inlet.value",
@@ -710,325 +476,3 @@ class Anisotropy(object):
        pass


###################################################################################


###
# Main
##
def main():
    if checkEnv():
        return

    logger.info(f"args:\n\tconfig:\t{ configPath }\n\tmode:\t{ mode }")

    queue = createQueue()

    for n, case in enumerate(queue):
        date = datetime.now()
        logger.info("-" * 80)
        logger.info(f"""main:
        task:\t{ n + 1 } / { len(queue) }
        cpu count:\t{ os.cpu_count() }
        case:\t{ case }
        date:\t{ date.date() }
        time:\t{ date.time() }""")

        ###
        # Compute mesh
        ##
        taskPath = os.path.join(case, "task.toml")

        task = struct(toml.load(taskPath))

        if not task.status.mesh or mode == "all":
            computeMesh(case)

        else:
            logger.info("computeMesh: mesh already computed")

        task = struct(toml.load(taskPath))

        if not task.status.mesh:
            logger.critical("mesh not computed: skip flow computation")
            continue

        ###
        # Compute flow
        ##

        if not task.status.flow or mode == "all":
            computeFlow(case)

        else:
            logger.info("computeFlow: flow already computed")


def createQueue():
    queue = []

    ###
    # Special values
    ##
    parameters_theta = {}
    mesh_thickness = {}

    for structure in config.base.__dict__.keys():

        theta = getattr(config, structure).geometry.theta
        parameters_theta[structure] = [ n * theta[2] for n in range(int(theta[0] / theta[2]), int(theta[1] / theta[2]) + 1) ]

        thickness = getattr(config, structure).mesh.thickness
        count = len(parameters_theta[structure])
        mesh_thickness[structure] = [ thickness[0] + n * (thickness[1] - thickness[0]) / (count - 1) for n in range(0, count) ]


    ###
    # structure type > flow direction > coefficient theta
    ##
    for structure in config.base.__dict__.keys():
        if getattr(config.base, structure):
            for direction in getattr(config, structure).geometry.directions:
                for n, theta in enumerate(parameters_theta[structure]):
                    # create dirs for case path
                    case = os.path.join(
                        f"{ BUILD }",
                        f"{ structure }",
                        "direction-{}{}{}".format(*direction),
                        f"theta-{ theta }"
                    )

                    taskPath = os.path.join(case, "task.toml")

                    if os.path.exists(taskPath) and mode == "safe":
                        queue.append(case)
                        continue

                    if not os.path.exists(case):
                        os.makedirs(case)

                    # prepare configuration for task
                    task = {
                        "logger": dict(config.logger),
                        "structure": structure,
                        "status": {
                            "mesh": False,
                            "flow": False
                        },
                        "statistics": {
                            "meshTime": 0,
                            "flowTime": 0
                        },
                        "geometry": {
                            "theta": theta,
                            "direction": direction,
                            "fillet": getattr(config, structure).geometry.fillet
                        },
                        "mesh": dict(getattr(config, structure).mesh),
                        "flow": dict(config.flow)
                    }

                    # reassign special values
                    task["mesh"]["thickness"] = mesh_thickness[structure][n]

                    ##
                    with open(os.path.join(case, "task.toml"), "w") as io:
                        toml.dump(task, io)

                    ##
                    queue.append(case)

    return queue


#from salomepl.utils import runExecute, salomeVersion

def computeMesh(case):
    scriptpath = os.path.join(ROOT, "salomepl/genmesh.py")
    port = 2810
    stime = time.monotonic()

    returncode = runExecute(port, scriptpath, ROOT, case)

    task = struct(toml.load(os.path.join(case, "task.toml")))
    elapsed = time.monotonic() - stime
    logger.info("computeMesh: elapsed time: {}".format(timedelta(seconds = elapsed)))

    if returncode == 0:
        task.statistics.meshTime = elapsed

        with open(os.path.join(case, "task.toml"), "w") as io:
            toml.dump(dict(task), io)


def computeFlow(case):
    ###
    # Case preparation
    ##
    foamCase = [ "0", "constant", "system" ]

    os.chdir(case)
    task = struct(toml.load(os.path.join(case, "task.toml")))
    openfoam.foamClean()

    for d in foamCase:
        shutil.copytree(
            os.path.join(ROOT, "openfoam/template", d),
            os.path.join(case, d)
        )

    stime = time.monotonic()

    ###
    # Mesh manipulations
    ##
    if not os.path.exists("mesh.unv"):
        logger.critical(f"computeFlow: missed 'mesh.unv'")
        return

    _, returncode = openfoam.ideasUnvToFoam("mesh.unv")

    if returncode:
        os.chdir(ROOT)

        return returncode

    openfoam.createPatch(dictfile = "system/createPatchDict")

    openfoam.foamDictionary(
        "constant/polyMesh/boundary",
        "entry0.defaultFaces.type",
        "wall"
    )
    openfoam.foamDictionary(
        "constant/polyMesh/boundary",
        "entry0.defaultFaces.inGroups",
        "1 (wall)"
    )

    out = openfoam.checkMesh()

    if out:
        logger.info(out)

    openfoam.transformPoints(task.flow.scale)

    ###
    # Decomposition and initial approximation
    ##
    openfoam.foamDictionary(
        "constant/transportProperties",
        "nu",
        str(task.flow.constant.nu)
    )

    openfoam.decomposePar()

    openfoam.renumberMesh()

    pressureBF = task.flow.approx.pressure.boundaryField
    velocityBF = task.flow.approx.velocity.boundaryField
    direction = {
        "[1, 0, 0]": 0,
        "[0, 0, 1]": 1,
        "[1, 1, 1]": 2
    }[str(task.geometry.direction)]

    openfoam.foamDictionary(
        "0/p",
        "boundaryField.inlet.value",
        openfoam.uniform(pressureBF.inlet.value)
    )
    openfoam.foamDictionary(
        "0/p",
        "boundaryField.outlet.value",
        openfoam.uniform(pressureBF.outlet.value)
    )

    openfoam.foamDictionary(
        "0/U",
        "boundaryField.inlet.value",
        openfoam.uniform(velocityBF.inlet.value[direction])
    )

    openfoam.potentialFoam()

    ###
    # Main computation
    ##
    pressureBF = task.flow.main.pressure.boundaryField
    velocityBF = task.flow.main.velocity.boundaryField

    for n in range(os.cpu_count()):
        openfoam.foamDictionary(
            f"processor{n}/0/U",
            "boundaryField.inlet.type",
            velocityBF.inlet.type
        )
        openfoam.foamDictionary(
            f"processor{n}/0/U",
            "boundaryField.inlet.value",
            openfoam.uniform(velocityBF.inlet.value[direction])
        )

    returncode, out = openfoam.simpleFoam()
    if out:
        logger.info(out)

    ###
    # Check results
    ##
    elapsed = time.monotonic() - stime
    logger.info("computeFlow: elapsed time: {}".format(timedelta(seconds = elapsed)))

    if returncode == 0:
        task.status.flow = True
        task.statistics.flowTime = elapsed

        postProcessing = "postProcessing/flowRatePatch(name=outlet)/0/surfaceFieldValue.dat"

        with open(postProcessing, "r") as io:
            lastLine = io.readlines()[-1]
            flowRate = float(lastLine.replace(" ", "").replace("\n", "").split("\t")[1])

        task.statistics.flowRate = flowRate

        with open(os.path.join(case, "task.toml"), "w") as io:
            toml.dump(dict(task), io)

    os.chdir(ROOT)

    return returncode


def checkEnv():
    missed = False

    try:
        pythonVersion = "Python {}".format(sys.version.split(" ")[0])
        salomeplVersion = salomeVersion()
        openfoamVersion = openfoam.foamVersion()

    except Exception as e:
        logger.critical("Missed environment %s", e)
        missed = True

    else:
        logger.info(f"environment:\n\t{pythonVersion}\n\t{salomeplVersion}\n\t{openfoamVersion}")

    finally:
        return missed


def postprocessing(queue):

    pass

###
# Main entry
##
if __name__ == "__main__":
    main()

@@ -1,5 +1,9 @@
# -*- coding: utf-8 -*-
# This file is part of anisotropy.
# License: GNU GPL version 3, see the file "LICENSE" for details.

from peewee import (
    SqliteDatabase,
    SqliteDatabase, JOIN,
    Model, Field,
    AutoField, ForeignKeyField,
    TextField, FloatField,
@@ -7,6 +11,18 @@ from peewee import (
    TimeField
)

db = SqliteDatabase(
    None,
    pragmas = { "foreign_keys": 1 },
    field_types = { "list": "text" }
)


class BaseModel(Model):
    class Meta:
        database = db


class ListField(Field):
    field_type = "list"

@@ -25,16 +41,6 @@ class ListField(Field):

        return pval

db = SqliteDatabase(
    None,
    pragmas = { "foreign_keys": 1 },
    field_types = { "list": "text" }
)

class BaseModel(Model):
    class Meta:
        database = db


class Structure(BaseModel):
    structure_id = AutoField()
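The handle above is created unbound (None) and only pointed at a file later by Database.setup(); a short sketch of that deferred-initialisation pattern with peewee (the path here is hypothetical):

from anisotropy.core.models import db, Structure, Mesh, SubMesh, MeshResult

db.init("/tmp/anisotropy.db")                             # bind the database to a real file at runtime
db.create_tables([Structure, Mesh, SubMesh, MeshResult])  # safe to call repeatedly (CREATE TABLE IF NOT EXISTS)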
@@ -8,6 +8,7 @@ from types import FunctionType
import os

class CustomFormatter(logging.Formatter):
    def _getFormat(self, level: int):
        grey = "\x1b[38;21m"
        yellow = "\x1b[33;21m"
        red = "\x1b[31;21m"
@@ -15,7 +16,7 @@ class CustomFormatter(logging.Formatter):
        reset = "\x1b[0m"
        format = "[ %(asctime)s ] [ %(levelname)s ] %(message)s"

        FORMATS = {
        formats = {
            logging.DEBUG: grey + format + reset,
            logging.INFO: grey + format + reset,
            logging.WARNING: yellow + format + reset,
@@ -23,27 +24,45 @@ class CustomFormatter(logging.Formatter):
            logging.CRITICAL: bold_red + format + reset
        }

        return formats.get(level)

    def format(self, record):
        log_fmt = self.FORMATS.get(record.levelno)
        formatter = logging.Formatter(log_fmt)
        log_fmt = self._getFormat(record.levelno)
        time_fmt = "%H:%M:%S %d-%m-%y"
        formatter = logging.Formatter(log_fmt, time_fmt)

        return formatter.format(record)

def setupLogger(logger, level: int):

def setupLogger(logger, level: int, filepath: str = None):
    """Applies settings to logger

    :param logger: Instance of :class:`logging.Logger`
    :type logger: Instance of :class:`logging.Logger`
    :param level: Logging level (logging.INFO, logging.WARNING, ..)
    :type level: int
    :param filepath: Path to directory
    :type filepath: str, optional
    """

    logger.setLevel(level)

    sh = logging.StreamHandler()
    sh.setLevel(level)
    sh.setFormatter(CustomFormatter())
    streamhandler = logging.StreamHandler()
    streamhandler.setLevel(level)
    streamhandler.setFormatter(CustomFormatter())
    logger.addHandler(streamhandler)

    fh = logging.FileHandler(os.path.join("logs", logger.name))
    fh.setLevel(level)
    fh.setFormatter(CustomFormatter())
    if filepath:
        if not os.path.exists(filepath):
            os.makedirs(filepath, exist_ok = True)

    logger.addHandler(sh)
    logger.addHandler(fh)
        filehandler = logging.FileHandler(
            os.path.join(filepath, "{}.log".format(logger.name))
        )
        filehandler.setLevel(level)
        filehandler.setFormatter(CustomFormatter())
        logger.addHandler(filehandler)

    return logger
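With the new optional filepath argument, the call made by core.main and core.database earlier in this commit looks like this condensed sketch (not part of this hunk):

import logging
from anisotropy import env
from anisotropy.core.utils import setupLogger

logger = logging.getLogger(env["logger_name"])
setupLogger(logger, logging.INFO, env["LOG"])  # console handler plus logs/anisotropy.log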

class struct:
    def __init__(self, *args, **kwargs):
@@ -104,17 +123,32 @@ def deepupdate(target, src):
        else:
            target[k] = copy.copy(v)

#if os.path.exists(env["CONFIG"]):
# config = toml.load(env["CONFIG"])

# for restricted in ["ROOT", "BUILD", "LOG", "CONFIG"]:
# if config.get(restricted):
# config.pop(restricted)

# TODO: not working if custom config empty and etc
# for m, structure in enumerate(config["structures"]):
# for n, estructure in enumerate(env["structures"]):
# if estructure["name"] == structure["name"]:
# deepupdate(env["structures"][n], config["structures"][m])

# config.pop("structures")
# deepupdate(env, config)

def timer(func: FunctionType) -> (tuple, float):
    """(Decorator) Returns output of inner function and execution time

    :param func: inner function
    :type: FunctionType
    :type func: FunctionType

    :return: output, elapsed time
    :rtype: tuple(tuple, float)
    """

    def inner(*args, **kwargs):
        start = time.monotonic()
        ret = func(*args, **kwargs)
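This decorator is what computeMesh and computeFlow use above, so their call sites receive a (result, elapsed) pair; a minimal sketch with a hypothetical function:

from anisotropy.core.utils import timer

@timer
def work():
    return 42

result, elapsed = work()  # 42, plus the seconds spent inside the call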
@@ -28,8 +28,8 @@ def version() -> str:

    return str(out, "utf-8").strip().split(" ")[-1]


def runSalome(port: int, scriptpath: str, root: str, logpath: str = None, *args) -> int:
def runSalome(port: int, scriptpath: str, root: str, *args, logpath: str = None) -> int:
    # ISSUE: salome removes commas from string list

    if os.environ.get("SALOME_PATH"):
        cmd = [ os.path.join(os.environ["SALOME_PATH"], "salome") ]
@@ -40,9 +40,10 @@ def runSalome(port: int, scriptpath: str, root: str, logpath: str = None, *args)
    if not logpath:
        logpath = "/tmp/salome.log"

    fullargs = list(args)
    fullargs.extend([ root, logpath ])
    fmtargs = "args:{}".format(", ".join([ str(arg) for arg in args ]))
    #fullargs = list(args)
    args = list(args)
    args.insert(1, root)
    fmtargs = "args:{}".format(",".join([ '"{}"'.format(str(arg)) for arg in args ]))
    cmdargs = [
        "start", "-t",
        "--shutdown-servers=1",
10  tests/anisotropy-cli.py  Normal file

@@ -0,0 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))

from anisotropy.core.cli import anisotropy

anisotropy()

@@ -9,7 +9,7 @@ class TestAnisotropy(unittest.TestCase):
        self.model = Anisotropy()

    def test_01_create_db(self):
        self.model.setupDB()
        self.model.db.setup()
        path = os.path.join(self.model.env["db_path"], "anisotropy.db")

        self.assertTrue(os.path.exists(path))
@@ -19,10 +19,9 @@ class TestAnisotropy(unittest.TestCase):

        try:
            paramsAll = self.model.loadFromScratch()
            self.model.setupDB()

            for entry in paramsAll:
                self.model.updateDB(entry)
                self.model.update(entry)

        except Exception as e:
            passed = False
@@ -31,8 +30,7 @@ class TestAnisotropy(unittest.TestCase):
        self.assertTrue(passed)

    def test_03_load_db(self):
        self.model.setupDB()
        self.model.loadDB("simple", [1.0, 0.0, 0.0], 0.01)
        self.model.load("simple", [1.0, 0.0, 0.0], 0.01)

        self.assertEqual(self.model.params["structure"]["type"], "simple")