diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..cec530a --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,46 @@ +name: Build and Deploy Documentation + +on: + push: + branches: [main] + workflow_dispatch: + +# Allow only one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + build: + name: Build the documentation with Sphinx + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup nox + uses: excitedleigh/setup-nox@v2.0.0 + - name: Build docs + run: nox -s docs + - name: Upload artifact + uses: actions/upload-pages-artifact@v1 + with: + path: 'docs/_build/html' + deploy: + name: Deploy documentation to GitHub Pages + needs: build + # Sets permissions of the GITHUB_TOKEN to allow deployment + permissions: + contents: read + pages: write + id-token: write + runs-on: ubuntu-latest + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - name: Setup pages + uses: actions/configure-pages@v3 + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 + \ No newline at end of file diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..951a50b --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,25 @@ +name: Upload to PyPi +on: + release: + types: [published] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + - name: Build and publish + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 
100644 index 0000000..e49e955 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,26 @@ +name: run tests + +on: [push, pull_request, workflow_dispatch] + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [windows-latest, macOS-13, ubuntu-latest] + python-version: ['3.8', '3.9', '3.10'] + fail-fast: false + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install pynumad + run: | + python -m pip install --upgrade pip + pip install -e . + - name: Test with pytest + run: | + pip install --upgrade pytest coverage + coverage run --source=pynumad --omit="*/tests/*" -m pytest diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0b508b8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +docs/_build/* +docs/_static/* +__pycache__ \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..8ebca8b --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,16 @@ +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = ReadtheDocsSphinxTheme +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..0560ed4 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,60 @@ +from opensg import __version__ + +# -- Project information ----------------------------------------------------- +project = u'opensg' +copyright = u'2023 National Technology & Engineering Solutions of Sandia, LLC (NTESS)' +author = u'opensg Developers' + +version = __version__ +release = __version__ + +# -- General configuration --------------------------------------------------- + +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + # 'sphinx.ext.viewcode', # commenting out for now b/c bad render width + 'sphinx.ext.napoleon', + 'sphinxcontrib.bibtex', +] +napoleon_use_rtype = False +viewcode_import = True +numpydoc_show_class_members = True +numpydoc_show_inherited_class_members = False +numpydoc_class_members_toctree = False +autodoc_member_order = 'bysource' +autoclass_content = 'both' +bibtex_bibfiles = ['refs/publications.bib','refs/conclusion.bib'] +templates_path = ['_templates'] +source_suffix = '.rst' +master_doc = 'index' +language = "en" +numfig = True + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '_user'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. + +html_theme = 'pydata_sphinx_theme' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] +html_style = 'css/my_style.css' + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'opensgdoc' \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..923e77e --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,69 @@ +.. _home: + +pyNuMAD +======== + +The structural design and optimization of wind turbine blades is a +complex task. In many cases it is difficult to find the optimal design +of a turbine blade by hand, or by trial and error, and the software +tools used for such designs are most effective when integrated into +automated optimization and analysis algorithms. A new version of the +software tool `pyNuMAD (Python Numerical Manufacturing And Design) `_ for the design +and modeling of wind turbine blades is developed and described. + +.. toctree:: + :maxdepth: 2 + :hidden: + + user-guide/index + contributing + apidoc/pynumad + reference + release-notes + publications + license + +.. _intro-citation: + +Citing NuMAD +=============== + +To cite pyNuMAD, please utilize the reference below. + +[1] Camarena, E., Anderson, E., Bonney, K. L., Clarke, R. J., & Paquette, J. (2023). pyNuMAD 1.0.0. Zenodo. https://doi.org/10.5281/zenodo.10023189 + + + +.. _developers: + +pyNuMAD Developers +===================== + +pyNuMAD has been developed by `Sandia National Laboratories +(Sandia) `_, +funded by the U.S. Department of Energy’s Wind Energy Technologies Office. + + +Current members of the development team include: + +- Joshua Paquette (“Josh”) (Sandia - PI) +- Evan Anderson (Sandia) +- Ernesto Camarena (Sandia) +- Kirk Bonney (Sandia) +- Ryan James Clarke (Sandia) + + +Funding +======= + +Development and maintenance of the NuMAD code is funded by the U.S. 
Department of Energy’s Wind Energy Technologies Office. +Sandia National Laboratories is a multi-mission laboratory managed and operated by National Technology and Engineering Solutions +of Sandia, LLC., a wholly owned subsidiary of Honeywell International, Inc., for the U.S. Department of Energy’s +National Nuclear Security Administration under contract DE-NA0003525. + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/user-guide/index.rst b/docs/user-guide/index.rst new file mode 100644 index 0000000..ccd24e1 --- /dev/null +++ b/docs/user-guide/index.rst @@ -0,0 +1,22 @@ +.. _user-guide: + +User’s Guide +============ + +.. toctree:: + :maxdepth: 2 + :caption: Get Started + + overview + installation + getting-started + +.. toctree:: + :maxdepth: 2 + :caption: User Interface + + blade_definition + meshing + beam_models + shell_models + solid_models diff --git a/docs/user-guide/installation.rst b/docs/user-guide/installation.rst new file mode 100644 index 0000000..3622729 --- /dev/null +++ b/docs/user-guide/installation.rst @@ -0,0 +1,47 @@ +.. _installation: + +Installation +============ + +Download pyNuMAD +---------------- + +The OpenSG source code is hosted on the `opensg GitHub repository `_. +OpenSG users are recommended to clone the GitHub repository. +Cloning the repository allows users to easily pull the latest updates to the pyNuMAD source code. +These updates may improve the code's speed and accuracy, and add additional functionality or advanced features. + +To download OpenSG using `git `_, type the following in a git interface:: + + git clone https://github.com/sandialabs/pyNuMAD + +Installation +------------ + +To install OpenSG and create an environment where the code runs correctly, please execute the following steps: + +1. Download the OpenSG repository:: + + git clone https://github.com/sandialabs/pyNuMAD + +2. 
Create the conda environment named `opensg_env` using the `environment.yml` file:: + + conda env create -f environment.yml + +3. On some systems, there is a bug where `gmsh` must be installed through pip, rather than conda, to interface with `dolfinx` correctly. +If this bug occurs for you please run the following commands:: + + conda remove gmsh + pip install gmsh + +4. Install the OpenSG package by running the following command in the root of the OpenSG repository:: + + pip install . + +Alternatively, to perform a development installation run:: + + pip install -e . + + +.. Developers are recommended to install using the instructions on +.. :ref:`contributing` page. diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000..1aab482 --- /dev/null +++ b/environment.yml @@ -0,0 +1,21 @@ +name: opensg_env +channels: + - conda-forge +dependencies: + - python=3.10 + - numpy + - scipy + - mpi4py + - meshio + - petsc4py + - yaml + - contextlib2 # For compatibility + - pathlib + - typing-extensions + - fenics-dolfinx + - fenics-basix + - fenics-ufl + - mpich + - pyvista + - pyyaml + # - gmsh # I have to install this through pip for it to work. diff --git a/examples/compute_eb_blade.py b/examples/compute_eb_blade.py new file mode 100644 index 0000000..a43b6b8 --- /dev/null +++ b/examples/compute_eb_blade.py @@ -0,0 +1,76 @@ +from os.path import join + +# import pynumad +import opensg +import numpy as np + +# load blade.yml into pynumad +# blade_path = join("data", "blade.yaml") +# blade = pynumad.Blade(blade_path) + +# create mesh + +# NOTE: triangular elements are problematic so will need a workaround. +# Evan may have a solution. + +# create mesh object for use with opensg +# 3 options: +# - create our own mesh class +# - use pynumad class +# - use dolfinx or gmsh structures? 
not sure if these are flexible enough though + +# blade_mesh_info = mesh() +mesh_yaml = join("data", "bar_urc_shell_mesh.yaml") +mesh_data = opensg.load_yaml(mesh_yaml) + +blade_mesh = opensg.BladeMesh(mesh_data) +section_mesh = blade_mesh.generate_segment_mesh(segment_index=1, filename="section.msh") + +section_layups = section_mesh._generate_layup_data() + +frame = section_mesh.generate_local_orientations() + +section_mesh.extract_boundaries() + +section_mesh.generate_boundary_ABD() + + + +pause + + +## Extract the mesh for the section +nodes = mesh_data['nodes'] +numNds = len(nodes) +elements = mesh_data['elements'] +numEls = len(elements) + +ndNewLabs = -1*np.ones(numNds,dtype=int) +elNewLabs = -1*np.ones(numEls,dtype=int) +elLayID = -1*np.ones(numEls,dtype=int) + +# iterate through blade + +segment_matrices = [] + +for i in range(len(blade.ispan)): + # select ith blade segment + blade_segment_mesh = opensg.blade.select_segment(blade_mesh_info) + # analysis options + # solid vs shell + # whole segment vs boundary + # analyses: + # stresses/stiffness/buckling + # stresses after beamdyn + data = opensg.compute_eb_segment(blade_segment_mesh) + # data = opensg.compute_eb_boundaries(blade_segment_mesh) + # data = opensg.compute_timo_segment(blade_segment_mesh) # ***** high priority + # data = opensg.compute_eb_buckling(blade_segment_mesh) + # data = opensg.compute_timo_buckling(blade_segment_mesh) # **** top priority + # data = opensg.compute_timo_boundaries(blade_segment_mesh) + + segment_matrices.append(data) + + +# ideally, we could also have a step to run beamdyn +opensg.beamdyn.run_analysis(blade_mesh_info, segment_matrices) \ No newline at end of file diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000..0c1bd30 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,45 @@ +import nox + +@nox.session +def tests(session): + """Run tests.""" + session.install(".") + session.install("pytest") + session.run("pytest") + +@nox.session +def lint(session): + 
"""Lint.""" + session.install("flake8") + session.run("flake8", "--import-order-style", "google") + +@nox.session +def docs(session): + """Generate documentation.""" + session.run("pip", "install", "-e", ".") + session.install("sphinx") + session.install("pydata-sphinx-theme") + session.install("sphinxcontrib-bibtex") + session.cd("docs/") + session.run("make", "html") + +@nox.session +def serve(session): + """Serve documentation. Port can be specified as a positional argument.""" + try: + port = session.posargs[0] + except IndexError: + port = "8085" + session.run("python", "-m", "http.server", "-b", "localhost", "-d", "docs/_build/html", port) + +@nox.session +def check_style(session): + """Check if code follows black style.""" + session.install("black") + session.run("black", "--check", "src") + +@nox.session +def enforce_style(session): + """Apply black style to code base.""" + session.install("black") + session.run("black", "src") diff --git a/opensg/__init__.py b/opensg/__init__.py new file mode 100644 index 0000000..59bef2d --- /dev/null +++ b/opensg/__init__.py @@ -0,0 +1,7 @@ +from opensg.solve import ksp_solve, ABD_mat, nullspace +from opensg.timo import local_frame_1D, directional_derivative, local_grad, ddot +from opensg.io import load_yaml, write_yaml +from opensg.mesh import BladeMesh +from opensg import util + +__version__ = "0.0.1" \ No newline at end of file diff --git a/opensg/blade_utils.py b/opensg/blade_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/opensg/io.py b/opensg/io.py new file mode 100644 index 0000000..e96daa9 --- /dev/null +++ b/opensg/io.py @@ -0,0 +1,54 @@ +import yaml +import numpy as np + + +def load_yaml(yaml_file): + with open(yaml_file, 'r') as file: + mesh_data = yaml.load(file, Loader=yaml.CLoader) + return mesh_data + +def write_yaml(data, yaml_file): + with open(yaml_file, 'w') as file: + yaml.dump(data, file, Dumper=yaml.CDumper) + return + +# TODO write a function to validate mesh data schema + + +def 
write_mesh(filename, blade_mesh): + mesh_file = open(filename, 'w') + + mesh_file.write('$MeshFormat\n2.2 0 8\n$EndMeshFormat\n$Nodes\n') + newNumNds = np.max(ndNewLabs) + mesh_file.write(str(newNumNds) + '\n') + + for i, nd in enumerate(nodes): + lab = ndNewLabs[i] + if(lab > -1): + ln = [str(lab),str(nd[2]),str(nd[0]),str(nd[1])] + # ln = [str(lab),str(nd[0]),str(nd[1]),str(nd[2])] + mesh_file.write(' '.join(ln) + '\n') + + mesh_file.write('$EndNodes\n$Elements\n') + + newNumEls = np.max(elNewLabs) + mesh_file.write(str(newNumEls) + '\n') + + for i, el in enumerate(elements): + lab = elNewLabs[i] + if(lab > -1): + ln = [str(lab)] + if(el[3] == -1): + ln.append('2') + else: + ln.append('3') + ln.append('2') + ln.append(str(elLayID[i]+1)) + ln.append(str(elLayID[i]+1)) + for nd in el: + if(nd > -1): + ln.append(str(ndNewLabs[nd])) + mesh_file.write(' '.join(ln) + '\n') + mesh_file.write('$EndElements\n') + + mesh_file.close() \ No newline at end of file diff --git a/opensg/mesh.py b/opensg/mesh.py new file mode 100644 index 0000000..f71fa2f --- /dev/null +++ b/opensg/mesh.py @@ -0,0 +1,343 @@ +import numpy as np +import dolfinx +import basix +from dolfinx.io import gmshio +from mpi4py import MPI + +import opensg + +class BladeMesh: + """This class processes and stores information about a wind turbine blade's mesh + """ + def __init__(self, mesh_data): + """ + + Parameters + ---------- + mesh_data : dict + dictionary of mesh data loaded in from the output of pynumad mesher + """ + self._mesh_data = mesh_data + + self.nodes = mesh_data['nodes'] + self.num_nodes = len(self.nodes) + + self.elements = mesh_data['elements'] + self.num_elements = len(self.elements) + + self.sets = mesh_data["sets"] + self.materials = mesh_data["materials"] + self.sections = mesh_data["sections"] + self.element_orientations = mesh_data["elementOrientations"] + + self._generate_material_database() + + + def _generate_material_database(self): + material_database = dict() + + for i, material 
in enumerate(self.materials): + material_dict = dict() + + material_dict["id"] = i + + elastic = material['elastic'] + material_dict["E"] = elastic['E'] + material_dict["G"] = elastic['G'] + material_dict["nu"] = elastic['nu'] + + material_database[material['name']] = material_dict + + self.material_database = material_database + return + + + def generate_segment_mesh(self, segment_index, filename): + segment_node_labels = -1 * np.ones(self.num_nodes, dtype=int) + segment_element_labels = -1 * np.ones(self.num_elements, dtype=int) + segment_element_layer_id = -1 * np.ones(self.num_elements, dtype=int) + + layer_count = 0 # NOTE: Are we assuming that element sets are ordered in a particular way? -klb + for element_set in self.sets["element"]: + name = element_set["name"] + name_components = name.split("_") + + labels = element_set["labels"] + if len(name_components) > 2: + if (int(name_components[1]) == segment_index): + for element_label in labels: + segment_element_labels[element_label] = 1 + segment_element_layer_id[element_label] = layer_count + for node in self.elements[element_label]: + if (node > -1): # when would this not be the case? 
- klb + segment_node_labels[node] = 1 + layer_count += 1 + + element_label = 1 + for i, e in enumerate(segment_element_labels): + if (e == 1): + segment_element_labels[i] = element_label + element_label += 1 + + node_label = 1 + for i, n in enumerate(segment_node_labels): + if (n == 1): + segment_node_labels[i] = node_label + node_label += 1 + + file = open(filename,'w') + + file.write('$MeshFormat\n2.2 0 8\n$EndMeshFormat\n$Nodes\n') + newNumNds = np.max(segment_node_labels) + file.write(str(newNumNds) + '\n') + + for i, nd in enumerate(self.nodes): + lab = segment_node_labels[i] + if(lab > -1): + ln = [str(lab),str(nd[2]),str(nd[0]),str(nd[1])] + # ln = [str(lab),str(nd[0]),str(nd[1]),str(nd[2])] + file.write(' '.join(ln) + '\n') + + file.write('$EndNodes\n$Elements\n') + + newNumEls = np.max(segment_element_labels) + file.write(str(newNumEls) + '\n') + + for i, el in enumerate(self.elements): + lab = segment_element_labels[i] + if(lab > -1): + ln = [str(lab)] + if(el[3] == -1): + ln.append('2') + else: + ln.append('3') + ln.append('2') + ln.append(str(segment_element_layer_id[i]+1)) + ln.append(str(segment_element_layer_id[i]+1)) + for nd in el: + if(nd > -1): + ln.append(str(segment_node_labels[nd])) + file.write(' '.join(ln) + '\n') + file.write('$EndElements\n') + + file.close() + + # initialize segmentmesh object + segment_mesh = SegmentMesh( + segment_node_labels=segment_node_labels, + segment_element_labels=segment_element_labels, + segment_element_layer_id=segment_element_layer_id, + segment_index=segment_index, + parent_blade_mesh=self, + msh_file=filename) + return segment_mesh + + +class SegmentMesh(): + def __init__( + self, + segment_node_labels, + segment_element_labels, + segment_element_layer_id, + segment_index, + parent_blade_mesh, + msh_file): + """This class manages the data and methods for the mesh of a segment of a blade. + + A segment is defined as the part of the blade between two fixed points along the blade span. 
+ Given a set of N span points along the blade, there are N-1 segments defined between each consecutive pair + of span points. For example, the segment indexed by 0 is defined between the span points indexed by 0 and 1 + + Parameters + ---------- + segment_node_labels : array[int] + _description_ + segment_element_labels : array[int] + _description_ + segment_element_layer_id : array[int] + _description_ + segment_index : int + Index of the segment of blade + parent_blade_mesh : BladeMesh + BladeMesh object that SegmentMesh derives from. + msh_file : str or Path + Path to mesh file to load data from + """ + + self.segment_node_labels = segment_node_labels + self.segment_element_labels = segment_element_labels + self.segment_element_layer_id = segment_element_layer_id + self.segment_index = segment_index + self.blade_mesh = parent_blade_mesh + + self.mesh, self.subdomains, self.boundaries = gmshio.read_from_msh(msh_file, MPI.COMM_WORLD,0, gdim=3) + self.original_cell_index = self.mesh.topology.original_cell_index # Original cell Index from mesh file + lnn = self.subdomains.values[:]-1 + self.num_cells = self.mesh.topology.index_map(self.mesh.topology.dim).size_local + cells = np.arange(self.num_cells, dtype=np.int32) + # NOTE: Unsure what this next line does. 
+ self.subdomains = dolfinx.mesh.meshtags(self.mesh, self.mesh.topology.dim, cells, np.array(lnn,dtype=np.int32)) + + self.tdim = self.mesh.topology.dim + self.fdim = self.tdim - 1 + + return + + def _generate_layup_data(self): + layup_database = dict() + + mat_names, thick, angle, nlay = [], [], [], [] + for section in self.blade_mesh.sections: + name_components = section['elementSet'].split('_') + if(len(name_components) > 2): + material_name, t, an = [], [], [] + if(int(name_components[1]) == self.segment_index): + layup = section['layup'] # layup = [[material_name: str, thickness: float, angle:]] + nlay.append(len(layup)) + for layer in layup: + material_name.append(layer[0]) + mat_names.append(material_name) + for layer in layup: + t.append(layer[1]) + thick.append(t) + for layer in layup: + an.append(layer[2]) + angle.append(an) + + layup_database["mat_names"] = mat_names + layup_database["thick"] = thick + layup_database["angle"] = angle + layup_database["nlay"] = nlay + self.layup_database = layup_database + + return layup_database + + + def generate_local_orientations(self): + # Local Orientation (DG0 function) of quad mesh element (from yaml data) + VV = dolfinx.fem.functionspace( + self.mesh, basix.ufl.element( + "DG", self.mesh.topology.cell_name(), + 0, shape=(3, ))) + EE1, EE2, N = dolfinx.fem.Function(VV), dolfinx.fem.Function(VV), dolfinx.fem.Function(VV) + + orientations = [] + for i, eo in enumerate(self.blade_mesh.element_orientations): + if(self.segment_element_labels[i] > -1): + o = [] + for k in range(9): + o.append(eo[k]) + orientations.append(o) + + # Store orientation for each element + # TODO: clarify variable names. why N? 
+ for k, ii in enumerate(self.original_cell_index): + # Storing data to DG0 functions + EE2.x.array[3*k], EE2.x.array[3*k+1], EE2.x.array[3*k+2] = orientations[ii][5], orientations[ii][3], orientations[ii][4] # e2 + N.x.array[3*k], N.x.array[3*k+1], N.x.array[3*k+2] = orientations[ii][8], orientations[ii][6], orientations[ii][7] # e3 + EE1.x.array[3*k], EE1.x.array[3*k+1], EE1.x.array[3*k+2] = orientations[ii][2], orientations[ii][0], orientations[ii][1] # e1 outward normal + self.EE1 = EE1 + self.N = N + self.EE2 = EE2 + frame = [EE1,EE2,N] + return frame + + + def extract_boundaries(self): + # extract geometry + pp = self.mesh.geometry.x + + is_left_boundary, is_right_boundary = opensg.util.generate_boundary_markers( + min(pp[:,0]), max(pp[:,0])) + + + facets_left = dolfinx.mesh.locate_entities_boundary( + self.mesh, dim=self.fdim, marker=is_left_boundary) + facets_right = dolfinx.mesh.locate_entities_boundary( + self.mesh, dim=self.fdim, marker=is_right_boundary) + + right_mesh, right_entity_map, right_vertex_map, right_geom_map = dolfinx.mesh.create_submesh( + self.mesh, self.fdim, facets_right) + left_mesh, left_entity_map, left_vertex_map, left_geom_map = dolfinx.mesh.create_submesh( + self.mesh, self.fdim, facets_left) + + self.right_submesh = { + "mesh": right_mesh, + "entity_map": right_entity_map, + "vertex_map": right_vertex_map, + "geom_map": right_geom_map} + + self.left_submesh = { + "mesh": left_mesh, + "entity_map": left_entity_map, + "vertex_map": left_vertex_map, + "geom_map": left_geom_map} + + # generate subdomains + self.mesh.topology.create_connectivity(2,1) # (quad mesh topology, boundary(1D) mesh topology) + cell_of_facet_mesh = self.mesh.topology.connectivity(2,1) + + # Cell to Edge connectivity + # cell_edge_map = [] + # for i in range(self.num_cells): + # c = [] + # for k in range(4): # 4 is used as number of edges in a quad element + # c.append((cell_of_facet_mesh.array[4*i+k])) + # cell_edge_map.append(c) + # cell_edge_map = 
np.ndarray.flatten(np.array(cell_edge_map)) + + # + def subdomains_boundary(boundary_mesh, boundary_marker, boundary_entity_map): + boundary_VV = dolfinx.fem.functionspace( + boundary_mesh, basix.ufl.element("DG", boundary_mesh.topology.cell_name(), 0, shape=(3, ))) + + boundary_e1 = dolfinx.fem.Function(boundary_VV) + boundary_e2 = dolfinx.fem.Function(boundary_VV) + boundary_n = dolfinx.fem.Function(boundary_VV) + + boundary_subdomains = [] + + boundary_facets_left = dolfinx.mesh.locate_entities(boundary_mesh, self.fdim, boundary_marker) + + # TODO: review the subdomain assingments with akshat + for i, xx in enumerate(boundary_entity_map): + # assign subdomain + idx = int(np.where(cell_of_facet_mesh.array==xx)[0]/4) # 4 is for number of nodes in quad element + boundary_subdomains.append(self.subdomains.values[idx]) + # assign orientation + for j in range(3): + boundary_e1.x.array[3*i+j] = self.EE1.x.array[3*idx+j] + boundary_e2.x.array[3*i+j] = self.EE2.x.array[3*idx+j] + boundary_n.x.array[3*i+j] = self.N.x.array[3*idx+j] + + frame = [boundary_e1, boundary_e2, boundary_n] + boundary_subdomains = np.array(boundary_subdomains, dtype=np.int32) + boundary_num_cells = boundary_mesh.topology.index_map(boundary_mesh.topology.dim).size_local + boundary_cells = np.arange(boundary_num_cells, dtype=np.int32) + boundary_subdomains = dolfinx.mesh.meshtags(boundary_mesh, boundary_mesh.topology.dim, boundary_cells, boundary_subdomains) + return boundary_subdomains, frame, boundary_facets_left + # Mapping the orinetation data from quad mesh to boundary. The alternative is to use local_frame_1D(mesh_l). 
+ # Either of both can be used in local_boun subroutine + + self.left_submesh["subdomains"], self.left_submesh["frame"], self.left_submesh["facets_left"] = subdomains_boundary( + self.left_submesh["mesh"], is_left_boundary, self.left_submesh["entity_map"]) + self.right_submesh["subdomains"], self.right_submesh["frame"], self.right_submesh["facets_left"] = subdomains_boundary( + self.right_submesh["mesh"], is_right_boundary, self.right_submesh["entity_map"]) + + def generate_boundary_ABD(self): + nphases = max(self.subdomains.values[:]) + 1 + ABD_ = [] + for i in range(nphases): + ABD_.append(opensg.ABD_mat( + i, + thick=self.layup_database["thick"], + nlay=self.layup_database["nlay"], + mat_names=self.layup_database["mat_names"], + angle=self.layup_database["angle"], + material_database=self.blade_mesh.material_database + )) + + # print('Computed',nphases,'ABD matrix') + + # def ABD_matrix(i): + # return(as_tensor(ABD_[i])) \ No newline at end of file diff --git a/opensg/solve.py b/opensg/solve.py new file mode 100644 index 0000000..15510b1 --- /dev/null +++ b/opensg/solve.py @@ -0,0 +1,328 @@ +# Initialization of libraries +from mpi4py import MPI +import numpy as np +import dolfinx +import basix +from dolfinx.fem import form, petsc, Function, functionspace +from ufl import TrialFunction, TestFunction, inner, lhs, rhs +import petsc4py.PETSc +from mpi4py import MPI +import ufl +from contextlib import ExitStack + + +def ksp_solve(A, F, V): + """Krylov Subspace Solver for Aw = F + + Parameters + ---------- + A : array + stiffness matrix + F : array + Load or force vector + V : function space + _description_ + + Returns + ------- + array + solution vector (displacement field) + """ + w = Function(V) + ksp = petsc4py.PETSc.KSP() + ksp.create(comm=MPI.COMM_WORLD) + ksp.setOperators(A) + ksp.setType("preonly") + ksp.getPC().setType("lu") + ksp.getPC().setFactorSolverType("mumps") + ksp.getPC().setFactorSetUpSolverType() + 
# --- continuation of ksp_solve(A, F, V), whose definition starts above this chunk ---
    # MUMPS ICNTL(24)=1: enable null-pivot detection (A is singular due to the
    # rigid-body null space removed below).
    ksp.getPC().getFactorMatrix().setMumpsIcntl(icntl=24, ival=1)  # detect null pivots
    ksp.getPC().getFactorMatrix().setMumpsIcntl(
        icntl=25, ival=0
    )  # do not compute null space again
    ksp.setFromOptions()
    ksp.solve(F, w.vector)
    # Propagate owned values into ghost entries after the parallel solve.
    w.vector.ghostUpdate(
        addv=petsc4py.PETSc.InsertMode.INSERT, mode=petsc4py.PETSc.ScatterMode.FORWARD
    )
    ksp.destroy()
    return w


def nullspace(V):
    """Compute nullspace to restrict rigid body motions.

    Constructs a translational null space for the vector-valued function space V
    and ensures that it is properly orthonormalized.

    Parameters
    ----------
    V : dolfinx.fem.FunctionSpace
        Vector-valued (3-component) function space the null space is built for.

    Returns
    -------
    petsc4py.PETSc.NullSpace
        Orthonormalized translational null space of V.
    """
    # extract the Index Map from the Function Space
    index_map = V.dofmap.index_map

    # initialize nullspace basis with petsc vectors
    # NOTE(review): 4 vectors are created but only 3 translational modes are
    # filled below — the 4th stays zero and is fed to orthonormalize; confirm
    # whether range(3) was intended.
    nullspace_basis = [
        dolfinx.la.create_petsc_vector(index_map, V.dofmap.index_map_bs)
        for _ in range(4)
    ]

    with ExitStack() as stack:
        # Local (writable) views of each basis vector; the ExitStack closes
        # every localForm context when the block exits.
        vec_local = [stack.enter_context(xx.localForm()) for xx in nullspace_basis]
        basis = [np.asarray(xx) for xx in vec_local]

        # identify the degrees of freedom indices for each subspace (x, y, z)
        dofs = [V.sub(i).dofmap.list for i in range(3)]

        # Build translational null space basis: unit displacement per direction
        for i in range(3):
            basis[i][dofs[i]] = 1.0

    # Create vector space basis and orthogonalize
    dolfinx.la.orthonormalize(nullspace_basis)

    return petsc4py.PETSc.NullSpace().create(nullspace_basis, comm=MPI.COMM_WORLD)


def ABD_mat(ii, thick, nlay, angle, mat_names, material_database):
    """Compute the ABD matrix for a composite layup structure.

    Constructs a local stiffness matrix for a composite laminate
    by assembling the contributions from multiple layers, solving a
    1D "structure gene" (SG) problem through the laminate thickness.

    Parameters
    ----------
    ii : int
        Index of the layup/segment to process (selects into thick, nlay,
        angle and mat_names).
    thick : sequence
        Per-layup sequence of layer thicknesses; thick[ii] is iterated.
    nlay : sequence of int
        Number of layers per layup; nlay[ii] elements are meshed.
    angle : sequence
        Per-layup, per-layer fiber angles in degrees (passed to sigma/R_sig).
    mat_names : sequence
        Per-layup, per-layer material-name keys into material_database.
    material_database : mapping
        Maps material name to a dict with "E", "G", "nu" entries (see sigma).

    Returns
    -------
    numpy.ndarray
        6x6 effective stiffness matrix D_eff = D_ee + V0^T (-Dhe).
    """

    ## 1D mesh generation
    # initialization
    deg = 2
    # NOTE(review): this `cell` is never used — it is shadowed by the element
    # connectivity list built below.
    cell = ufl.Cell("interval")
    elem = basix.ufl.element("Lagrange", "interval", 1, shape=(3,))
    domain = ufl.Mesh(elem)

    # nodes (1D SG): cumulative thickness coordinates through the laminate
    th, s = [0], 0  # Reference starting point
    for k in thick[ii]:
        s = s + k  # Add the thickness of each layer
        th.append(s)
    points = np.array(th)

    # elements: one interval element per layer
    cell = []
    for k in range(nlay[ii]):
        cell.append([k, k + 1])
    cellss = np.array(cell)

    # Create mesh object
    dom = dolfinx.mesh.create_mesh(MPI.COMM_WORLD, cellss, points, domain)
    num_cells = dom.topology.index_map(dom.topology.dim).size_local
    cells = np.arange(num_cells, dtype=np.int32)

    #
    # Note: For a layup like layup1: containing 3 layers:
    # we defined 1D mesh with 3 elements and make each element a separate
    # subdomain (where integration is computed by *dx(i) for subdomain i)
    # using cells-[0,1,2]

    # assigning each element as subdomain
    subdomain = dolfinx.mesh.meshtags(dom, dom.topology.dim, cells, cells)
    x, dx = ufl.SpatialCoordinate(dom), ufl.Measure("dx")(domain=dom, subdomain_data=subdomain)
    gamma_e = create_gamma_e(x)

    nphases = len(cells)

    # Creating FE function Space
    V = functionspace(dom, basix.ufl.element("CG", "interval", deg, shape=(3,)))
    u, v = TrialFunction(V), TestFunction(V)

    # Weak form of energy
    # dx is for integration
    F2 = sum(
        [inner(sigma(
            vector=u, material_parameters=material_database[mat_names[ii][i]],
            theta=angle[ii][i], Eps=gamma_e[:,0])[0],
            eps(v)[0]) * dx(i) for i in range(nphases)]
    )

    # lhs gives left hand side of weak form : coeff matrix here
    A = petsc.assemble_matrix(form(lhs(F2)))
    A.assemble()
    null = nullspace(V)
    A.setNullSpace(null)  # Set the nullspace

    xx = 3 * V.dofmap.index_map.local_range[1]  # total dofs

    # Initialization
    V0, Dhe, D_ee = np.zeros((xx, 6)), np.zeros((xx, 6)), np.zeros((6, 6))

    # Assembly: solve once per unit macroscopic strain component
    for p in range(6):
        Eps = gamma_e[:, p]

        # weak form
        # NOTE(review): sigma(u, i, Eps) does not match sigma's signature
        # (vector, material_parameters, theta, Eps) — here `i` lands in
        # material_parameters. Compare with the first F2 above; confirm/fix.
        F2 = sum([inner(sigma(u, i, Eps)[0], eps(v)[0]) * dx(i) for i in range(nphases)])

        # rhs is used for getting right hand side of Aw = F; (which is F here)
        F = petsc.assemble_vector(form(rhs(F2)))
        F.ghostUpdate(addv=petsc4py.PETSc.InsertMode.ADD, mode=petsc4py.PETSc.ScatterMode.REVERSE)
        null.remove(F)  # Orthogonalize F to the null space of A^T
        w = ksp_solve(A, F, V)
        Dhe[:, p] = F[:]  # Dhe matrix formation
        V0[:, p] = w.vector[:]  # V0 matrix formation

    D1 = np.matmul(V0.T, -Dhe)  # Additional information matrix

    # Scalar assembly for each term of D_ee matrix
    for s in range(6):
        for k in range(6):
            # Scalar assembly
            f = dolfinx.fem.form(sum([Dee(i)[s, k] * dx(i) for i in range(nphases)]))
            D_ee[s, k] = dolfinx.fem.assemble_scalar(f)

    D_eff = D_ee + D1

    return D_eff

def create_gamma_e(x):
    """Build the 6x6 strain-influence tensor Gamma_e for the 1D SG problem.

    Parameters
    ----------
    x : ufl.SpatialCoordinate
        Spatial coordinate of the 1D thickness mesh; x[0] is the
        through-thickness position.

    Returns
    -------
    ufl.Tensor
        6x6 tensor mapping the six macroscopic strain components to local
        strains (columns 3-5 scale with x[0]).
    """
    gamma_e = ufl.as_tensor([
        (1, 0, 0, x[0], 0, 0),
        (0, 1, 0, 0, x[0], 0),
        (0, 0, 0, 0, 0, 0),
        (0, 0, 0, 0, 0, 0),
        (0, 0, 0, 0, 0, 0),
        (0, 0, 1, 0, 0, x[0]),
    ])

    return gamma_e

def R_sig(C, t):
    """Rotate a 6x6 stiffness matrix by a fiber angle.

    Parameters
    ----------
    C : numpy.ndarray
        6x6 stiffness matrix in the material frame.
    t : float
        Rotation angle in degrees.

    Returns
    -------
    numpy.ndarray
        Rotated stiffness matrix R_Sig @ C @ R_Sig.T.
    """
    th = np.deg2rad(t)
    c, s, cs = np.cos(th), np.sin(th), np.cos(th) * np.sin(th)
    # Voigt-notation stress transformation matrix for an in-plane rotation
    R_Sig = np.array(
        [
            (c**2, s**2, 0, 0, 0, -2 * cs),
            (s**2, c**2, 0, 0, 0, 2 * cs),
            (0, 0, 1, 0, 0, 0),
            (0, 0, 0, c, s, 0),
            (0, 0, 0, -s, c, 0),
            (cs, -cs, 0, 0, 0, c**2 - s**2),
        ]
    )
    return np.matmul(np.matmul(R_Sig, C), R_Sig.transpose())

def Dee(i):
    """Simplifying gamma_e.T*C*gamma_e for subdomain i.

    NOTE(review): relies on module-level globals u, x and Eps being in scope
    at call time, and calls sigma(u, i, Eps) whose positional arguments do not
    match sigma's signature — confirm against the caller in ABD_mat.

    Parameters
    ----------
    i : int
        Subdomain (layer) index.

    Returns
    -------
    ufl.Tensor
        6x6 integrand tensor; the x0-scaled blocks couple membrane and
        bending terms.
    """
    C = sigma(u, i, Eps)[1]
    x0 = x[0]
    return ufl.as_tensor(
        [
            (C[0, 0], C[0, 1], C[0, 5], x0 * C[0, 0], x0 * C[0, 1], x0 * C[0, 5]),
            (C[1, 0], C[1, 1], C[1, 5], x0 * C[1, 0], x0 * C[1, 1], x0 * C[1, 5]),
            (C[5, 0], C[5, 1], C[5, 5], x0 * C[5, 0], x0 * C[5, 1], x0 * C[5, 5]),
            (
                x0 * C[0, 0],
                x0 * C[0, 1],
                x0 * C[0, 5],
                x0 * x0 * C[0, 0],
                x0 * x0 * C[0, 1],
                x0 * x0 * C[0, 5],
            ),
            (
                x0 * C[1, 0],
                x0 * C[1, 1],
                x0 * C[1, 5],
                x0 * x0 * C[1, 0],
                x0 * x0 * C[1, 1],
                x0 * x0 * C[1, 5],
            ),
            (
                x0 * C[5, 0],
                x0 * C[5, 1],
                x0 * C[5, 5],
                x0 * x0 * C[5, 0],
                x0 * x0 * C[5, 1],
                x0 * x0 * C[5, 5],
            ),
        ]
    )

def sigma(vector, material_parameters, theta, Eps):
    """Stress tensor and rotated stiffness for an orthotropic layer.

    Parameters
    ----------
    vector : ufl expression
        Displacement-like field whose strain enters the constitutive law.
    material_parameters : mapping
        Dict with "E" (E1, E2, E3), "G" (G12, G13, G23) and
        "nu" (v12, v13, v23) engineering constants.
    theta : float
        Fiber angle in degrees, passed to R_sig.
    Eps : ufl vector
        Macroscopic strain contribution added to the local strain.

    Returns
    -------
    tuple
        (3x3 ufl stress tensor, rotated 6x6 numpy stiffness matrix C).
    """
    E1, E2, E3 = material_parameters["E"]
    G12, G13, G23 = material_parameters["G"]
    v12, v13, v23 = material_parameters["nu"]
    # Orthotropic compliance matrix in Voigt notation (symmetric by
    # reciprocity: v21/E2 = v12/E1, etc.)
    S = np.zeros((6, 6))
    S[0, 0], S[1, 1], S[2, 2] = 1 / E1, 1 / E2, 1 / E3
    S[0, 1], S[0, 2] = -v12 / E1, -v13 / E1
    S[1, 0], S[1, 2] = -v12 / E1, -v23 / E2
    S[2, 0], S[2, 1] = -v13 / E1, -v23 / E2
    S[3, 3], S[4, 4], S[5, 5] = 1 / G23, 1 / G13, 1 / G12
    C = np.linalg.inv(S)
    C = R_sig(C, theta)  # rotate stiffness into the layup frame
    s1 = ufl.dot(ufl.as_tensor(C), eps(vector)[1] + Eps)
    # Repack the Voigt stress vector into a symmetric 3x3 tensor
    return ufl.as_tensor([(s1[0], s1[5], s1[4]), (s1[5], s1[1], s1[3]), (s1[4], s1[3], s1[2])]), C

def eps(vector):  # (Gamma_h * w)
    """Return (3x3 strain tensor, Voigt strain vector) of a 1D SG field.

    Only through-thickness derivatives (.dx(0)) appear because the SG mesh
    is one-dimensional.
    """
    E1 = ufl.as_vector([0, 0, vector[2].dx(0), (vector[1].dx(0)), (vector[0].dx(0)), 0])
    ans = ufl.as_tensor([
        (E1[0], 0.5 * E1[5], 0.5 * E1[4]),
        (0.5 * E1[5], E1[1], 0.5 * E1[3]),
        (0.5 * E1[4], 0.5 * E1[3], E1[2]),
    ])
    return ans, E1
diff --git a/opensg/timo.py b/opensg/timo.py
new file mode 100644
index 0000000..504d657
--- /dev/null
+++ b/opensg/timo.py
@@ -0,0 +1,101 @@

import ufl
import opensg


def local_frame_1D(mesh):
    """Build an orthonormal local frame (e1, e2, e3) for a 1D mesh."""
    t = ufl.Jacobian(mesh)
    # NOTE(review): `ufl.ufl.as_vector` — plain `ufl.as_vector` is almost
    # certainly intended; confirm against the installed ufl version.
    t1 = ufl.ufl.as_vector(
        [t[0, 0], t[1, 0], t[2, 0]]
    )  # [x2-x1,y2-y1,z2-z1] tangent vector for each element.
+ # Direction of tangenet vector about axis (1,0,0) depends on how we defined the mesh elements. + # For given example, final node-initial node --> clockwise direction about (1,0,0) axis. + + e2 = t1 / ufl.sqrt(ufl.dot(t1, t1)) # converting to unit vector + e1 = ufl.ufl.as_vector([1, 0, 0]) # Beam axis in x direction (global) + e3 = ufl.cross(e1, e2) # normal vector + e3 = e3 / ufl.sqrt(ufl.dot(e3, e3)) + return e1, e2, e3 + + + +def directional_derivative(e): + """Obtain the curvature of curved elements + + Directional derivative of e1 w.r.t. e2 --> d(e1)/d(e2) + ufl.grad (.)- is 3X1 vector --> [d(.)/dx ,d(.)/dy,d(.)/dz] + e2_1 shows derivative of e2 vector w.r.t. e1 vector + + Parameters + ---------- + e : _type_ + local element frame + + Returns + ------- + _type_ + _description_ + """ + # a3,1 + + e1, e2, e3 = e[0], e[1], e[2] + e1_1 = ufl.dot(e1, ufl.grad(e1)) + e1_2 = ufl.dot(e2, ufl.grad(e1)) + e2_1 = ufl.dot(e1, ufl.grad(e2)) + e2_2 = ufl.dot(e2, ufl.grad(e2)) + e3_1 = ufl.dot(e1, ufl.grad(e3)) + e3_2 = ufl.dot(e2, ufl.grad(e3)) + + # Initial Curvatures # + k11 = ufl.dot(e3_1, e1) + k12 = ufl.dot(e3_1, e2) + k21 = ufl.dot(e3_2, e1) + k22 = ufl.dot(e3_2, e2) + k13 = ufl.dot(e1_1, e2) + k23 = ufl.dot(e1_2, e2) + return k11, k12, k21, k22, k13, k23 + + +def local_grad(ee, q): + return ufl.dot(ee, ufl.grad(q)) + + +def ddot(w, d1): + return d1[0] * w[0] + d1[1] * w[1] + d1[2] * w[2] + + +def gamma_h(e, x, w): + d11 = ufl.as_vector([-k11 * d3[ii] + k13 * d2[ii] for ii in range(3)]) + d22 = ufl.as_vector([-k22 * d3[ii] - k23 * d1[ii] for ii in range(3)]) + d12 = ufl.as_vector([-k21 * d3[ii] + k23 * d2[ii] for ii in range(3)]) + d21 = ufl.as_vector([-k12 * d3[ii] - k13 * d1[ii] for ii in range(3)]) + + w_d1 = [opensg.local_grad(e[0], w[i]) for i in range(3)] + w_d2 = [opensg.local_grad(e[1], w[i]) for i in range(3)] + w_d11 = [opensg.local_grad(e[0], w_d1[i]) for i in range(3)] + w_d22 = [opensg.local_grad(e[1], w_d2[i]) for i in range(3)] + + w_d12 = 
[opensg.local_grad(e[1], w_d1[ii]) for ii in range(3)] + w_d21 = [opensg.local_grad(e[0], w_d2[ii]) for ii in range(3)] + w_11 = [opensg.local_grad(d11, w[ii]) for ii in range(3)] + w_22 = [opensg.local_grad(d22, w[ii]) for ii in range(3)] + w_12 = [opensg.local_grad(d12, w[ii]) for ii in range(3)] + w_21 = [opensg.local_grad(d21, w[ii]) for ii in range(3)] + + G1 = opensg.ddot(w_d1, d1) + G2 = opensg.ddot(w_d2, d2) + G3 = opensg.ddot(w_d1, d2) + opensg.ddot(w_d2, d1) + G4 = -k11 * G1 - k12 * 0.5 * G3 - opensg.ddot(w_d11, d3) + k13 * opensg.ddot(w_d2, d3) + G5 = -k22 * G2 - k21 * 0.5 * G3 - opensg.ddot(w_d22, d3) - k23 * opensg.ddot(w_d1, d3) + G6 = ( + -(k11 + k22) * 0.5 * G3 + - k12 * G2 + - k21 * G1 + + k23 * opensg.ddot(w_d2, d3) + - k13 * opensg.ddot(w_d1, d3) + - opensg.ddot(w_d12, d3) + - opensg.ddot(w_d21, d3) + ) + + E1 = as_tensor([G1, G2, G3, G4, G5, G6]) + return E1 \ No newline at end of file diff --git a/opensg/util.py b/opensg/util.py new file mode 100644 index 0000000..f262dae --- /dev/null +++ b/opensg/util.py @@ -0,0 +1,9 @@ +import numpy as np + + +def generate_boundary_markers(xmin, xmax): + def is_left_boundary(x): + return np.isclose(x[0], xmin) + def is_right_boundary(x): + return np.isclose(x[0], xmax) + return is_left_boundary, is_right_boundary \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..712355f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,46 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "opensg" +version = "0.1.0" +description = "" +readme = "README.md" +license.file = "LICENSE" +authors = [ + { name = "Ernesto Camarena", email = "ecamare@sandia.gov" }, +] +# maintainers = [ +# { name = "TODO", email = "TODO" }, +# ] +requires-python = ">=3.9,<3.11" # this syntax might not work... 
# dependencies = [
#     "numpy",
#     "dolfinx",
#     "meshio",
#     "petsc4py",
#     "ufl",
#     "basix",
#     "gmsh"
# ]

# NOTE: requires-python above caps support at <3.11, yet the classifiers below
# advertise Python 3.11-3.13 — reconcile one or the other before release.
classifiers = [
    "Development Status :: 4 - Beta",
    "License :: OSI Approved :: BSD License",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Scientific/Engineering :: Physics",
]

[project.urls]
Homepage = "TODO"
Documentation = "TODO"
"Bug Tracker" = "TODO"
Discussions = "TODO"
Changelog = "TODO"