Fix missing template type in generation (#3404)
schnellerhase committed Sep 15, 2024
1 parent 7bb01ad commit 9635a82
Showing 2 changed files with 20 additions and 20 deletions.
34 changes: 17 additions & 17 deletions cpp/dolfinx/mesh/generation.h
@@ -37,36 +37,36 @@ enum class DiagonalType
 namespace impl
 {
 template <std::floating_point T>
-Mesh<T> build_tri(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
+Mesh<T> build_tri(MPI_Comm comm, std::array<std::array<T, 2>, 2> p,
                   std::array<std::int64_t, 2> n,
                   const CellPartitionFunction& partitioner,
                   DiagonalType diagonal);
 
 template <std::floating_point T>
-Mesh<T> build_quad(MPI_Comm comm, const std::array<std::array<double, 2>, 2> p,
+Mesh<T> build_quad(MPI_Comm comm, const std::array<std::array<T, 2>, 2> p,
                    std::array<std::int64_t, 2> n,
                    const CellPartitionFunction& partitioner);
 
 template <std::floating_point T>
 std::vector<T> create_geom(MPI_Comm comm,
-                           std::array<std::array<double, 3>, 2> p,
+                           std::array<std::array<T, 3>, 2> p,
                            std::array<std::int64_t, 3> n);
 
 template <std::floating_point T>
 Mesh<T> build_tet(MPI_Comm comm, MPI_Comm subcomm,
-                  std::array<std::array<double, 3>, 2> p,
+                  std::array<std::array<T, 3>, 2> p,
                   std::array<std::int64_t, 3> n,
                   const CellPartitionFunction& partitioner);
 
 template <std::floating_point T>
 Mesh<T> build_hex(MPI_Comm comm, MPI_Comm subcomm,
-                  std::array<std::array<double, 3>, 2> p,
+                  std::array<std::array<T, 3>, 2> p,
                   std::array<std::int64_t, 3> n,
                   const CellPartitionFunction& partitioner);
 
 template <std::floating_point T>
 Mesh<T> build_prism(MPI_Comm comm, MPI_Comm subcomm,
-                    std::array<std::array<double, 3>, 2> p,
+                    std::array<std::array<T, 3>, 2> p,
                     std::array<std::int64_t, 3> n,
                     const CellPartitionFunction& partitioner);
 } // namespace impl
@@ -93,7 +93,7 @@ Mesh<T> build_prism(MPI_Comm comm, MPI_Comm subcomm,
 /// @return Mesh
 template <std::floating_point T = double>
 Mesh<T> create_box(MPI_Comm comm, MPI_Comm subcomm,
-                   std::array<std::array<double, 3>, 2> p,
+                   std::array<std::array<T, 3>, 2> p,
                    std::array<std::int64_t, 3> n, CellType celltype,
                    CellPartitionFunction partitioner = nullptr)
 {
@@ -139,7 +139,7 @@ Mesh<T> create_box(MPI_Comm comm, MPI_Comm subcomm,
 /// across MPI ranks.
 /// @return Mesh
 template <std::floating_point T = double>
-Mesh<T> create_box(MPI_Comm comm, std::array<std::array<double, 3>, 2> p,
+Mesh<T> create_box(MPI_Comm comm, std::array<std::array<T, 3>, 2> p,
                    std::array<std::int64_t, 3> n, CellType celltype,
                    const CellPartitionFunction& partitioner = nullptr)
 {
@@ -163,7 +163,7 @@ Mesh<T> create_box(MPI_Comm comm, std::array<std::array<double, 3>, 2> p,
 /// @param[in] diagonal Direction of diagonals
 /// @return Mesh
 template <std::floating_point T = double>
-Mesh<T> create_rectangle(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
+Mesh<T> create_rectangle(MPI_Comm comm, std::array<std::array<T, 2>, 2> p,
                          std::array<std::int64_t, 2> n, CellType celltype,
                          CellPartitionFunction partitioner,
                          DiagonalType diagonal = DiagonalType::right)
@@ -206,7 +206,7 @@ Mesh<T> create_rectangle(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
 /// @param[in] diagonal Direction of diagonals
 /// @return Mesh
 template <std::floating_point T = double>
-Mesh<T> create_rectangle(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
+Mesh<T> create_rectangle(MPI_Comm comm, std::array<std::array<T, 2>, 2> p,
                          std::array<std::int64_t, 2> n, CellType celltype,
                          DiagonalType diagonal = DiagonalType::right)
 {
@@ -227,7 +227,7 @@ Mesh<T> create_rectangle(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
 /// across MPI ranks.
 /// @return A mesh.
 template <std::floating_point T = double>
-Mesh<T> create_interval(MPI_Comm comm, std::int64_t n, std::array<double, 2> p,
+Mesh<T> create_interval(MPI_Comm comm, std::int64_t n, std::array<T, 2> p,
                         mesh::GhostMode ghost_mode = mesh::GhostMode::none,
                         CellPartitionFunction partitioner = nullptr)
 {
@@ -278,7 +278,7 @@ namespace impl
 {
 template <std::floating_point T>
 std::vector<T> create_geom(MPI_Comm comm,
-                           std::array<std::array<double, 3>, 2> p,
+                           std::array<std::array<T, 3>, 2> p,
                            std::array<std::int64_t, 3> n)
 {
   // Extract data
@@ -327,7 +327,7 @@ std::vector<T> create_geom(MPI_Comm comm,
 
 template <std::floating_point T>
 Mesh<T> build_tet(MPI_Comm comm, MPI_Comm subcomm,
-                  std::array<std::array<double, 3>, 2> p,
+                  std::array<std::array<T, 3>, 2> p,
                   std::array<std::int64_t, 3> n,
                   const CellPartitionFunction& partitioner)
 {
@@ -375,7 +375,7 @@ Mesh<T> build_tet(MPI_Comm comm, MPI_Comm subcomm,
 
 template <std::floating_point T>
 mesh::Mesh<T> build_hex(MPI_Comm comm, MPI_Comm subcomm,
-                        std::array<std::array<double, 3>, 2> p,
+                        std::array<std::array<T, 3>, 2> p,
                         std::array<std::int64_t, 3> n,
                         const CellPartitionFunction& partitioner)
 {
@@ -418,7 +418,7 @@ mesh::Mesh<T> build_hex(MPI_Comm comm, MPI_Comm subcomm,
 
 template <std::floating_point T>
 Mesh<T> build_prism(MPI_Comm comm, MPI_Comm subcomm,
-                    std::array<std::array<double, 3>, 2> p,
+                    std::array<std::array<T, 3>, 2> p,
                     std::array<std::int64_t, 3> n,
                     const CellPartitionFunction& partitioner)
 {
@@ -464,7 +464,7 @@ Mesh<T> build_prism(MPI_Comm comm, MPI_Comm subcomm,
 }
 
 template <std::floating_point T>
-Mesh<T> build_tri(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
+Mesh<T> build_tri(MPI_Comm comm, std::array<std::array<T, 2>, 2> p,
                   std::array<std::int64_t, 2> n,
                   const CellPartitionFunction& partitioner,
                   DiagonalType diagonal)
@@ -621,7 +621,7 @@ Mesh<T> build_tri(MPI_Comm comm, std::array<std::array<double, 2>, 2> p,
 }
 
 template <std::floating_point T>
-Mesh<T> build_quad(MPI_Comm comm, const std::array<std::array<double, 2>, 2> p,
+Mesh<T> build_quad(MPI_Comm comm, const std::array<std::array<T, 2>, 2> p,
                    std::array<std::int64_t, 2> n,
                    const CellPartitionFunction& partitioner)
 {
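The change above is mechanical but consequential: every generator in generation.h is templated on the scalar type T, yet the corner-point parameters were hard-coded as double, so single-precision instantiations still required double-precision points. A minimal sketch of what the corrected signatures allow, assuming a standard DOLFINx build; the include path, main() scaffolding, and mesh extents are illustrative, while the create_box<float> call follows the signature shown in this diff:

#include <mpi.h>

#include <dolfinx/mesh/generation.h>

int main(int argc, char* argv[])
{
  MPI_Init(&argc, &argv);
  {
    // The corner points p are now std::array<std::array<T, 3>, 2>, so a
    // float instantiation accepts float coordinates directly instead of
    // taking a double array that is narrowed internally.
    dolfinx::mesh::Mesh<float> mesh = dolfinx::mesh::create_box<float>(
        MPI_COMM_WORLD, {{{0.0f, 0.0f, 0.0f}, {1.0f, 1.0f, 1.0f}}},
        {8, 8, 8}, dolfinx::mesh::CellType::tetrahedron);
  } // Destroy the mesh before finalizing MPI.
  MPI_Finalize();
  return 0;
}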
6 changes: 3 additions & 3 deletions python/dolfinx/wrappers/mesh.cpp
@@ -241,7 +241,7 @@ void declare_mesh(nb::module_& m, std::string type)
   std::string create_interval("create_interval_" + type);
   m.def(
       create_interval.c_str(),
-      [](MPICommWrapper comm, std::int64_t n, std::array<double, 2> p,
+      [](MPICommWrapper comm, std::int64_t n, std::array<T, 2> p,
          dolfinx::mesh::GhostMode ghost_mode,
          const PythonCellPartitionFunction& part)
       {
@@ -254,7 +254,7 @@ void declare_mesh(nb::module_& m, std::string type)
   std::string create_rectangle("create_rectangle_" + type);
   m.def(
       create_rectangle.c_str(),
-      [](MPICommWrapper comm, std::array<std::array<double, 2>, 2> p,
+      [](MPICommWrapper comm, std::array<std::array<T, 2>, 2> p,
          std::array<std::int64_t, 2> n, dolfinx::mesh::CellType celltype,
          const PythonCellPartitionFunction& part,
          dolfinx::mesh::DiagonalType diagonal)
@@ -269,7 +269,7 @@ void declare_mesh(nb::module_& m, std::string type)
   std::string create_box("create_box_" + type);
   m.def(
       create_box.c_str(),
-      [](MPICommWrapper comm, std::array<std::array<double, 3>, 2> p,
+      [](MPICommWrapper comm, std::array<std::array<T, 3>, 2> p,
          std::array<std::int64_t, 3> n, dolfinx::mesh::CellType celltype,
          const PythonCellPartitionFunction& part)
       {
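In the Python wrappers the same lambdas are registered once per scalar type (declare_mesh builds each binding name from a type suffix, e.g. "create_interval_" + type), so the lambda parameters must also use T for the single-precision variants to accept matching input. A minimal sketch of that registration pattern, not taken from the commit: the module name demo and the function interval_length are hypothetical, and it assumes nanobind together with its std::array caster.

#include <array>
#include <concepts>
#include <cstdint>
#include <string>

#include <nanobind/nanobind.h>
#include <nanobind/stl/array.h>

namespace nb = nanobind;

// Stand-in for the declare_mesh pattern: one binding per scalar type,
// with the endpoint argument typed as T rather than hard-coded double.
template <std::floating_point T>
void declare_demo(nb::module_& m, std::string type)
{
  std::string name = "interval_length_" + type;
  m.def(name.c_str(),
        [](std::int64_t n, std::array<T, 2> p)
        {
          // Uniform cell size of an interval mesh, computed in T throughout.
          return (p[1] - p[0]) / static_cast<T>(n);
        });
}

NB_MODULE(demo, m)
{
  declare_demo<float>(m, "float32");  // demo.interval_length_float32
  declare_demo<double>(m, "float64"); // demo.interval_length_float64
}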
