This Message Is From an External Sender
This message came from outside your organization.
Hello,
I'd like to solve transient heat transport at a particle scale using TS, with the per-particle equation being something like dT_i / dt = (S(T_i) + sum(F(T_j, T_i), j connecting to i)) with a nonlinear source term S and a conduction term F. The particles can move, deform and grow/shrink/vanish on a voxel grid, but for the temperature a particle-scale resolution should be sufficient. The particles' connectivity will change during the simulation, but is assumed constant during a single timestep. I have a data structure tracking the particles' connectivity, so I can say which particles should conduct heat to each other. I exploit symmetry and so only save the "forward" edges, so e.g. for touching particles 1->2->3, I only store [[2], [3], []], from which the full list [[2], [1, 3], [2]] could be reconstructed but which I'd like to avoid. In parallel each worker would own some of the particle data, so e.g. for the 1->2->3 example and 2 workers, worker 0 could own [[2]] and worker 1 [[3],[]]. Looking over the DM variants, either DMNetwork or some manual mesh build with DMPlex seem suited for this. I'd especially like it if the adjacency information is handled by the DM automagically based on the edges so I don't have to deal with ghost particle communication myself. I already tried something basic with DMNetwork, though for some reason the offsets I get from DMNetworkGetGlobalVecOffset() are larger than the actual network. I've attached what I have so far but comparing to e.g. src/snes/tutorials/network/ex1.c I don't see what I'm doing wrong if I don't need data at the edges. I might not be seeing the forest for the trees though. The output with -dmnetwork_view looks reasonable to me. Any help in fixing this approach, or, if it seems suitable, pointers to using DMPlex for this problem, would be appreciated. Best regards, Marco
#include <petsc.h>
#include <petscdmnetwork.h>
#include <petscts.h>
#include <mpi.h>
#include <assert.h>
// different test networks
//#define NEDGES 2
//#define NEDGES 3
#define NEDGES 24
PetscErrorCode buildGraphLaplacian(DM networkdm, int subnetnum, int compkey, Mat mat);
int main(int argc, char **argv)
{
//PetscFunctionBeginUser;
PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
int nph = 16;
double *T = (double *)malloc(sizeof(*T) * nph);
for (int i = 0; i < nph; i++) { T[i] = 1; }
DM networkdm;
MPI_Comm comm;
PetscInt compkey;
// create network and register the vertex dof
// step 1/2 of manual
PetscCall(DMNetworkCreate(PETSC_COMM_WORLD, &networkdm));
PetscCall(DMNetworkRegisterComponent(networkdm, "temperature", sizeof(*T), &compkey));
PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));
int rank, size;
PetscCallMPI(MPI_Comm_rank(comm, &rank));
PetscCallMPI(MPI_Comm_size(comm, &size));
// only a single network
// step 3 manual
PetscCall(DMNetworkSetNumSubNetworks(networkdm, PETSC_DECIDE, 1));
PetscInt nedges = NEDGES;
// since C doesn't like non-const sizes for arrays use a #define
#if NEDGES == 2
PetscInt edges[2 * NEDGES] = {0, 2, 1, 2}; // example from DMNetworkAddSubnetwork for worker 0
#endif
#if NEDGES == 3
PetscInt edges[2 * NEDGES] = {0, 1, 1, 2, 2, 3}; // 0-1-2-3 forward connections
#endif
#if NEDGES == 24
// "forward" connectivity for a 4x4 square grid of touching particles
PetscInt edges[NEDGES * 2] = {0, 1, 0, 4, 1, 5, 1, 2, 2, 6, 2, 3, 3, 7, 4, 8, 4, 5, 5, 9, 5, 6, 6, 10, 6, 7, 7, 11, 8, 12, 8, 9, 9, 13, 9, 10, 10, 14, 10, 11, 11, 15, 12, 13, 13, 14, 14, 15};
// this can be constructed directly from what I have available
#endif
// add edge data and setup the graph
// step 4/5 of manual
PetscInt subnetnum;
PetscCall(DMNetworkAddSubnetwork(networkdm, "temperature", nedges, edges, &subnetnum));
PetscCall(DMNetworkLayoutSetUp(networkdm));
// get graph info to iter over vertices
PetscInt nv, ne;
const PetscInt *vtx, *retedges;
PetscCall(DMNetworkGetSubnetwork(networkdm, subnetnum, &nv, &ne, &vtx, &retedges));
// add the backing memory for the vertex dofs
// step 6 of manual on the just defined subnetwork
for (PetscInt i = 0; i < nv; i++) { PetscCall(DMNetworkAddComponent(networkdm, i, compkey, &(T[i]), 1)); }
// for(int i=0; i<ne; i++) {
// do I need to add the edges as a separate component when I only use them for adjacency?
// //PetscCall(DMNetworkAddComponent(networkdm, i, compkey, NULL, 0));
// PetscCall(DMNetworkAddComponent(networkdm, i, compkey, &(T[i]), 1));
// }
// setup the DM and distribute
// step 7/8 of the manual
PetscCall(DMSetUp(networkdm));
PetscCall(DMNetworkDistribute(&networkdm, 0));
// to get to know how this works, try to construct a laplacian matrix between connected vertices
Mat mat;
PetscCall(DMCreateMatrix(networkdm, &mat));
PetscCall(buildGraphLaplacian(networkdm, subnetnum, compkey, mat));
PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
// later on...
// TS ts;
// PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
// PetscCall(TSSetDM(ts, (DM)networkdm));
// PetscCall(TSSetMaxSteps(ts, 1));
// PetscCall(TSSetType(ts, TSBEULER));
// TODO memory cleanup
PetscCall(PetscFinalize());
return 0;
}
/*
  Assemble a graph-Laplacian-like backward-Euler matrix on the vertices of a
  DMNetwork subnetwork.

  dT_i/dt ~ -sum(T_i - T_j) discretized with backward Euler (dt, areas, ...
  all set to 1) gives for each owned vertex i with n neighbours:
    diagonal entry  n+1   (identity + n from the Laplacian stencil)
    entry -1 in each neighbour column
  BCs are implicit in the connectivity; doing nothing else corresponds to
  zero-Neumann BCs.

  Parameters:
    networkdm - distributed DMNetwork (after DMNetworkDistribute)
    subnetnum - subnetwork index returned by DMNetworkAddSubnetwork
    compkey   - component key of the per-vertex temperature dof
    mat       - matrix from DMCreateMatrix(networkdm, ...), filled here;
                caller performs MatAssemblyBegin/End
*/
PetscErrorCode buildGraphLaplacian(DM networkdm, PetscInt subnetnum, PetscInt compkey, Mat mat)
{
  PetscInt        nv, ne, m, n;
  const PetscInt *vtx, *retedges;

  PetscFunctionBegin;
  // mat sizes for sanity checking the computed row indices
  PetscCall(MatGetSize(mat, &m, &n));
  // network info (must be re-queried after distribution)
  PetscCall(DMNetworkGetSubnetwork(networkdm, subnetnum, &nv, &ne, &vtx, &retedges));
  for (PetscInt v = 0; v < nv; v++) {
    PetscBool isghost;
    PetscCall(DMNetworkIsGhostVertex(networkdm, vtx[v], &isghost));
    if (isghost) continue; // ghost rows are assembled by their owning rank

    PetscInt        nconnedges, row;
    const PetscInt *connedges;
    // Per the DMNetworkGetGlobalVecOffset docs this offset is directly usable
    // as a global row index in MatSetValues().
    PetscCall(DMNetworkGetGlobalVecOffset(networkdm, vtx[v], compkey, &row));
    PetscCall(DMNetworkGetSupportingEdges(networkdm, vtx[v], &nconnedges, &connedges));
    PetscCheck(row >= 0 && row < m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row %" PetscInt_FMT " out of range [0,%" PetscInt_FMT ")", row, m);

    PetscInt    *cols;
    PetscScalar *vals;
    PetscCall(PetscMalloc2(nconnedges + 1, &cols, nconnedges + 1, &vals));
    PetscInt pos = 0;
    cols[pos]    = row;
    vals[pos]    = (PetscScalar)(nconnedges + 1); // center of stencil with h=1
    pos++;
    for (PetscInt i = 0; i < nconnedges; i++) {
      const PetscInt *cone;
      PetscCall(DMNetworkGetConnectedVertices(networkdm, connedges[i], &cone));
      // BUG FIX: an edge's cone is (vfrom, vto) and vtx[v] can be either end,
      // since only "forward" edges are stored. The neighbour is the OTHER
      // endpoint; the original code always used cone[1], which produced a
      // diagonal entry whenever vtx[v] == cone[1].
      PetscInt nb = (cone[0] == vtx[v]) ? cone[1] : cone[0];
      PetscInt goffset;
      PetscCall(DMNetworkGetGlobalVecOffset(networkdm, nb, compkey, &goffset));
      // Neighbours that are ghost points on this rank get their global offset
      // encoded as -(offset+1); decode before using it as a column index.
      if (goffset < 0) goffset = -goffset - 1;
      cols[pos] = goffset;
      vals[pos] = -1.0; // neighbour in Laplacian stencil with h=1
      pos++;
    }
    PetscCall(MatSetValues(mat, 1, &row, pos, cols, vals, INSERT_VALUES));
    PetscCall(PetscFree2(cols, vals));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
