Actual source code: shvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:    This file contains routines for Parallel vector operations that use shared memory
  4:  */
  5: #include "../src/vec/vec/impls/mpi/pvecimpl.h"

  7: #if defined(PETSC_USE_SHARED_MEMORY) 

  9: EXTERN PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

 13: PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
 14: {
 16:   Vec_MPI        *w = (Vec_MPI *)win->data;
 17:   PetscScalar    *array;


 21:   /* first processor allocates the entire array and sends its address to the others */
 22:   PetscSharedMalloc(((PetscObject)win)->comm,win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);

 24:   VecCreate(((PetscObject)win)->comm,v);
 25:   VecSetSizes(*v,win->map->n,win->map->N);
 26:   VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);

 28:   /* New vector should inherit stashing property of parent */
 29:   (*v)->stash.donotstash   = win->stash.donotstash;
 30:   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
 31: 
 32:   PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);
 33:   PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);

 35:   if (win->mapping) {
 36:     PetscObjectReference((PetscObject)win->mapping);
 37:     (*v)->mapping = win->mapping;
 38:   }
 39:   if (win->bmapping) {
 40:     PetscObjectReference((PetscObject)win->bmapping);
 41:     (*v)->bmapping = win->bmapping;
 42:   }
 43:   (*v)->ops->duplicate = VecDuplicate_Shared;
 44:   (*v)->map->bs    = win->map->bs;
 45:   (*v)->bstash.bs = win->bstash.bs;
 46:   return(0);
 47: }


 53: PetscErrorCode  VecCreate_Shared(Vec vv)
 54: {
 56:   PetscScalar    *array;

 59:   PetscSplitOwnership(((PetscObject)vv)->comm,&vv->map->n,&vv->map->N);
 60:   PetscSharedMalloc(((PetscObject)vv)->comm,vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);

 62:   VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);
 63:   vv->ops->duplicate = VecDuplicate_Shared;

 65:   return(0);
 66: }


 70: /* ----------------------------------------------------------------------------------------
 71:      Code to manage shared memory allocation using standard Unix shared memory
 72: */
 73: #include "petscsys.h"
 74: #if defined(PETSC_HAVE_PWD_H)
 75: #include <pwd.h>
 76: #endif
 77: #include <ctype.h>
 78: #include <sys/types.h>
 79: #include <sys/stat.h>
 80: #if defined(PETSC_HAVE_UNISTD_H)
 81: #include <unistd.h>
 82: #endif
 83: #if defined(PETSC_HAVE_STDLIB_H)
 84: #include <stdlib.h>
 85: #endif
 86: #if defined(PETSC_HAVE_SYS_PARAM_H)
 87: #include <sys/param.h>
 88: #endif
 89: #if defined(PETSC_HAVE_SYS_UTSNAME_H)
 90: #include <sys/utsname.h>
 91: #endif
 92: #include <fcntl.h>
 93: #include <time.h>  
 94: #if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
 95: #include <sys/systeminfo.h>
 96: #endif
 97: #include <sys/shm.h>
 98: #include <sys/mman.h>


101: static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;

105: /*
106:    Private routine to delete internal storage when a communicator is freed.
107:   This is called by MPI, not by users.

109:   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
110:   it was MPI_Comm *comm.  
111: */
112: static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void* attr_val,void* extra_state)
113: {

117:   PetscFree(attr_val);
118:   PetscFunctionReturn(MPI_SUCCESS);
119: }
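
The comment above describes Petsc_DeleteShared() as a delete callback that MPI invokes when a communicator carrying the attribute is freed. Purely as an illustration (this is not part of shvec.c, and the names delete_shared and attach_shared_state are invented), a minimal sketch of registering such a callback against a keyval and attaching it to a communicator looks like this; it mirrors the role of the static Petsc_Shared_keyval declared above.

#include <mpi.h>
#include <stdlib.h>

/* Delete callback with the signature MPI requires; it is run automatically
   when a communicator holding the attribute is freed. */
static int delete_shared(MPI_Comm comm,int keyval,void *attr_val,void *extra_state)
{
  free(attr_val);                                  /* release per-communicator storage */
  return MPI_SUCCESS;
}

static int shared_keyval = MPI_KEYVAL_INVALID;

/* Attach some per-communicator state; delete_shared() then runs at MPI_Comm_free(). */
static void attach_shared_state(MPI_Comm comm,size_t nbytes)
{
  if (shared_keyval == MPI_KEYVAL_INVALID) {       /* create the keyval only once */
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,delete_shared,&shared_keyval,NULL);
  }
  MPI_Comm_set_attr(comm,shared_keyval,malloc(nbytes));
}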

123: /*

125:     This routine is still incomplete and needs work.

127:     For this to work on Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf
128: cat /etc/sysctl.conf
129: kern.sysv.shmmax=67108864
130: kern.sysv.shmmin=1
131: kern.sysv.shmmni=32
132: kern.sysv.shmseg=512
133: kern.sysv.shmall=1024

135:   This does not currently free the shared memory after the program runs. Use the Unix command ipcs to list the shared memory segments in use and
136: ipcrm to remove them.

138: */
139: PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
140: {
142:   PetscInt       shift;
143:   PetscMPIInt    rank,flag;
144:   int            *arena,id,key = 0;
145:   char           *value;

148:   *result = 0;
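       /* Editorial note: the MPI_Scan below prefix-sums the local byte counts so each
          process obtains its starting offset (in bytes) into the single shared segment. */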

150:   MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);
151:   shift -= llen;

153:   MPI_Comm_rank(comm,&rank);
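       /* Editorial note: key is hardwired to 0, i.e. IPC_PRIVATE, so as written each
          process creates its own private segment rather than attaching to the one made
          by process 0; a shared key (e.g. from ftok()) is needed here, consistent with
          the "still incomplete" remark above. */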
154:   if (!rank) {
155:     id = shmget(key,len, 0666 |IPC_CREAT);
156:     if (id == -1) {
157:       perror("Unable to malloc shared memory");
158:       SETERRQ(PETSC_ERR_LIB,"Unable to malloc shared memory");
159:     }
160:   } else {
161:     id = shmget(key,len, 0666);
162:     if (id == -1) {
163:       perror("Unable to malloc shared memory");
164:       SETERRQ(PETSC_ERR_LIB,"Unable to malloc shared memory");
165:     }
166:   }
167:   value = shmat(id,(void*)0,0);
168:   if (value == (char*)-1) {
169:     perror("Unable to access shared memory allocated");
170:     SETERRQ(PETSC_ERR_LIB,"Unable to access shared memory allocated");
171:   }
172:   *result = (void*) (value + shift);

174:   return(0);
175: }
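
As the comment before PetscSharedMalloc() notes, nothing in this file detaches or removes the segment, which is why ipcs/ipcrm are needed afterwards. Purely as an illustration (not code from shvec.c; the name PetscSharedFree_Sketch and its parameters are invented), cleanup would look roughly like the following, where base is the address originally returned by shmat(), not the shifted pointer handed back to the caller.

#include <sys/ipc.h>
#include <sys/shm.h>

/* Detach this process's mapping and, on one designated process, mark the
   System V segment for removal so it does not outlive the program. */
static int PetscSharedFree_Sketch(int id,void *base,int remove_segment)
{
  if (shmdt(base) == -1) return -1;                  /* undo the shmat() */
  if (remove_segment) {
    if (shmctl(id,IPC_RMID,NULL) == -1) return -1;   /* destroyed once every process has detached */
  }
  return 0;
}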

177: #else


186: PetscErrorCode  VecCreate_Shared(Vec vv)
187: {
189:   PetscMPIInt    size;

192:   MPI_Comm_size(((PetscObject)vv)->comm,&size);
193:   if (size > 1) {
194:     SETERRQ(PETSC_ERR_SUP_SYS,"Not supported for shared memory vector objects on this machine");
195:   }
196:   VecCreate_Seq(vv);
197:   return(0);
198: }

201: #endif

205: /*@
206:    VecCreateShared - Creates a parallel vector that uses shared memory.

208:    Input Parameters:
209: .  comm - the MPI communicator to use
210: .  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
211: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

213:    Output Parameter:
214: .  vv - the vector

216:    Collective on MPI_Comm
217:  
218:    Notes:
219:    Currently VecCreateShared() is available only when PETSc is built with shared-memory support (PETSC_USE_SHARED_MEMORY); on other
220:    machines this routine creates a standard sequential vector and raises an error if the communicator contains more than one process.

222:    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
223:    same type as an existing vector.

225:    Level: advanced

227:    Concepts: vectors^creating with shared memory

229: .seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(), 
230:           VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

232: @*/
233: PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
234: {

238:   VecCreate(comm,v);
239:   VecSetSizes(*v,n,N);
240:   VecSetType(*v,VECSHARED);
241:   return(0);
242: }
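
A short usage sketch may be helpful alongside the man page above; it is not part of shvec.c. It assumes the PETSc API of roughly this file's vintage (in later releases VecDestroy() takes a Vec* rather than a Vec), and the global length of 100 is arbitrary.

#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x,y;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,(char*)0,(char*)0);if (ierr) return ierr;
  ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&y);CHKERRQ(ierr);      /* y uses the same shared-memory layout as x */
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = VecDestroy(y);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}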