Actual source code: vecio.c
1: #define PETSCVEC_DLL
2: /*
3: This file contains simple binary input routines for vectors. The
4: analogous output routines are within each vector implementation's
5: VecView (with viewer types PETSC_VIEWER_BINARY)
6: */
8: #include "petscsys.h"
9: #include "petscvec.h"
10: #include "private/vecimpl.h"
11: #if defined(PETSC_HAVE_PNETCDF)
13: #include "pnetcdf.h"
15: #endif
16: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
17: EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer, Vec*);
18: EXTERN PetscErrorCode VecLoad_HDF5(PetscViewer, Vec*);
19: EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer, Vec);
20: EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer, Vec);
24: /*@C
25: VecLoad - Loads a vector that has been stored in binary format
26: with VecView().
28: Collective on PetscViewer
30: Input Parameters:
31: + viewer - binary file viewer, obtained from PetscViewerBinaryOpen(), or
32: NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
33: - outtype - the type of vector: VECSEQ, VECMPI, or PETSC_NULL (which indicates
34: using VECSEQ if the communicator in the viewer is of size 1, and
35: VECMPI otherwise)
37: Output Parameter:
38: . newvec - the newly loaded vector
40: Level: intermediate
42: Notes:
43: The input file must contain the full global vector, as
44: written by the routine VecView().
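
   For example, a vector previously saved with VecView() can typically be loaded with
   a sequence like the following (an illustrative sketch; the file name "vec.dat" is a
   placeholder and error checking is omitted):
.vb
     PetscViewer viewer;
     Vec         x;

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
     VecLoad(viewer,PETSC_NULL,&x);   /* PETSC_NULL: choose VECSEQ or VECMPI automatically */
     PetscViewerDestroy(viewer);
     /* ... use x ... */
     VecDestroy(x);
.ve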
46: Notes for advanced users:
47: Most users should not need to know the details of the binary storage
48: format, since VecLoad() and VecView() completely hide these details.
49: But for anyone who is interested, the standard binary vector storage
50: format is
51: .vb
52: int VEC_FILE_COOKIE
53: int number of rows
54: PetscScalar *values of all entries
55: .ve
57: In addition, PETSc automatically performs the byte swapping for
58: machines that store the bytes in reversed order, e.g. DEC Alpha, FreeBSD,
59: Linux, Windows, and the Intel Paragon; thus if you write your own binary
60: read/write routines you must swap the bytes yourself; see PetscBinaryRead()
61: and PetscBinaryWrite() for how this may be done.
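
   As a minimal single-process sketch (assuming the file "vec.dat" was produced by
   VecView() with a binary viewer), the raw format can also be read directly with the
   PETSc binary I/O routines, which perform any needed byte swapping automatically:
.vb
     int         fd;
     PetscInt    cookie,rows;
     PetscScalar *values;

     PetscBinaryOpen("vec.dat",FILE_MODE_READ,&fd);
     PetscBinaryRead(fd,&cookie,1,PETSC_INT);       /* should equal VEC_FILE_COOKIE */
     PetscBinaryRead(fd,&rows,1,PETSC_INT);         /* global number of rows */
     PetscMalloc(rows*sizeof(PetscScalar),&values);
     PetscBinaryRead(fd,values,rows,PETSC_SCALAR);  /* all vector entries */
     PetscBinaryClose(fd);
     PetscFree(values);
.ve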
63: Concepts: vector^loading from file
65: .seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
66: @*/
67: PetscErrorCode VecLoad(PetscViewer viewer, const VecType outtype,Vec *newvec)
68: {
70: PetscTruth isbinary,flg;
71: char vtype[256];
72: const char *prefix;
73: #if defined(PETSC_HAVE_PNETCDF)
74: PetscTruth isnetcdf;
75: #endif
76: #if defined(PETSC_HAVE_HDF5)
77: PetscTruth ishdf5;
78: #endif
83: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
84: #if defined(PETSC_HAVE_HDF5)
85: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
86: #endif
87: #if defined(PETSC_HAVE_PNETCDF)
88: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
89: #endif
91: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
92: VecInitializePackage(PETSC_NULL);
93: #endif
95: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
96: #if defined(PETSC_HAVE_PNETCDF)
97: if (isnetcdf) {
98: VecLoad_Netcdf(viewer,newvec);
99: } else
100: #endif
101: #if defined(PETSC_HAVE_HDF5)
102: if (ishdf5) {
103: SETERRQ(PETSC_ERR_SUP,"Since HDF5 format gives ASCII name for each object in file; must use VecLoadIntoVector() after setting name of Vec with PetscObjectSetName()");
104: } else
105: #endif
106: {
107: Vec factory;
108: MPI_Comm comm;
109: PetscErrorCode (*r)(PetscViewer, const VecType,Vec*);
110: PetscMPIInt size;
112: PetscObjectGetOptionsPrefix((PetscObject)viewer,(const char**)&prefix);
113: PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
114: if (flg) {
115: outtype = vtype;
116: }
117: PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
118: if (flg) {
119: outtype = vtype;
120: }
121: PetscObjectGetComm((PetscObject)viewer,&comm);
122: if (!outtype) {
123: MPI_Comm_size(comm,&size);
124: outtype = (size > 1) ? VECMPI : VECSEQ;
125: }
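      /* Create a throwaway "factory" vector of the requested type solely to look up its type-specific load routine */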
127: VecCreate(comm,&factory);
128: VecSetSizes(factory,1,PETSC_DETERMINE);
129: VecSetType(factory,outtype);
130: r = factory->ops->load;
131: VecDestroy(factory);
132: if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
133: (*r)(viewer,outtype,newvec);
134: }
135: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
136: return(0);
137: }
139: #if defined(PETSC_HAVE_PNETCDF)
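/*
   VecLoad_Netcdf - Reads the global size from dimension 0 of the NetCDF file, creates a
   vector distributed over the viewer's communicator, and fills the local portion with a
   collective parallel read (ncmpi_get_vara_double_all()).
*/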
142: PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
143: {
145: PetscMPIInt rank;
146: PetscInt N,n,bs;
147: PetscInt ncid,start;
148: Vec vec;
149: PetscScalar *avec;
150: MPI_Comm comm;
151: PetscTruth flag;
152: char name[NC_MAX_NAME];
155: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
156: PetscObjectGetComm((PetscObject)viewer,&comm);
157: MPI_Comm_rank(comm,&rank);
158: PetscViewerNetcdfGetID(viewer,&ncid);
159: ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
160: VecCreate(comm,&vec);
161: VecSetSizes(vec,PETSC_DECIDE,N);
162: if (!rank) {
163: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
164: if (flag) {
165: VecSetBlockSize(vec,bs);
166: }
167: }
168: VecSetFromOptions(vec);
169: VecGetLocalSize(vec,&n);
170: VecGetOwnershipRange(vec,&start,PETSC_NULL);
171: VecGetArray(vec,&avec);
172: ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
173: VecRestoreArray(vec,&avec);
174: *newvec = vec;
175: VecAssemblyBegin(vec);
176: VecAssemblyEnd(vec);
177: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
178: return(0);
179: }
180: #endif
182: #include "petscmat.h" /* so that MAT_FILE_COOKIE is defined */
186: PetscErrorCode VecLoad_Binary(PetscViewer viewer, const VecType itype,Vec *newvec)
187: {
188: PetscMPIInt size,rank,tag;
189: int fd;
190: PetscInt i,rows,type,n,*range,bs,tr[2];
192: Vec vec;
193: PetscScalar *avec,*avecwork;
194: MPI_Comm comm;
195: MPI_Request request;
196: MPI_Status status;
197: PetscTruth flag;
198: #if defined(PETSC_HAVE_MPIIO)
199: PetscTruth useMPIIO;
200: #endif
203: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
204: PetscViewerBinaryGetDescriptor(viewer,&fd);
205: PetscObjectGetComm((PetscObject)viewer,&comm);
206: MPI_Comm_rank(comm,&rank);
207: MPI_Comm_size(comm,&size);
209: /* Read vector header. */
210: PetscViewerBinaryRead(viewer,tr,2,PETSC_INT);
211: type = tr[0];
212: rows = tr[1];
213: if (type != VEC_FILE_COOKIE) {
214: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
215: if (type == MAT_FILE_COOKIE) {
216: SETERRQ(PETSC_ERR_ARG_WRONG,"Matrix is next in file, not a vector as you requested");
217: } else {
218: SETERRQ(PETSC_ERR_ARG_WRONG,"Not a vector next in file");
219: }
220: }
221: VecCreate(comm,&vec);
222: VecSetSizes(vec,PETSC_DECIDE,rows);
223: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
224: if (flag) {
225: VecSetBlockSize(vec,bs);
226: }
227: VecSetFromOptions(vec);
228: VecGetLocalSize(vec,&n);
229: PetscObjectGetNewTag((PetscObject)viewer,&tag);
230: VecGetArray(vec,&avec);
231: #if defined(PETSC_HAVE_MPIIO)
232: PetscViewerBinaryGetMPIIO(viewer,&useMPIIO);
233: if (!useMPIIO) {
234: #endif
235: if (!rank) {
236: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
238: if (size > 1) {
239: /* read in other chunks and send to other processors */
240: /* determine maximum chunk owned by any other processor */
241: range = vec->map->range;
242: n = 1;
243: for (i=1; i<size; i++) {
244: n = PetscMax(n,range[i+1] - range[i]);
245: }
246: PetscMalloc(n*sizeof(PetscScalar),&avecwork);
247: for (i=1; i<size; i++) {
248: n = range[i+1] - range[i];
249: PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
250: MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
251: MPI_Wait(&request,&status);
252: }
253: PetscFree(avecwork);
254: }
255: } else {
256: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
257: }
258: #if defined(PETSC_HAVE_MPIIO)
259: } else {
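    /* MPI-IO path: every process reads its own contiguous block of the file collectively,
       through a one-dimensional subarray view of the global data */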
260: PetscMPIInt gsizes[1],lsizes[1],lstarts[1];
261: MPI_Datatype view;
262: MPI_File mfdes;
263: MPI_Aint ub,ul;
264: MPI_Offset off;
266: gsizes[0] = PetscMPIIntCast(rows);
267: lsizes[0] = PetscMPIIntCast(n);
268: lstarts[0] = PetscMPIIntCast(vec->map->rstart);
269: MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
270: MPI_Type_commit(&view);
272: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
273: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
274: MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char *)"native",MPI_INFO_NULL);
275: MPIU_File_read_all(mfdes,avec,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
276: MPI_Type_get_extent(view,&ul,&ub);
277: PetscViewerBinaryAddMPIIOOffset(viewer,ub);
278: MPI_Type_free(&view);
279: }
280: #endif
281: VecRestoreArray(vec,&avec);
282: *newvec = vec;
283: VecAssemblyBegin(vec);
284: VecAssemblyEnd(vec);
285: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
286: return(0);
287: }
289: #if defined(PETSC_HAVE_PNETCDF)
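/*
   VecLoadIntoVector_Netcdf - Like VecLoad_Netcdf(), but reads into an already created
   vector; the global length stored in the file must match the length of the given vector.
*/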
292: PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
293: {
295: PetscMPIInt rank;
296: PetscInt N,rows,n,bs;
297: PetscInt ncid,start;
298: PetscScalar *avec;
299: MPI_Comm comm;
300: PetscTruth flag;
301: char name[NC_MAX_NAME];
304: PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
305: PetscObjectGetComm((PetscObject)viewer,&comm);
306: MPI_Comm_rank(comm,&rank);
307: PetscViewerNetcdfGetID(viewer,&ncid);
308: ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
309: if (!rank) {
310: VecGetSize(vec,&rows);
311: if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file has different length than input vector");
312: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
313: if (flag) {
314: VecSetBlockSize(vec,bs);
315: }
316: }
317: VecSetFromOptions(vec);
318: VecGetLocalSize(vec,&n);
319: VecGetOwnershipRange(vec,&start,PETSC_NULL);
320: VecGetArray(vec,&avec);
321: ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
322: VecRestoreArray(vec,&avec);
323: VecAssemblyBegin(vec);
324: VecAssemblyEnd(vec);
325: PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
326: return(0);
327: }
328: #endif
330: #if defined(PETSC_HAVE_HDF5)
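/*
   VecLoadIntoVector_HDF5 - Opens the dataset whose name matches the name of the Vec
   (set with PetscObjectSetName()), selects the hyperslab corresponding to this process's
   ownership range, and performs a (collective, where supported) read into the local array.
*/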
333: PetscErrorCode VecLoadIntoVector_HDF5(PetscViewer viewer, Vec xin)
334: {
335: hsize_t rdim,dim = 1; /* Could have dim 2 for blocked vectors */
336: PetscInt n, N, bs, low;
337: PetscScalar *x;
338: PetscTruth flag;
339: hid_t file_id, dset_id, filespace, memspace, plist_id;
340: hsize_t dims[2];
341: hsize_t count[2];
342: hsize_t offset[2];
343: herr_t status;
345: const char *vecname;
348: PetscLogEventBegin(VEC_Load,viewer,xin,0,0);
349: PetscOptionsGetInt(PETSC_NULL, "-vecload_block_size", &bs, &flag);
350: if (flag) {
351: VecSetBlockSize(xin, bs);
352: }
353: VecSetFromOptions(xin);
355: PetscViewerHDF5GetFileId(viewer, &file_id);
357: /* Open the dataset with default properties */
358: PetscObjectGetName((PetscObject)xin,&vecname);
359: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
360: dset_id = H5Dopen2(file_id, vecname, H5P_DEFAULT);
361: #else
362: dset_id = H5Dopen(file_id, vecname);
363: #endif
364: if (dset_id == -1) SETERRQ1(PETSC_ERR_LIB,"Could not H5Dopen() with Vec named %s",vecname);
366: /* Retrieve the dataspace for the dataset */
367: VecGetSize(xin, &N);
368: filespace = H5Dget_space(dset_id);
369: if (filespace == -1) SETERRQ(PETSC_ERR_LIB,"Could not H5Dget_space()");
370: rdim = H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
371: #if defined(PETSC_USE_COMPLEX)
372: if (rdim != 2) SETERRQ1(PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not 2 (complex numbers) as expected",rdim);
373: #else
374: if (rdim != 1) SETERRQ1(PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not 1 as expected",rdim);
375: #endif
376: if (N != (int) dims[0]) SETERRQ2(PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%d) than input vector (%d)", (int) dims[0], N);
378: /* Each process defines a dataset and reads it from the hyperslab in the file */
379: VecGetLocalSize(xin, &n);
380: count[0] = n;
381: #if defined(PETSC_USE_COMPLEX)
382: count[1] = 2;
383: dim++;
384: #endif
385: memspace = H5Screate_simple(dim, count, NULL);
386: if (memspace == -1) SETERRQ(PETSC_ERR_LIB,"Could not H5Screate_simple()");
388: /* Select hyperslab in the file */
389: VecGetOwnershipRange(xin, &low, PETSC_NULL);
390: offset[0] = low;
391: #if defined(PETSC_USE_COMPLEX)
392: offset[1] = 0;
393: #endif
394: status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
396: /* Create property list for collective dataset read */
397: plist_id = H5Pcreate(H5P_DATASET_XFER);
398: if (plist_id == -1) SETERRQ(PETSC_ERR_LIB,"Could not H5Pcreate()");
399: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
400: status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
401: /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
402: #endif
404: VecGetArray(xin, &x);
405: status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
406: VecRestoreArray(xin, &x);
408: /* Close/release resources */
409: status = H5Pclose(plist_id);CHKERRQ(status);
410: status = H5Sclose(filespace);CHKERRQ(status);
411: status = H5Sclose(memspace);CHKERRQ(status);
412: status = H5Dclose(dset_id);CHKERRQ(status);
414: VecAssemblyBegin(xin);
415: VecAssemblyEnd(xin);
416: PetscLogEventEnd(VEC_Load,viewer,xin,0,0);
417: return(0);
418: }
419: #endif
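/*
   VecLoadIntoVector_Binary - Reads a binary vector into an already created vector;
   process 0 reads the header and each process's chunk from the file and forwards the
   chunks to the other processes with MPI point-to-point messages.
*/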
423: PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
424: {
426: PetscMPIInt size,rank,tag;
427: PetscInt i,rows,type,n,*range;
428: int fd;
429: PetscScalar *avec;
430: MPI_Comm comm;
431: MPI_Request request;
432: MPI_Status status;
435: PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
437: PetscViewerBinaryGetDescriptor(viewer,&fd);
438: PetscObjectGetComm((PetscObject)viewer,&comm);
439: MPI_Comm_rank(comm,&rank);
440: MPI_Comm_size(comm,&size);
442: if (!rank) {
443: /* Read vector header. */
444: PetscBinaryRead(fd,&type,1,PETSC_INT);
445: if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
446: PetscBinaryRead(fd,&rows,1,PETSC_INT);
447: VecGetSize(vec,&n);
448: if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file has different length than input vector");
449: MPI_Bcast(&rows,1,MPIU_INT,0,comm);
451: VecSetFromOptions(vec);
452: VecGetLocalSize(vec,&n);
453: VecGetArray(vec,&avec);
454: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
455: VecRestoreArray(vec,&avec);
457: if (size > 1) {
458: /* read in other chunks and send to other processors */
459: /* determine maximum chunk owned by any other processor */
460: range = vec->map->range;
461: n = 1;
462: for (i=1; i<size; i++) {
463: n = PetscMax(n,range[i+1] - range[i]);
464: }
465: PetscMalloc(n*sizeof(PetscScalar),&avec);
466: PetscObjectGetNewTag((PetscObject)viewer,&tag);
467: for (i=1; i<size; i++) {
468: n = range[i+1] - range[i];
469: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
470: MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
471: MPI_Wait(&request,&status);
472: }
473: PetscFree(avec);
474: }
475: } else {
476: MPI_Bcast(&rows,1,MPIU_INT,0,comm);
477: VecSetFromOptions(vec);
478: VecGetLocalSize(vec,&n);
479: PetscObjectGetNewTag((PetscObject)viewer,&tag);
480: VecGetArray(vec,&avec);
481: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
482: VecRestoreArray(vec,&avec);
483: }
484: VecAssemblyBegin(vec);
485: VecAssemblyEnd(vec);
486: PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
487: return(0);
488: }
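/*
   VecLoadIntoVector_Default - Dispatches to the binary, NetCDF, or HDF5 loader according
   to the type of the given viewer.
*/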
492: PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
493: {
494: PetscTruth isbinary;
495: #if defined(PETSC_HAVE_PNETCDF)
496: PetscTruth isnetcdf;
497: #endif
498: #if defined(PETSC_HAVE_HDF5)
499: PetscTruth ishdf5;
500: #endif
504: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
505: #if defined(PETSC_HAVE_PNETCDF)
506: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
507: #endif
508: #if defined(PETSC_HAVE_HDF5)
509: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
510: #endif
512: if (isbinary) {
513: VecLoadIntoVector_Binary(viewer,vec);
514: #if defined(PETSC_HAVE_PNETCDF)
515: } else if (isnetcdf) {
516: VecLoadIntoVector_Netcdf(viewer,vec);
517: #endif
518: #if defined(PETSC_HAVE_HDF5)
519: } else if (ishdf5) {
520: VecLoadIntoVector_HDF5(viewer,vec);
521: #endif
522: } else {
523: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
524: }
525: return(0);
526: }