From 99f5ec3f24a6b2cec19695ba945b0c29effd3ba0 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Mon, 27 Nov 2023 09:22:29 +0000 Subject: [PATCH 01/33] Include header for `size_t` in `nclist.h` --- include/nclist.h | 1 + 1 file changed, 1 insertion(+) diff --git a/include/nclist.h b/include/nclist.h index bad224ac2d..615b0c0da2 100644 --- a/include/nclist.h +++ b/include/nclist.h @@ -4,6 +4,7 @@ #define NCLIST_H 1 #include "ncexternl.h" +#include /* Define the type of the elements in the list*/ From 653e09fd6d5b65bad3ee748e3f54af720fd40b3e Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Mon, 27 Nov 2023 11:36:03 +0000 Subject: [PATCH 02/33] Try to more consistently use `size_t` for `nclistget` index argument --- libdap2/cache.c | 12 +++++---- libdap2/cdf.c | 27 +++++++++++---------- libdap2/constraints.c | 42 +++++++++++++++++--------------- libdap2/dapattr.c | 3 ++- libdap2/dapcvt.c | 7 +++--- libdap2/dapdump.c | 19 ++++++++------- libdap2/daputil.c | 7 +++--- libdap2/dceconstraints.c | 19 ++++++++------- libdap2/dceparse.c | 5 ++-- libdap2/getvara.c | 13 +++++----- libdap2/ncd2dispatch.c | 50 ++++++++++++++++++++------------------ libdap4/d4data.c | 15 +++++++----- libdap4/d4debug.c | 6 +++-- libdap4/d4dump.c | 3 ++- libdap4/d4file.c | 3 ++- libdap4/d4fix.c | 5 ++-- libdap4/d4meta.c | 51 +++++++++++++++++++++++---------------- libdap4/d4parser.c | 17 +++++++------ libdap4/d4printer.c | 20 +++++++-------- libdap4/d4swap.c | 3 ++- libdap4/d4util.c | 5 ++-- libdap4/d4varx.c | 14 +++++++---- libdap4/ncd4dispatch.c | 3 ++- libdispatch/dhttp.c | 3 ++- libdispatch/dinfermodel.c | 30 +++++++++++++---------- libdispatch/drc.c | 26 +++++++++++--------- libdispatch/ds3util.c | 3 ++- libdispatch/dutil.c | 5 ++-- libdispatch/nclist.c | 3 ++- libdispatch/ncuri.c | 27 +++++++++++---------- libhdf5/hdf5filter.c | 8 +++--- libhdf5/hdf5internal.c | 3 ++- libhdf5/hdf5open.c | 8 +++--- libhdf5/hdf5type.c | 5 ++-- libhdf5/nc4hdf.c | 7 +++--- libnczarr/zarr.c | 6 +++-- libnczarr/zfile.c | 3 ++- libnczarr/zfilter.c | 28 +++++++++++++-------- libnczarr/zmap.c | 3 ++- libnczarr/zmap_file.c | 6 +++-- libnczarr/zsync.c | 32 +++++++++++++++--------- libnczarr/ztype.c | 5 ++-- libnczarr/zutil.c | 6 +++-- libnczarr/zxcache.c | 9 ++++--- libsrc4/nc4internal.c | 7 +++--- libsrc4/nc4type.c | 19 ++++++++------- libsrc4/ncindex.c | 9 ++++--- ncdump/nc4printer.c | 9 ++++--- nczarr_test/ut_map.c | 8 +++--- nczarr_test/ut_mapapi.c | 9 ++++--- nczarr_test/zmapio.c | 15 ++++++------ oc2/dapparse.c | 3 ++- oc2/ocnode.c | 5 ++-- 53 files changed, 380 insertions(+), 279 deletions(-) diff --git a/libdap2/cache.c b/libdap2/cache.c index fb6afdfa7f..aa0ddd97d2 100644 --- a/libdap2/cache.c +++ b/libdap2/cache.c @@ -5,6 +5,7 @@ #include "dapincludes.h" #include "dapdump.h" +#include /* Grads servers always require a constraint, @@ -92,7 +93,7 @@ else NCerror prefetchdata(NCDAPCOMMON* nccomm) { - int i; + size_t i; NCFLAGS flags; NCerror ncstat = NC_NOERR; NClist* allvars = nccomm->cdf.ddsroot->tree->varnodes; @@ -341,7 +342,7 @@ fprintf(stderr,"freecachenode: %s\n", void freenccache(NCDAPCOMMON* nccomm, NCcache* cache) { - int i; + size_t i; if(cache == NULL) return; freenccachenode(nccomm,cache->prefetch); for(i=0;inodes);i++) { @@ -367,7 +368,8 @@ createnccache(void) static int iscacheableprojection(DCEprojection* proj) { - int i,cacheable; + size_t i; + int cacheable; if(proj->discrim != CES_VAR) return 0; cacheable = 1; /* assume so */ for(i=0;ivar->segments);i++) { @@ -380,7 +382,7 @@ iscacheableprojection(DCEprojection* proj) 
static int iscacheableconstraint(DCEconstraint* con) { - int i; + size_t i; if(con == NULL) return 1; if(con->selections != NULL && nclistlength(con->selections) > 0) return 0; /* can't deal with selections */ @@ -400,7 +402,7 @@ A variable is prefetchable if NCerror markprefetch(NCDAPCOMMON* nccomm) { - int i,j; + size_t i,j; NClist* allvars = nccomm->cdf.fullddsroot->tree->varnodes; assert(allvars != NULL); /* mark those variables of sufficiently small size */ diff --git a/libdap2/cdf.c b/libdap2/cdf.c index 4349dbe687..8db0bc845b 100644 --- a/libdap2/cdf.c +++ b/libdap2/cdf.c @@ -6,6 +6,7 @@ #include "dapincludes.h" #include "daputil.h" #include "dapdump.h" +#include #ifdef DAPDEBUG extern char* ocfqn(OCddsnode); @@ -433,7 +434,7 @@ we expected a grid. static int restructr(NCDAPCOMMON* ncc, CDFnode* dxdparent, CDFnode* patternparent, NClist* repairlist) { - int index, i, j, match; + size_t index, i, j, match; #ifdef DEBUG fprintf(stderr,"restruct: matched: %s -> %s\n", @@ -501,7 +502,7 @@ static NCerror repairgrids(NCDAPCOMMON* ncc, NClist* repairlist) { NCerror ncstat = NC_NOERR; - int i; + size_t i; assert(nclistlength(repairlist) % 2 == 0); for(i=0;isubnodes; for(i=0;icdf.ddsroot->tree->nodes; for(i=0;ibasenode; if(basenode == NULL) continue; @@ -689,7 +690,7 @@ dimimprint(NCDAPCOMMON* nccomm) if(noderank == 0) continue; ASSERT(noderank == baserank); #ifdef DEBUG -fprintf(stderr,"dimimprint %s/%d -> %s/%d\n", +fprintf(stderr,"dimimprint %s/%zu -> %s/%zu\n", makecdfpathstring(basenode,"."), noderank, makecdfpathstring(node,"."), @@ -725,7 +726,7 @@ static NClist* clonedimset(NCDAPCOMMON* nccomm, NClist* dimset, CDFnode* var) { NClist* result = NULL; - int i; + size_t i; for(i=0;iocname,(int)nclistlength(dimsetall)) static NCerror definetransdimset(NCDAPCOMMON* nccomm/*notused*/, CDFnode* node) { - int i; + size_t i; int ncstat = NC_NOERR; NClist* dimsettrans = NULL; @@ -842,7 +843,7 @@ Recursive helper for definedimsettrans3 static NCerror definedimsettransR(NCDAPCOMMON* nccomm, CDFnode* node) { - int i; + size_t i; int ncstat = NC_NOERR; definetransdimset(nccomm,node); @@ -882,7 +883,7 @@ Recursive helper static NCerror definedimsetsR(NCDAPCOMMON* nccomm, CDFnode* node) { - int i; + size_t i; int ncstat = NC_NOERR; definedimsetplus(nccomm,node); @@ -1057,7 +1058,7 @@ buildcdftreer(NCDAPCOMMON* nccomm, OCddsnode ocnode, CDFnode* container, void freecdfroot(CDFnode* root) { - int i; + size_t i; CDFtree* tree; NCDAPCOMMON* nccomm; if(root == NULL) return; @@ -1187,7 +1188,7 @@ fix1node(NCDAPCOMMON* nccomm, CDFnode* node) static NCerror fixnodes(NCDAPCOMMON* nccomm, NClist* cdfnodes) { - int i; + size_t i; for(i=0;i static void completesegments(NClist* fullpath, NClist* segments); static NCerror qualifyprojectionnames(DCEprojection* proj); @@ -338,7 +339,8 @@ fprintf(stderr,"matchpartialname: choice: %s %s for %s\n", static int matchsuffix(NClist* matchpath, NClist* segments) { - int i,pathstart; + size_t i; + int pathstart; int nsegs = nclistlength(segments); int pathlen = nclistlength(matchpath); int segmatch; @@ -356,7 +358,7 @@ matchsuffix(NClist* matchpath, NClist* segments) matching as we go */ for(i=0;irank; segmatch = 1; /* until proven otherwise */ @@ -386,12 +388,12 @@ dapbuildvaraprojection(CDFnode* var, const size_t* startp, const size_t* countp, const ptrdiff_t* stridep, DCEprojection** projectionp) { - int i,j; + size_t i,j; NCerror ncstat = NC_NOERR; DCEprojection* projection = NULL; NClist* path = nclistnew(); NClist* segments = NULL; - int dimindex; + size_t dimindex; /* 
Build a skeleton projection that has 1 segment for every cdfnode from root to the variable of interest. @@ -463,9 +465,10 @@ dapiswholeslice(DCEslice* slice, CDFnode* dim) int dapiswholesegment(DCEsegment* seg) { - int i,whole; + size_t i; + int whole; NClist* dimset = NULL; - unsigned int rank; + size_t rank; if(seg->rank == 0) return 1; if(!seg->slicesdefined) return 0; @@ -483,7 +486,8 @@ dapiswholesegment(DCEsegment* seg) int dapiswholeprojection(DCEprojection* proj) { - int i,whole; + size_t i; + int whole; ASSERT((proj->discrim == CES_VAR)); @@ -498,7 +502,7 @@ dapiswholeprojection(DCEprojection* proj) int dapiswholeconstraint(DCEconstraint* con) { - int i; + size_t i; if(con == NULL) return 1; if(con->projections != NULL) { for(i=0;iprojections);i++) { @@ -528,7 +532,7 @@ The term "expanded" means NCerror dapfixprojections(NClist* list) { - int i,j,k; + size_t i,j,k; NCerror ncstat = NC_NOERR; NClist* tmp = nclistnew(); /* misc. uses */ @@ -619,12 +623,12 @@ next: continue; } /*for(;;)*/ /* remove all NULL elements */ - for(i=nclistlength(list)-1;i>=0;i--) { - DCEprojection* target = (DCEprojection*)nclistget(list,i); + int n; + for(n=nclistlength(list)-1;n>=0;n--) { + DCEprojection* target = (DCEprojection*)nclistget(list,n); if(target == NULL) - nclistremove(list,i); + nclistremove(list,n); } - done: #ifdef DEBUG fprintf(stderr,"fixprojection: exploded = %s\n",dumpprojections(list)); @@ -661,7 +665,7 @@ projectify(CDFnode* field, DCEprojection* container) static int slicematch(NClist* seglist1, NClist* seglist2) { - int i,j; + size_t i,j; if((seglist1 == NULL || seglist2 == NULL) && seglist1 != seglist2) return 0; if(nclistlength(seglist1) != nclistlength(seglist2)) @@ -691,7 +695,7 @@ slicematch(NClist* seglist1, NClist* seglist2) int dapvar2projection(CDFnode* var, DCEprojection** projectionp) { - int i,j; + size_t i,j; int ncstat = NC_NOERR; NClist* path = nclistnew(); NClist* segments; @@ -707,7 +711,7 @@ dapvar2projection(CDFnode* var, DCEprojection** projectionp) for(i=0;iannotation = (void*)n; @@ -757,7 +761,7 @@ int daprestrictprojection(NClist* projections, DCEprojection* var, DCEprojection** resultp) { int ncstat = NC_NOERR; - int i; + size_t i; DCEprojection* result = NULL; #ifdef DEBUG1 fprintf(stderr,"restrictprojection.before: constraints=|%s| vara=|%s|\n", @@ -817,7 +821,7 @@ int dapshiftprojection(DCEprojection* projection) { int ncstat = NC_NOERR; - int i,j; + size_t i,j; NClist* segments; #ifdef DEBUG1 @@ -849,7 +853,7 @@ dapcomputeprojectedvars(NCDAPCOMMON* dapcomm, DCEconstraint* constraint) { NCerror ncstat = NC_NOERR; NClist* vars = NULL; - int i; + size_t i; vars = nclistnew(); diff --git a/libdap2/dapattr.c b/libdap2/dapattr.c index c0e37aa9a7..2ee46383eb 100644 --- a/libdap2/dapattr.c +++ b/libdap2/dapattr.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "dapincludes.h" +#include #define OCCHECK(exp) if((ocstat = (exp))) {THROWCHK(ocstat); goto done;} @@ -18,7 +19,7 @@ and stuff from DODS_EXTRA. 
int dapmerge(NCDAPCOMMON* nccomm, CDFnode* ddsroot, OCddsnode dasroot) { - int i,j; + size_t i,j; NCerror ncstat = NC_NOERR; OCerror ocstat = OC_NOERR; NClist* allnodes; diff --git a/libdap2/dapcvt.c b/libdap2/dapcvt.c index 5979fb8227..72d305d87a 100644 --- a/libdap2/dapcvt.c +++ b/libdap2/dapcvt.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "config.h" #include "dapincludes.h" +#include #ifdef _WIN32 #include @@ -215,10 +216,10 @@ If we need an int and the string value is out of range, return NC_ERANGE. NCerror dapcvtattrval(nc_type etype, void* dst, NClist* src, NCattribute* att) { - int i; + size_t i; NCerror ncstat = NC_NOERR; - unsigned int memsize = nctypesizeof(etype); - unsigned int nvalues = nclistlength(src); + size_t memsize = nctypesizeof(etype); + size_t nvalues = nclistlength(src); char* dstmem = (char*)dst; for(i=0;i #ifdef USE_PARALLEL #include "netcdf_par.h" #endif @@ -247,7 +248,7 @@ dumppath(CDFnode* leaf) NClist* path = nclistnew(); NCbytes* buf = ncbytesnew(); char* result; - int i; + size_t i; if(leaf == NULL) return nulldup(""); collectnodepath(leaf,path,!WITHDATASET); @@ -272,7 +273,7 @@ dumpindent(int indent, NCbytes* buf) static void dumptreer1(CDFnode* root, NCbytes* buf, int indent, char* tag, int visible) { - int i; + size_t i; dumpindent(indent,buf); ncbytescat(buf,tag); ncbytescat(buf," {\n"); @@ -300,7 +301,7 @@ dumptreer1(CDFnode* root, NCbytes* buf, int indent, char* tag, int visible) static void dumptreer(CDFnode* root, NCbytes* buf, int indent, int visible) { - int i; + size_t i; char* primtype = NULL; NClist* dimset = NULL; @@ -389,7 +390,7 @@ dumpnode(CDFnode* node) { NCbytes* buf = ncbytesnew(); char* result; - int i; + size_t i; char* nctype = NULL; char* primtype = NULL; char tmp[1024]; @@ -456,7 +457,7 @@ dumpnode(CDFnode* node) ncbytescat(buf,tmp); for(i=0;iarray.dimset0);i++) { CDFnode* dim = (CDFnode*)nclistget(node->array.dimset0,i); - snprintf(tmp,sizeof(tmp),"dims[%d]={\n",i); + snprintf(tmp,sizeof(tmp),"dims[%zu]={\n",i); ncbytescat(buf,tmp); snprintf(tmp,sizeof(tmp)," ocname=%s\n",dim->ocname); ncbytescat(buf,tmp); @@ -497,7 +498,7 @@ dumpcachenode(NCcachenode* node) { char* result = NULL; char tmp[8192]; - int i; + size_t i; NCbytes* buf; if(node == NULL) return strdup("cachenode{null}"); @@ -527,7 +528,7 @@ dumpcache(NCcache* cache) { char* result = NULL; char tmp[8192]; - int i; + size_t i; NCbytes* buf; if(cache == NULL) return strdup("cache{null}"); @@ -619,10 +620,10 @@ dumplistraw(NClist* l) void dumpstringlist(NClist* l) { - int i; + size_t i; for(i=0;i #ifdef HAVE_SYS_TIME_H #include @@ -312,7 +313,7 @@ collectocpath(OClink conn, OCddsnode node, NClist* path) char* makeocpathstring(OClink conn, OCddsnode node, const char* sep) { - int i,len,first; + size_t i,len,first; char* result; char* name; OCtype octype; @@ -353,7 +354,7 @@ makeocpathstring(OClink conn, OCddsnode node, const char* sep) char* makepathstring(NClist* path, const char* separator, int flags) { - int i,len,first; + size_t i,len,first; NCbytes* pathname = NULL; char* result; CDFnode* node; @@ -412,7 +413,7 @@ clonenodenamepath(CDFnode* node, NClist* path, int withdataset) char* simplepathstring(NClist* names, char* separator) { - int i; + size_t i; size_t len; char* result; if(nclistlength(names) == 0) return nulldup(""); diff --git a/libdap2/dceconstraints.c b/libdap2/dceconstraints.c index b4895768f8..2215542242 100644 --- a/libdap2/dceconstraints.c +++ b/libdap2/dceconstraints.c @@ -5,6 +5,7 @@ #include 
"config.h" +#include #include #include #include @@ -218,7 +219,7 @@ Dst will be modified. int dcemergeprojectionlists(NClist* dst, NClist* src) { - int i; + size_t i; NClist* cat = nclistnew(); int ncstat = NC_NOERR; @@ -272,7 +273,7 @@ int dcemergeprojections(DCEprojection* merged, DCEprojection* addition) { int ncstat = NC_NOERR; - int i,j; + size_t i,j; ASSERT((merged->discrim == CES_VAR && addition->discrim == CES_VAR)); ASSERT((nclistlength(merged->var->segments) == nclistlength(addition->var->segments))); @@ -432,7 +433,7 @@ dceclone(DCEnode* node) NClist* dceclonelist(NClist* list) { - int i; + size_t i; NClist* clone; if(list == NULL) return NULL; clone = nclistnew(); @@ -519,7 +520,7 @@ dcefree(DCEnode* node) void dcefreelist(NClist* list) { - int i; + size_t i; if(list == NULL) return; for(i=0;isort == which) @@ -920,8 +921,8 @@ dcemakewholeprojection(DCEprojection* p) int dcesamepath(NClist* list1, NClist* list2) { - int i; - int len = nclistlength(list1); + size_t i; + size_t len = nclistlength(list1); if(len != nclistlength(list2)) return 0; for(i=0;i #include #include #include @@ -91,7 +92,7 @@ segmentlist(DCEparsestate* state, Object var0, Object decl) Object segment(DCEparsestate* state, Object name, Object slices0) { - int i; + size_t i; DCEsegment* segment = (DCEsegment*)dcecreate(CES_SEGMENT); NClist* slices = (NClist*)slices0; segment->name = strdup((char*)name); @@ -222,7 +223,7 @@ array_indices(DCEparsestate* state, Object list0, Object indexno) Object indexer(DCEparsestate* state, Object name, Object indices) { - int i; + size_t i; NClist* list = (NClist*)indices; DCEsegment* seg = (DCEsegment*)dcecreate(CES_SEGMENT); seg->name = strdup((char*)name); diff --git a/libdap2/getvara.c b/libdap2/getvara.c index f3ab9259a3..32ac76e7bf 100644 --- a/libdap2/getvara.c +++ b/libdap2/getvara.c @@ -9,6 +9,7 @@ #include "dapdump.h" #include "ncd2dispatch.h" #include "ocx.h" +#include #define NEWVARM @@ -95,7 +96,7 @@ nc3d_getvarx(int ncid, int varid, { NCerror ncstat = NC_NOERR; OCerror ocstat = OC_NOERR; - int i; + size_t i; NC* drno; NC* substrate; NCDAPCOMMON* dapcomm; @@ -426,7 +427,7 @@ fprintf(stderr,"cache.datadds=%s\n",dumptree(cachenode->datadds)); static NCerror removepseudodims(DCEprojection* proj) { - int i; + size_t i; #ifdef DEBUG1 fprintf(stderr,"removesequencedims.before: %s\n",dumpprojection(proj)); #endif @@ -935,7 +936,7 @@ extractstring( { NCerror ncstat = NC_NOERR; OCerror ocstat = OC_NOERR; - int i; + size_t i; size_t rank0; NClist* strings = NULL; Dapodometer* odom = NULL; @@ -1056,9 +1057,9 @@ It is assumed that both trees have been re-struct'ed if necessary. 
*/ static NCerror -attachr(CDFnode* xnode, NClist* patternpath, int depth) +attachr(CDFnode* xnode, NClist* patternpath, size_t depth) { - unsigned int i,plen,lastnode,gridable; + size_t i,plen,lastnode,gridable; NCerror ncstat = NC_NOERR; CDFnode* patternpathnode; CDFnode* patternpathnext; @@ -1129,7 +1130,7 @@ attachsubsetr(CDFnode* target, CDFnode* pattern) { unsigned int i; NCerror ncstat = NC_NOERR; - int fieldindex; + size_t fieldindex; #ifdef DEBUG2 fprintf(stderr,"attachsubsetr: attach: target=%s pattern=%s\n", diff --git a/libdap2/ncd2dispatch.c b/libdap2/ncd2dispatch.c index e8d433fd77..b60d2eca27 100644 --- a/libdap2/ncd2dispatch.c +++ b/libdap2/ncd2dispatch.c @@ -8,6 +8,7 @@ #include "ncrc.h" #include "ncoffsets.h" #include "netcdf_dispatch.h" +#include #ifdef DEBUG2 #include "dapdump.h" #endif @@ -728,7 +729,7 @@ fprintf(stderr,"define: dim: %s=%ld\n",dim->ncfullname,(long)dim->dim.declsize); static NCerror buildvars(NCDAPCOMMON* dapcomm) { - int i,j; + size_t i,j; NCerror ncstat = NC_NOERR; int varid; NClist* varnodes = dapcomm->cdf.ddsroot->tree->varnodes; @@ -738,7 +739,7 @@ buildvars(NCDAPCOMMON* dapcomm) for(i=0;iinvisible) continue; @@ -773,7 +774,7 @@ fprintf(stderr,"\n"); ncstat = nc_def_var(dapcomm->substrate.nc3id, definename, var->externaltype, - ncrank, + (int)ncrank, (ncrank==0?NULL:dimids), &varid); nullfree(definename); @@ -849,7 +850,7 @@ fprintf(stderr,"\n"); static NCerror buildglobalattrs(NCDAPCOMMON* dapcomm, CDFnode* root) { - int i; + size_t i; NCerror ncstat = NC_NOERR; const char* txt; char *nltxt, *p; @@ -930,9 +931,9 @@ buildglobalattrs(NCDAPCOMMON* dapcomm, CDFnode* root) static NCerror buildattribute(NCDAPCOMMON* dapcomm, CDFnode* var, NCattribute* att) { - int i; + size_t i; NCerror ncstat = NC_NOERR; - unsigned int nvalues = nclistlength(att->values); + size_t nvalues = nclistlength(att->values); int varid = (var == NULL ? 
NC_GLOBAL : var->ncid); void* mem = NULL; @@ -966,7 +967,7 @@ buildattribute(NCDAPCOMMON* dapcomm, CDFnode* var, NCattribute* att) if(ncstat) goto done; } else { nc_type atype; - unsigned int typesize; + size_t typesize; atype = nctypeconvert(dapcomm,att->etype); typesize = nctypesizeof(atype); if (nvalues > 0) { @@ -1040,7 +1041,7 @@ NCD2_inq_format_extended(int ncid, int* formatp, int* modep) NCerror computecdfdimnames(NCDAPCOMMON* nccomm) { - int i,j; + size_t i,j; char tmp[NC_MAX_NAME*2]; NClist* conflicts = nclistnew(); NClist* varnodes = nccomm->cdf.ddsroot->tree->varnodes; @@ -1116,7 +1117,7 @@ fprintf(stderr,"conflict: %s[%lu] %s[%lu]\n", /* Give all the conflicting dimensions an index */ for(j=0;jdim.index1 = j+1; + dim->dim.index1 = (int)j+1; } } nclistfree(conflicts); @@ -1240,7 +1241,8 @@ paramlookup(NCDAPCOMMON* state, const char* key) static NCerror applyclientparams(NCDAPCOMMON* nccomm) { - int i,len; + size_t i; + int len; int dfaltstrlen = DEFAULTSTRINGLENGTH; int dfaltseqlim = DEFAULTSEQLIMIT; const char* value; @@ -1364,12 +1366,12 @@ applyclientparams(NCDAPCOMMON* nccomm) static void computedimindexanon(CDFnode* dim, CDFnode* var) { - int i; + size_t i; NClist* dimset = var->array.dimsetall; for(i=0;idim.index1=i+1; + dim->dim.index1 = (int)i+1; return; } } @@ -1379,7 +1381,7 @@ computedimindexanon(CDFnode* dim, CDFnode* var) static void replacedims(NClist* dims) { - int i; + size_t i; for(i=0;idim.basedim; @@ -1407,7 +1409,7 @@ equivalentdim(CDFnode* basedim, CDFnode* dupdim) static void getalldimsa(NClist* dimset, NClist* alldims) { - int i; + size_t i; for(i=0;icdf.ddsroot->tree->varnodes; @@ -1450,7 +1452,7 @@ addstringdims(NCDAPCOMMON* dapcomm) in DODS{...} attribute set or defaulting to the variable name. All such dimensions are global. */ - int i; + size_t i; NClist* varnodes = dapcomm->cdf.ddsroot->tree->varnodes; CDFnode* globalsdim = NULL; char dimname[4096]; @@ -1514,7 +1516,7 @@ addstringdims(NCDAPCOMMON* dapcomm) static NCerror defrecorddim(NCDAPCOMMON* dapcomm) { - unsigned int i; + size_t i; NCerror ncstat = NC_NOERR; NClist* basedims; @@ -1590,7 +1592,7 @@ defseqdims(NCDAPCOMMON* dapcomm) static NCerror showprojection(NCDAPCOMMON* dapcomm, CDFnode* var) { - int i,rank; + size_t i,rank; NCerror ncstat = NC_NOERR; NCbytes* projection = ncbytesnew(); NClist* path = nclistnew(); @@ -1848,7 +1850,7 @@ make sure we always have a constraint. static NCerror computeseqcountconstraints(NCDAPCOMMON* dapcomm, CDFnode* seq, NCbytes* seqcountconstraints) { - int i,j; + size_t i,j; NClist* path = NULL; CDFnode* var = NULL; @@ -1875,7 +1877,7 @@ computeseqcountconstraints(NCDAPCOMMON* dapcomm, CDFnode* seq, NCbytes* seqcount ncbytescat(seqcountconstraints,tmp); } } else if(nclistlength(node->array.dimset0) > 0) { - int ndims = nclistlength(node->array.dimset0); + size_t ndims = nclistlength(node->array.dimset0); for(j=0;jarray.dimset0,j); if(DIMFLAG(dim,CDFDIMSTRING)) { @@ -1986,8 +1988,8 @@ cdftotalsize(NClist* dimensions) static void estimatevarsizes(NCDAPCOMMON* dapcomm) { - int ivar; - unsigned int rank; + size_t ivar; + size_t rank; size_t totalsize = 0; for(ivar=0;ivarcdf.ddsroot->tree->varnodes);ivar++) { @@ -2183,7 +2185,7 @@ make them invisible. 
static NCerror fixzerodims(NCDAPCOMMON* dapcomm) { - int i,j; + size_t i,j; for(i=0;icdf.ddsroot->tree->varnodes);i++) { CDFnode* var = (CDFnode*)nclistget(dapcomm->cdf.ddsroot->tree->varnodes,i); NClist* ncdims = var->array.dimsetplus; @@ -2249,7 +2251,7 @@ applyclientparamcontrols(NCDAPCOMMON* dapcomm) CLRFLAG(dapcomm->controls,NCF_FILLMISMATCH); if((value=dapparamvalue(dapcomm,"encode")) != NULL) { - int i; + size_t i; NClist* encode = nclistnew(); if(dapparamparselist(value,',',encode)) nclog(NCLOGERR,"Malformed encode parameter: %s",value); diff --git a/libdap4/d4data.c b/libdap4/d4data.c index eae5f32ed8..d1e28b4092 100644 --- a/libdap4/d4data.c +++ b/libdap4/d4data.c @@ -6,6 +6,7 @@ #include "d4includes.h" #include #include +#include #include "d4includes.h" #include "d4odom.h" #include "nccrc.h" @@ -58,7 +59,7 @@ int NCD4_parcelvars(NCD4meta* meta, NCD4response* resp) { int ret = NC_NOERR; - int i; + size_t i; NClist* toplevel = NULL; NCD4node* root = meta->root; NCD4offset* offset = NULL; @@ -88,7 +89,7 @@ int NCD4_processdata(NCD4meta* meta, NCD4response* resp) { int ret = NC_NOERR; - int i; + size_t i; NClist* toplevel = NULL; NCD4node* root = meta->root; NCD4offset* offset = NULL; @@ -203,7 +204,8 @@ NCD4_movetoinstance(NCD4meta* meta, NCD4node* type, NCD4offset* offset, void** d static int fillstruct(NCD4meta* meta, NCD4node* type, NCD4offset* offset, void** dstp, NClist* blobs) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; void* dst = *dstp; #ifdef CLEARSTRUCT @@ -370,7 +372,7 @@ int NCD4_getToplevelVars(NCD4meta* meta, NCD4node* group, NClist* toplevel) { int ret = NC_NOERR; - int i; + size_t i; if(group == NULL) group = meta->root; @@ -397,7 +399,8 @@ int NCD4_inferChecksums(NCD4meta* meta, NCD4response* resp) { int ret = NC_NOERR; - int i, attrfound; + size_t i; + int attrfound; NClist* toplevel = NULL; /* Get the toplevel vars */ @@ -407,7 +410,7 @@ NCD4_inferChecksums(NCD4meta* meta, NCD4response* resp) /* First, look thru the DMR to see if there is a checksum attribute */ attrfound = 0; for(i=0;iattributes);a++) { NCD4node* attr = (NCD4node*)nclistget(node->attributes,a); diff --git a/libdap4/d4debug.c b/libdap4/d4debug.c index caed80027a..3074b80fe8 100644 --- a/libdap4/d4debug.c +++ b/libdap4/d4debug.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "config.h" #include +#include #include #include "d4includes.h" @@ -96,7 +97,8 @@ bv inserting the data into the substrate and then writing it out. 
int NCD4_debugcopy(NCD4INFO* info) { - int i,ret=NC_NOERR; + size_t i; + int ret = NC_NOERR; NCD4meta* meta = info->dmrmetadata; NClist* topvars = nclistnew(); NC* ncp = info->controller; @@ -134,7 +136,7 @@ NCD4_debugcopy(NCD4INFO* info) */ { size_t edges[NC_MAX_VAR_DIMS]; - int d; + size_t d; for(d=0;ddims);d++) { NCD4node* dim = (NCD4node*)nclistget(var->dims,d); edges[d] = (size_t)dim->dim.size; diff --git a/libdap4/d4dump.c b/libdap4/d4dump.c index 2f3103a404..89bd9862ed 100644 --- a/libdap4/d4dump.c +++ b/libdap4/d4dump.c @@ -3,6 +3,7 @@ */ #include "d4includes.h" +#include /* Provide a simple dump of binary data @@ -87,7 +88,7 @@ NCD4_tagdump(size_t size, const void* data0, int swap, const char* tag) void NCD4_dumpvars(NCD4node* group) { - int i; + size_t i; fprintf(stderr,"%s.vars:\n",group->name); for(i=0;ivars);i++) { NCD4node* var = (NCD4node*)nclistget(group->vars,i); diff --git a/libdap4/d4file.c b/libdap4/d4file.c index 46c1a0fdbb..677844ab06 100644 --- a/libdap4/d4file.c +++ b/libdap4/d4file.c @@ -7,6 +7,7 @@ #include "ncd4dispatch.h" #include "d4includes.h" #include "d4curlfunctions.h" +#include #ifdef _MSC_VER #include @@ -678,7 +679,7 @@ NCD4_newMeta(NCD4INFO* info, NCD4meta** metap) void NCD4_reclaimMeta(NCD4meta* dataset) { - int i; + size_t i; if(dataset == NULL) return; for(i=0;iallnodes);i++) { diff --git a/libdap4/d4fix.c b/libdap4/d4fix.c index b76a52d28b..af47ea88e3 100644 --- a/libdap4/d4fix.c +++ b/libdap4/d4fix.c @@ -5,6 +5,7 @@ #include #include +#include #include "d4includes.h" @@ -120,7 +121,7 @@ Do depth first search static void walk(NCD4node* node, NClist* sorted) { - int i; + size_t i; if(node->visited) return; node->visited = 1; @@ -310,7 +311,7 @@ static int delimitStruct(NCD4meta* compiler, NCD4node* basetype, NCD4offset* offset) { int ret = NC_NOERR; - int i; + size_t i; /* The fields are associated with the basetype struct */ for(i=0;ivars);i++) { diff --git a/libdap4/d4meta.c b/libdap4/d4meta.c index 1cea62f613..049469ad36 100644 --- a/libdap4/d4meta.c +++ b/libdap4/d4meta.c @@ -5,6 +5,7 @@ #include "d4includes.h" #include +#include #include "nc4internal.h" #include "ncoffsets.h" @@ -58,7 +59,7 @@ int NCD4_metabuild(NCD4meta* metadata, int ncid) { int ret = NC_NOERR; - int i; + size_t i; metadata->ncid = ncid; metadata->root->meta.id = ncid; @@ -119,7 +120,8 @@ reclaimNode(NCD4node* node) static int build(NCD4meta* builder, NCD4node* root) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; size_t len = nclistlength(builder->allnodes); /* Tag the root group */ @@ -223,7 +225,8 @@ build(NCD4meta* builder, NCD4node* root) static int buildGroups(NCD4meta* builder, NCD4node* parent) { - int i,ret=NC_NOERR; + size_t i; + int ret = NC_NOERR; #ifdef D4DEBUG fprintf(stderr,"build group: %s\n",parent->name); #endif @@ -261,7 +264,8 @@ buildDimension(NCD4meta* builder, NCD4node* dim) static int buildEnumeration(NCD4meta* builder, NCD4node* en) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; NCD4node* group = NCD4_groupFor(en); NCCHECK((nc_def_enum(group->meta.id,en->basetype->meta.id,en->name,&en->meta.id))); for(i=0;ien.econsts);i++) { @@ -342,7 +346,8 @@ buildMetaData(NCD4meta* builder, NCD4node* var) static int buildMaps(NCD4meta* builder, NCD4node* var) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; size_t count = nclistlength(var->maps); char** memory = NULL; char** p; @@ -372,7 +377,8 @@ buildMaps(NCD4meta* builder, NCD4node* var) static int buildAttributes(NCD4meta* builder, NCD4node* varorgroup) { - int i,ret = 
NC_NOERR; + size_t i; + int ret = NC_NOERR; NClist* blobs = NULL; for(i=0;iattributes);i++) { @@ -479,7 +485,8 @@ buildVlenType(NCD4meta* builder, NCD4node* vlentype) static int buildCompound(NCD4meta* builder, NCD4node* cmpdtype, NCD4node* group, char* name) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; /* Step 1: compute field offsets */ computeOffsets(builder,cmpdtype); @@ -613,7 +620,7 @@ savevarbyid(NCD4node* group, NCD4node* var) char* NCD4_getVarFQN(NCD4node* var, const char* tail) { - int i; + size_t i; NCD4node* x = NULL; NClist* path = NULL; NCbytes* fqn = NULL; @@ -646,7 +653,7 @@ NCD4_getVarFQN(NCD4node* var, const char* tail) static char* getFieldFQN(NCD4node* field, const char* tail) { - int i; + size_t i; NCD4node* x = NULL; NClist* path = NULL; NCbytes* fqn = NULL; @@ -676,8 +683,8 @@ getFieldFQN(NCD4node* field, const char* tail) static size_t getDimrefs(NCD4node* var, int* dimids) { - int i; - int rank = nclistlength(var->dims); + size_t i; + size_t rank = nclistlength(var->dims); for(i=0;idims,i); dimids[i] = dim->meta.id; @@ -688,8 +695,8 @@ getDimrefs(NCD4node* var, int* dimids) static size_t getDimsizes(NCD4node* var, int* dimsizes) { - int i; - int rank = nclistlength(var->dims); + size_t i; + size_t rank = nclistlength(var->dims); for(i=0;idims,i); dimsizes[i] = (int)dim->dim.size; @@ -720,7 +727,8 @@ to nc_put_att(). static int compileAttrValues(NCD4meta* builder, NCD4node* attr, void** memoryp, NClist* blobs) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; unsigned char* memory = NULL; unsigned char* p; size_t size; @@ -730,7 +738,7 @@ compileAttrValues(NCD4meta* builder, NCD4node* attr, void** memoryp, NClist* blo NCD4node* container = attr->container; NCD4node* basetype = attr->basetype; NClist* values = attr->attr.values; - int count = nclistlength(values); + size_t count = nclistlength(values); memset((void*)&converter,0,sizeof(converter)); @@ -887,7 +895,8 @@ Note: this will work if the econst string is a number or a econst name static int decodeEconst(NCD4meta* builder, NCD4node* enumtype, const char* nameorval, union ATOMICS* converter) { - int i,ret=NC_NOERR; + size_t i; + int ret = NC_NOERR; union ATOMICS number; NCD4node* match = NULL; @@ -949,7 +958,7 @@ backslashEscape(const char* s) static int markfixedsize(NCD4meta* meta) { - int i,j; + size_t i,j; for(i=0;iallnodes);i++) { int fixed = 1; NCD4node* n = (NCD4node*)nclistget(meta->allnodes,i); @@ -979,7 +988,7 @@ markfixedsize(NCD4meta* meta) static void computeOffsets(NCD4meta* builder, NCD4node* cmpd) { - int i; + size_t i; d4size_t offset = 0; d4size_t largestalign = 1; d4size_t size = 0; @@ -1103,7 +1112,7 @@ getpadding(d4size_t offset, size_t alignment) static int markdapsize(NCD4meta* meta) { - int i,j; + size_t i,j; for(i=0;iallnodes);i++) { NCD4node* type = (NCD4node*)nclistget(meta->allnodes,i); size_t totalsize; @@ -1161,10 +1170,10 @@ NCD4_findvar(NC* ncp, int gid, int varid, NCD4node** varp, NCD4node** grpp) return THROW(NC_EBADID); /* Locate var node via (grpid,varid) */ grp_id = GROUPIDPART(gid); - group = nclistget(meta->groupbyid,grp_id); + group = nclistget(meta->groupbyid, (size_t)grp_id); if(group == NULL) return THROW(NC_EBADID); - var = nclistget(group->group.varbyid,varid); + var = nclistget(group->group.varbyid, (size_t)varid); if(var == NULL) return THROW(NC_EBADID); if(varp) *varp = var; diff --git a/libdap4/d4parser.c b/libdap4/d4parser.c index 2948f80672..895e88e2a0 100644 --- a/libdap4/d4parser.c +++ b/libdap4/d4parser.c @@ -6,6 +6,7 @@ #include 
"d4includes.h" #include #include +#include #include "ncxml.h" /** @@ -878,7 +879,8 @@ Find or create an opaque type static NCD4node* getOpaque(NCD4parser* parser, ncxml_t varxml, NCD4node* group) { - int i, ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; long long len; NCD4node* opaquetype = NULL; char* xattr; @@ -1024,7 +1026,7 @@ splitOrigType(NCD4parser* parser, const char* fqn, NCD4node* type) NCD4node* NCD4_findAttr(NCD4node* container, const char* attrname) { - int i; + size_t i; /* Look directly under this xml for */ for(i=0;iattributes);i++) { NCD4node* attr = (NCD4node*)nclistget(container->attributes,i); @@ -1095,7 +1097,8 @@ static int lookupFQNList(NCD4parser* parser, NClist* fqn, NCD4sort sort, NCD4node** result) { int ret = NC_NOERR; - int i,nsteps; + size_t i; + int nsteps; NCD4node* current; char* name = NULL; NCD4node* node = NULL; @@ -1139,7 +1142,7 @@ lookupFQNList(NCD4parser* parser, NClist* fqn, NCD4sort sort, NCD4node** result) assert (i < (nsteps - 1)); i++; /* skip variable name */ for(;;i++) { - int j; + size_t j; name = (char*)nclistget(fqn,i); assert(ISTYPE(current->sort) && ISCMPD(current->subsort)); for(node=NULL,j=0;jvars);j++) { @@ -1166,7 +1169,7 @@ lookupFQNList(NCD4parser* parser, NClist* fqn, NCD4sort sort, NCD4node** result) static NCD4node* lookFor(NClist* elems, const char* name, NCD4sort sort) { - int n,i; + size_t n,i; if(elems == NULL || nclistlength(elems) == 0) return NULL; n = nclistlength(elems); for(i=0;igroup.elements; if(elems == NULL || nclistlength(elems) == 0) return; @@ -1646,7 +1649,7 @@ static int parseForwards(NCD4parser* parser, NCD4node* root) { int ret = NC_NOERR; - int i,j; + size_t i,j; /* process all vars */ for(i=0;ivars);i++) { diff --git a/libdap4/d4printer.c b/libdap4/d4printer.c index 88b1392d24..81183efba1 100644 --- a/libdap4/d4printer.c +++ b/libdap4/d4printer.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "d4includes.h" +#include /** This provides a simple dap4 metadata -> xml printer. 
@@ -87,7 +88,7 @@ static int printNode(D4printer* out, NCD4node* node, int depth) { int ret = NC_NOERR; - int i; + size_t i; char* fqn = NULL; switch (node->sort) { @@ -297,13 +298,12 @@ static int printGroupBody(D4printer* out, NCD4node* node, int depth) { int ret = NC_NOERR; - int i,ngroups,nvars,ntypes,ndims,nattrs; - - ngroups = nclistlength(node->groups); - nvars = nclistlength(node->vars); - ntypes = nclistlength(node->types); - ndims = nclistlength(node->dims); - nattrs = nclistlength(node->attributes); + size_t i; + size_t ngroups = nclistlength(node->groups); + size_t nvars = nclistlength(node->vars); + size_t ntypes = nclistlength(node->types); + size_t ndims = nclistlength(node->dims); + size_t nattrs = nclistlength(node->attributes); if(ndims > 0) { INDENT(depth); @@ -371,7 +371,7 @@ static int printMetaData(D4printer* out, NCD4node* node, int depth) { int ret = NC_NOERR; - int i; + size_t i; if(nclistlength(node->dims) > 0) { for(i=0;idims);i++) { @@ -456,7 +456,7 @@ static int printAttribute(D4printer* out, NCD4node* attr, int depth) { int ret = NC_NOERR; - int i = 0; + size_t i = 0; char* fqn = NULL; INDENT(depth); CAT(" +#include #include "d4includes.h" /* @@ -151,7 +152,7 @@ static int walkStruct(NCD4response* resp, NCD4node* topvar, NCD4node* structtype, NCD4offset* offset, int doswap) { int ret = NC_NOERR; - int i; + size_t i; for(i=0;ivars);i++) { NCD4node* field = (NCD4node*)nclistget(structtype->vars,i); diff --git a/libdap4/d4util.c b/libdap4/d4util.c index d1ba7f5828..8e422b5cad 100644 --- a/libdap4/d4util.c +++ b/libdap4/d4util.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "d4includes.h" +#include #ifdef HAVE_SYS_STAT_H #include #endif @@ -92,7 +93,7 @@ NCD4_typesize(nc_type tid) d4size_t NCD4_dimproduct(NCD4node* node) { - int i; + size_t i; d4size_t product = 1; for(i=0;idims);i++) { NCD4node* dim = (NCD4node*)nclistget(node->dims,i); @@ -156,7 +157,7 @@ create the last part of the fqn char* NCD4_makeName(NCD4node* elem, const char* sep) { - int i; + size_t i; size_t estimate = 0; NCD4node* n; NClist* path = nclistnew(); diff --git a/libdap4/d4varx.c b/libdap4/d4varx.c index b098855f83..e42bd98558 100644 --- a/libdap4/d4varx.c +++ b/libdap4/d4varx.c @@ -9,6 +9,7 @@ #include "nc4internal.h" #include "d4includes.h" #include "d4odom.h" +#include /* Forward */ static int getvarx(int gid, int varid, NCD4INFO**, NCD4node** varp, nc_type* xtypep, size_t*, nc_type* nc4typep, size_t*); @@ -31,7 +32,8 @@ NCD4_get_vars(int gid, int varid, const size_t *start, const size_t *edges, const ptrdiff_t* stride, void *memoryin, nc_type xtype) { - int i,ret; + size_t i; + int ret; NCD4INFO* info; NCD4meta* meta; NCD4node* ncvar; @@ -41,7 +43,7 @@ NCD4_get_vars(int gid, int varid, size_t nc4size, xsize, dapsize; void* instance = NULL; /* Staging area in case we have to convert */ NClist* blobs = NULL; - int rank; + size_t rank; size_t dimsizes[NC_MAX_VAR_DIMS]; d4size_t dimproduct; size_t dstpos; @@ -246,7 +248,8 @@ findbyname(const char* name, NClist* nodes) static int matchvar(NCD4meta* dmrmeta, NCD4node* dapvar, NCD4node** dmrvarp) { - int i,ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; NCD4node* x = NULL; NClist* dappath = nclistnew(); NClist* dmrpath = nclistnew(); /* compute path for this dmr var */ @@ -259,7 +262,7 @@ matchvar(NCD4meta* dmrmeta, NCD4node* dapvar, NCD4node** dmrvarp) for(i=0;iallnodes);i++) { NCD4node* node = (NCD4node*)nclistget(dmrmeta->allnodes,i); if(ISVAR(node->sort) && 
strcmp(node->name,dapvar->name)==0) { /* possible candidate */ - int j; + size_t j; found = 0; nclistclear(dmrpath); for(x=node;x != NULL;x=x->container) nclistinsert(dmrpath,0,x); @@ -292,7 +295,8 @@ toplevel dmr var and transfer necessary info; static int mapvars(NCD4meta* dapmeta, NCD4meta* dmrmeta, int inferredchecksumming) { - int i, ret = NC_NOERR; + size_t i; + int ret = NC_NOERR; NCD4node* daproot = dapmeta->root; NClist* daptop = NULL; /* top variables in dap tree */ diff --git a/libdap4/ncd4dispatch.c b/libdap4/ncd4dispatch.c index e7eefb40a4..37e134b55a 100644 --- a/libdap4/ncd4dispatch.c +++ b/libdap4/ncd4dispatch.c @@ -3,6 +3,7 @@ * See netcdf/COPYRIGHT file for copying and redistribution conditions. *********************************************************************/ +#include #ifdef HAVE_CONFIG_H #include "config.h" #endif @@ -841,7 +842,7 @@ NCD4_inq_dim(int ncid, int dimid, char* name, size_t* lenp) NC* ncp; NCD4INFO* info; NCD4meta* meta; - int i; + size_t i; NCD4node* dim = NULL; if((ret = NC_check_id(ncid, (NC**)&ncp)) != NC_NOERR) diff --git a/libdispatch/dhttp.c b/libdispatch/dhttp.c index 6f12e561fa..1008ff62f2 100644 --- a/libdispatch/dhttp.c +++ b/libdispatch/dhttp.c @@ -8,6 +8,7 @@ */ #include "config.h" +#include #include #include #include @@ -715,7 +716,7 @@ headersoff(NC_HTTP_STATE* state) static int lookupheader(NC_HTTP_STATE* state, const char* key, const char** valuep) { - int i; + size_t i; const char* value = NULL; /* Get the content length header */ for(i=0;icurl.response.headers);i+=2) { diff --git a/libdispatch/dinfermodel.c b/libdispatch/dinfermodel.c index 6f5e4a846c..b1c838b24f 100644 --- a/libdispatch/dinfermodel.c +++ b/libdispatch/dinfermodel.c @@ -9,6 +9,7 @@ */ #include "config.h" +#include #include #ifdef HAVE_UNISTD_H #include @@ -399,7 +400,7 @@ parseonchar(const char* s, int ch, NClist* segments) static char* envvlist2string(NClist* envv, const char* delim) { - int i; + size_t i; NCbytes* buf = NULL; char* result = NULL; @@ -441,7 +442,8 @@ processmodearg(const char* arg, NCmodel* model) static int processmacros(NClist* fraglenv, NClist* expanded) { - int i, stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; const struct MACRODEF* macros = NULL; for(i=0;i #ifdef HAVE_UNISTD_H #include #endif @@ -204,7 +205,7 @@ rcfreeentry(NCRCentry* t) static void rcfreeentries(NClist* rc) { - int i; + size_t i; for(i=0;ircinfo; NCRCentry* entry = NULL; @@ -612,7 +614,7 @@ rclocatepos(const char* key, const char* hostport, const char* urlpath) for(i=0;iy static int awsparse(const char* text, NClist* profiles) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; size_t len; AWSparser* parser = NULL; struct AWSprofile* profile = NULL; @@ -1095,7 +1098,7 @@ static void freeprofile(struct AWSprofile* profile) { if(profile) { - int i; + size_t i; #ifdef AWSDEBUG fprintf(stderr,">>> freeprofile: %s\n",profile->name); #endif @@ -1113,7 +1116,7 @@ static void freeprofilelist(NClist* profiles) { if(profiles) { - int i; + size_t i; for(i=0;ircinfo->s3profiles);i++) { @@ -1231,7 +1234,8 @@ NC_authgets3profile(const char* profilename, struct AWSprofile** profilep) int NC_s3profilelookup(const char* profile, const char* key, const char** valuep) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; struct AWSprofile* awsprof = NULL; const char* value = NULL; diff --git a/libdispatch/ds3util.c b/libdispatch/ds3util.c index 5091b1a24e..b447a64350 100644 --- a/libdispatch/ds3util.c +++ b/libdispatch/ds3util.c @@ -52,7 +52,8 @@ if provided, otherwise 
us-east-1. int NC_s3urlrebuild(NCURI* url, NCS3INFO* s3, NCURI** newurlp) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* hostsegments = NULL; NClist* pathsegments = NULL; NCbytes* buf = ncbytesnew(); diff --git a/libdispatch/dutil.c b/libdispatch/dutil.c index 83b20dfb69..d60c8e1767 100644 --- a/libdispatch/dutil.c +++ b/libdispatch/dutil.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "config.h" +#include #include #include #include @@ -378,7 +379,7 @@ NC_testmode(NCURI* uri, const char* tag) { int stat = NC_NOERR; int found = 0; - int i; + size_t i; const char* modestr = NULL; NClist* modelist = NULL; @@ -467,7 +468,7 @@ int NC_join(NClist* segments, char** pathp) { int stat = NC_NOERR; - int i; + size_t i; NCbytes* buf = NULL; if(segments == NULL) diff --git a/libdispatch/nclist.c b/libdispatch/nclist.c index f2c3f4d47b..ae4177bcd1 100644 --- a/libdispatch/nclist.c +++ b/libdispatch/nclist.c @@ -1,5 +1,6 @@ /* Copyright 2018, UCAR/Unidata and OPeNDAP, Inc. See the COPYRIGHT file for more information. */ +#include #include #include #include @@ -270,7 +271,7 @@ nclistclone(const NClist* l, int deep) nclistsetlength(clone,l->length); memcpy((void*)clone->content,(void*)l->content,sizeof(void*)*l->length); } else { /*deep*/ - int i; + size_t i; for(i=0;i #include #include #include @@ -431,7 +432,7 @@ static void freestringlist(NClist* list) { if(list != NULL) { - int i; + size_t i; for(i=0;ifraglist,key); nclistpush(duri->fraglist,value); } else { - nullfree(nclistget(duri->fraglist,pos+1)); - nclistset(duri->fraglist,pos+1,strdup(value)); + nullfree(nclistget(duri->fraglist,(size_t)pos+1)); + nclistset(duri->fraglist,(size_t)pos+1,strdup(value)); } /* Rebuild the fragment */ nullfree(duri->fragment); duri->fragment = NULL; @@ -576,8 +577,8 @@ ncuriappendfragmentkey(NCURI* duri,const char* key, const char* value) nclistpush((NClist*)duri->fraglist,strdup(key)); nclistpush((NClist*)duri->fraglist,nulldup(value)); } else { - nullfree(nclistget(duri->fraglist,pos+1)); - nclistset(duri->fraglist,pos+1,nulldup(value)); + nullfree(nclistget(duri->fraglist,(size_t)pos+1)); + nclistset(duri->fraglist,(size_t)pos+1,nulldup(value)); } /* Rebuild the fragment */ nullfree(duri->fraglist); duri->fraglist = NULL; @@ -600,8 +601,8 @@ ncurisetquerykey(NCURI* duri,const char* key, const char* value) nclistpush(duri->querylist,key); nclistpush(duri->querylist,value); } else { - nullfree(nclistget(duri->querylist,pos+1)); - nclistset(duri->querylist,pos+1,strdup(value)); + nullfree(nclistget(duri->querylist,(size_t)pos+1)); + nclistset(duri->querylist,(size_t)pos+1,strdup(value)); } /* Rebuild the query */ nullfree(duri->query); duri->query = NULL; @@ -623,8 +624,8 @@ ncuriappendquerykey(NCURI* duri,const char* key, const char* value) nclistpush((NClist*)duri->querylist,strdup(key)); nclistpush((NClist*)duri->querylist,nulldup(value)); } else { - nullfree(nclistget(duri->querylist,pos+1)); - nclistset(duri->querylist,pos+1,nulldup(value)); + nullfree(nclistget(duri->querylist,(size_t)pos+1)); + nclistset(duri->querylist,(size_t)pos+1,nulldup(value)); } /* Rebuild the query */ nullfree(duri->querylist); duri->querylist = NULL; @@ -768,7 +769,7 @@ ncurifragmentlookup(NCURI* uri, const char* key) if(ensurefraglist(uri)) return NULL; i = ncfind(uri->fraglist,key); if(i < 0) return NULL; - value = nclistget(uri->fraglist,i+1); + value = nclistget(uri->fraglist,(size_t)i+1); return value; } @@ -781,7 +782,7 @@ ncuriquerylookup(NCURI* uri, const 
char* key) if(ensurequerylist(uri)) return NULL; i = ncfind(uri->querylist,key); if(i < 0) return NULL; - value = nclistget(uri->querylist,i+1); + value = nclistget(uri->querylist,(size_t)i+1); return value; } @@ -832,7 +833,7 @@ ncfind(NClist* params, const char* key) if(key == NULL) return -1; if(params == NULL) return -1; for(i=0;i #include #include "hdf5internal.h" #include "hdf5debug.h" @@ -335,7 +336,7 @@ fprintf(stderr,"\tid=%s\n",id); int NC4_hdf5_filter_lookup(NC_VAR_INFO_T* var, unsigned int id, struct NC_HDF5_Filter** specp) { - int i; + size_t i; NClist* flist = (NClist*)var->filters; if(flist == NULL) { @@ -520,7 +521,7 @@ NC4_hdf5_inq_var_filter_ids(int ncid, int varid, size_t* nfiltersp, unsigned int nfilters = nclistlength(flist); if(nfilters > 0 && ids != NULL) { - int k; + size_t k; for(k=0;kfilterid; @@ -569,7 +570,8 @@ NC4_hdf5_inq_var_filter_info(int ncid, int varid, unsigned int id, size_t* npara int NC4_hdf5_find_missing_filter(NC_VAR_INFO_T* var, unsigned int* idp) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* flist = (NClist*)var->filters; int id = 0; diff --git a/libhdf5/hdf5internal.c b/libhdf5/hdf5internal.c index 5532ed0c1d..43a8e27c1d 100644 --- a/libhdf5/hdf5internal.c +++ b/libhdf5/hdf5internal.c @@ -17,6 +17,7 @@ #include "config.h" #include "hdf5internal.h" #include "hdf5err.h" /* For BAIL2 */ +#include #ifdef _WIN32 #include #endif @@ -205,7 +206,7 @@ nc4_rec_find_hdf_type(NC_FILE_INFO_T *h5, hid_t target_hdf_typeid) { NC_TYPE_INFO_T *type; htri_t equal; - int i; + size_t i; assert(h5); diff --git a/libhdf5/hdf5open.c b/libhdf5/hdf5open.c index 0c2f5b5234..dec1bff4f2 100644 --- a/libhdf5/hdf5open.c +++ b/libhdf5/hdf5open.c @@ -18,6 +18,7 @@ #include "ncauth.h" #include "ncmodel.h" #include "ncpathmgr.h" +#include #ifdef ENABLE_BYTERANGE #include "H5FDhttp.h" @@ -2088,7 +2089,7 @@ read_type(NC_GRP_INFO_T *grp, hid_t hdf_typeid, char *type_name) return NC_EHDFERR; LOG((5, "compound type has %d members", nmembers)); type->u.c.field = nclistnew(); - nclistsetalloc(type->u.c.field,nmembers); + nclistsetalloc(type->u.c.field, (size_t)nmembers); for (m = 0; m < nmembers; m++) { @@ -2253,7 +2254,7 @@ read_type(NC_GRP_INFO_T *grp, hid_t hdf_typeid, char *type_name) if ((nmembers = H5Tget_nmembers(hdf_typeid)) < 0) return NC_EHDFERR; type->u.e.enum_member = nclistnew(); - nclistsetalloc(type->u.e.enum_member,nmembers); + nclistsetalloc(type->u.e.enum_member, (size_t)nmembers); /* Allocate space for one value. */ if (!(value = calloc(1, type_size))) @@ -2816,7 +2817,8 @@ rec_read_metadata(NC_GRP_INFO_T *grp) hid_t pid = -1; unsigned crt_order_flags = 0; H5_index_t iter_index; - int i, retval = NC_NOERR; + size_t i; + int retval = NC_NOERR; assert(grp && grp->hdr.name && grp->format_grp_info); LOG((3, "%s: grp->hdr.name %s", __func__, grp->hdr.name)); diff --git a/libhdf5/hdf5type.c b/libhdf5/hdf5type.c index 7d971dfcd5..ddd10f5381 100644 --- a/libhdf5/hdf5type.c +++ b/libhdf5/hdf5type.c @@ -14,6 +14,7 @@ #include "config.h" #include "hdf5internal.h" +#include /** * @internal Determine if two types are equal. @@ -69,11 +70,11 @@ NC4_inq_type_equal(int ncid1, nc_type typeid1, int ncid2, /* Not atomic types - so find type1 and type2 information. 
*/ if ((retval = nc4_find_nc4_grp(ncid1, &grpone))) return retval; - if (!(type1 = nclistget(grpone->nc4_info->alltypes, typeid1))) + if (!(type1 = nclistget(grpone->nc4_info->alltypes, (size_t)typeid1))) return NC_EBADTYPE; if ((retval = nc4_find_nc4_grp(ncid2, &grptwo))) return retval; - if (!(type2 = nclistget(grptwo->nc4_info->alltypes, typeid2))) + if (!(type2 = nclistget(grptwo->nc4_info->alltypes, (size_t)typeid2))) return NC_EBADTYPE; /* Are the two types equal? */ diff --git a/libhdf5/nc4hdf.c b/libhdf5/nc4hdf.c index a84abd0223..b589872fdb 100644 --- a/libhdf5/nc4hdf.c +++ b/libhdf5/nc4hdf.c @@ -22,6 +22,7 @@ #include "hdf5err.h" /* For BAIL2 */ #include "hdf5debug.h" #include +#include #ifdef HAVE_INTTYPES_H #define __STDC_FORMAT_MACROS @@ -886,7 +887,7 @@ var_create_dataset(NC_GRP_INFO_T *grp, NC_VAR_INFO_T *var, nc_bool_t write_dimid * nc_def_var_filter(). If the user * has specified a filter, it will be applied here. */ if(var->filters != NULL) { - int j; + size_t j; NClist* filters = (NClist*)var->filters; for(j=0;jhdf_typeid = H5Tcreate(H5T_COMPOUND, type->size)) < 0) return NC_EHDFERR; @@ -1255,7 +1256,7 @@ commit_type(NC_GRP_INFO_T *grp, NC_TYPE_INFO_T *type) else if (type->nc_type_class == NC_ENUM) { NC_ENUM_MEMBER_INFO_T *enum_m; - int i; + size_t i; if (nclistlength(type->u.e.enum_member) == 0) return NC_EINVAL; diff --git a/libnczarr/zarr.c b/libnczarr/zarr.c index 9e22f73d42..832b0d7c40 100644 --- a/libnczarr/zarr.c +++ b/libnczarr/zarr.c @@ -4,6 +4,7 @@ *********************************************************************/ #include "zincludes.h" +#include /**************************************************/ /* Forwards */ @@ -296,7 +297,7 @@ ncz_open_rootgroup(NC_FILE_INFO_T* dataset) static const char* controllookup(NClist* controls, const char* key) { - int i; + size_t i; for(i=0;i /* Forward */ static int NCZ_enddef(NC_FILE_INFO_T* h5); @@ -103,7 +104,7 @@ static int NCZ_enddef(NC_FILE_INFO_T* h5) { NC_VAR_INFO_T *var; - int i,j; + size_t i,j; int stat = NC_NOERR; ZTRACE(1,"h5=%s",h5->hdr.name); diff --git a/libnczarr/zfilter.c b/libnczarr/zfilter.c index e3896bf9a4..07e6d7d3bf 100644 --- a/libnczarr/zfilter.c +++ b/libnczarr/zfilter.c @@ -30,6 +30,7 @@ */ #include "config.h" +#include #include #ifdef HAVE_SYS_STAT_H @@ -287,7 +288,8 @@ static int pluginnamecheck(const char* name); int NCZ_filter_freelists(NC_VAR_INFO_T* var) { - int i, stat=NC_NOERR; + size_t i; + int stat=NC_NOERR; NClist* filters = NULL; NCZ_VAR_INFO_T* zvar = (NCZ_VAR_INFO_T*)var->format_var_info; @@ -444,7 +446,7 @@ NCZ_filter_remove(NC_VAR_INFO_T* var, unsigned int id) static int NCZ_filter_lookup(NC_VAR_INFO_T* var, unsigned int id, struct NCZ_Filter** specp) { - int i; + size_t i; NClist* flist = (NClist*)var->filters; ZTRACE(6,"var=%s id=%u",var->hdr.name,id); @@ -652,7 +654,7 @@ NCZ_inq_var_filter_ids(int ncid, int varid, size_t* nfiltersp, unsigned int* ids nfilters = nclistlength(flist); /* including incomplets */ if(nfilters > 0 && ids != NULL) { - int k; + size_t k; for(k=0;khdf5.id; @@ -842,7 +844,8 @@ NCZ_plugin_loaded(int filterid, NCZ_Plugin** pp) int NCZ_applyfilterchain(const NC_FILE_INFO_T* file, NC_VAR_INFO_T* var, NClist* chain, size_t inlen, void* indata, size_t* outlenp, void** outdatap, int encode) { - int i, stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; void* lastbuffer = NULL; /* if not null, then last allocated buffer */ ZTRACE(6,"|chain|=%u inlen=%u indata=%p encode=%d", (unsigned)nclistlength(chain), (unsigned)inlen, indata, encode); @@ -893,8 +896,9 @@ 
fprintf(stderr,">>> next: alloc=%u used=%u buf=%p\n",(unsigned)next_alloc,(unsig } } else { /* Apply in reverse order */ - for(i=nclistlength(chain)-1;i>=0;i--) { - f = (struct NCZ_Filter*)nclistget(chain,i); + int k; + for(k=(int)nclistlength(chain)-1;k>=0;k--) { + f = (struct NCZ_Filter*)nclistget(chain,(size_t)k); if(f->flags & FLAG_SUPPRESS) continue; /* this filter should not be applied */ ff = f->plugin->hdf5.filter; /* code can be simplified */ @@ -1151,7 +1155,8 @@ getentries(const char* path, NClist* contents) static int NCZ_load_all_plugins(void) { - int i,j,ret = NC_NOERR; + size_t i,j; + int ret = NC_NOERR; char* pluginroots = NULL; struct stat buf; NClist* dirs = nclistnew(); @@ -1330,7 +1335,8 @@ NCZ_split_plugin_path(const char* path0, NClist* list) static int NCZ_load_plugin_dir(const char* path) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; size_t pathlen; NClist* contents = nclistnew(); char* file = NULL; @@ -1605,7 +1611,8 @@ pluginnamecheck(const char* name) int NCZ_codec_attr(const NC_VAR_INFO_T* var, size_t* lenp, void* data) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; size_t len; char* contents = NULL; NCbytes* buf = NULL; @@ -1761,7 +1768,8 @@ rebuild_visible(const NC_VAR_INFO_T* var, NCZ_Filter* filter) int NCZ_filter_setup(NC_VAR_INFO_T* var) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* filters = NULL; ZTRACE(6,"var=%s",var->hdr.name); diff --git a/libnczarr/zmap.c b/libnczarr/zmap.c index eb808d431d..611b2fbb49 100644 --- a/libnczarr/zmap.c +++ b/libnczarr/zmap.c @@ -5,6 +5,7 @@ #include "zincludes.h" #include +#include #include "ncpathmgr.h" /**************************************************/ @@ -217,7 +218,7 @@ int nczm_join(NClist* segments, char** pathp) { int stat = NC_NOERR; - int i; + size_t i; NCbytes* buf = NULL; if(segments == NULL) diff --git a/libnczarr/zmap_file.c b/libnczarr/zmap_file.c index 4884e3eec8..4f798c4fde 100755 --- a/libnczarr/zmap_file.c +++ b/libnczarr/zmap_file.c @@ -3,6 +3,7 @@ * See netcdf/COPYRIGHT file for copying and redistribution conditions. 
*/ +#include #undef DEBUG /* Not sure this has any effect */ @@ -529,7 +530,8 @@ static int zfcreategroup(ZFMAP* zfmap, const char* key, int nskip) { int stat = NC_NOERR; - int i, len; + size_t i; + int len; char* fullpath = NULL; NCbytes* path = ncbytesnew(); NClist* segments = nclistnew(); @@ -948,7 +950,7 @@ static int platformdeleter(NCbytes* canonpath, int depth) { int ret = NC_NOERR; - int i; + size_t i; NClist* subfiles = nclistnew(); size_t tpathlen = ncbyteslength(canonpath); char* local = NULL; diff --git a/libnczarr/zsync.c b/libnczarr/zsync.c index 8a14cdc694..2407d1f2b5 100644 --- a/libnczarr/zsync.c +++ b/libnczarr/zsync.c @@ -5,6 +5,7 @@ #include "zincludes.h" #include "zfilter.h" +#include #ifndef nulldup #define nulldup(x) ((x)?strdup(x):(x)) @@ -277,7 +278,8 @@ ncz_sync_grp(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, int isclose) static int ncz_sync_var_meta(NC_FILE_INFO_T* file, NC_VAR_INFO_T* var, int isclose) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NCZ_FILE_INFO_T* zinfo = NULL; char number[1024]; NCZMAP* map = NULL; @@ -435,7 +437,7 @@ ncz_sync_var_meta(NC_FILE_INFO_T* file, NC_VAR_INFO_T* var, int isclose) if((stat = NCJaddstring(jvar,NCJ_STRING,"filters"))) goto done; #ifdef ENABLE_NCZARR_FILTERS if(nclistlength(filterchain) > 1) { - int k; + size_t k; /* jtmp holds the array of filters */ if((stat = NCJnew(NCJ_ARRAY,&jtmp))) goto done; for(k=0;kcontroller->path,grp->hdr.name,nclistlength(diminfo)); @@ -1447,7 +1450,7 @@ static int define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames) { int stat = NC_NOERR; - int i,j; + size_t i,j; NCZ_FILE_INFO_T* zinfo = NULL; NCZMAP* map = NULL; int purezarr = 0; @@ -1823,7 +1826,8 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames) static int define_subgrps(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* subgrpnames) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; ZTRACE(3,"file=%s grp=%s |subgrpnames|=%u",file->controller->path,grp->hdr.name,nclistlength(subgrpnames)); @@ -2059,7 +2063,8 @@ parse_var_dims_pure(NCZ_FILE_INFO_T* zinfo, NC_GRP_INFO_T* grp, NC_VAR_INFO_T* static int searchvars(NCZ_FILE_INFO_T* zfile, NC_GRP_INFO_T* grp, NClist* varnames) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; char* grpkey = NULL; char* varkey = NULL; char* zarray = NULL; @@ -2093,7 +2098,8 @@ searchvars(NCZ_FILE_INFO_T* zfile, NC_GRP_INFO_T* grp, NClist* varnames) static int searchsubgrps(NCZ_FILE_INFO_T* zfile, NC_GRP_INFO_T* grp, NClist* subgrpnames) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; char* grpkey = NULL; char* subkey = NULL; char* zgroup = NULL; @@ -2177,7 +2183,8 @@ Given a list of segments, find corresponding group. 
static int locategroup(NC_FILE_INFO_T* file, size_t nsegs, NClist* segments, NC_GRP_INFO_T** grpp) { - int i, j, found, stat = NC_NOERR; + size_t i, j; + int found, stat = NC_NOERR; NC_GRP_INFO_T* grp = NULL; grp = file->root_grp; @@ -2206,13 +2213,14 @@ locategroup(NC_FILE_INFO_T* file, size_t nsegs, NClist* segments, NC_GRP_INFO_T* static int parsedimrefs(NC_FILE_INFO_T* file, NClist* dimnames, size64_t* shape, NC_DIM_INFO_T** dims, int create) { - int i, stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* segments = NULL; for(i=0;i we need to create the dims in root if they do not already exist */ NCZ_FILE_INFO_T* zfile = (NCZ_FILE_INFO_T*)file->format_file_info; NCZ_VAR_INFO_T* zvar = (NCZ_VAR_INFO_T*)(var->format_var_info); diff --git a/libnczarr/ztype.c b/libnczarr/ztype.c index 7c8ecec8da..dd1bb57fa9 100644 --- a/libnczarr/ztype.c +++ b/libnczarr/ztype.c @@ -14,6 +14,7 @@ */ #include "zincludes.h" +#include /** * @internal Determine if two types are equal. @@ -69,11 +70,11 @@ NCZ_inq_type_equal(int ncid1, nc_type typeid1, int ncid2, /* Not atomic types - so find type1 and type2 information. */ if ((retval = nc4_find_nc4_grp(ncid1, &grpone))) return retval; - if (!(type1 = nclistget(grpone->nc4_info->alltypes, typeid1))) + if (!(type1 = nclistget(grpone->nc4_info->alltypes, (size_t)typeid1))) return NC_EBADTYPE; if ((retval = nc4_find_nc4_grp(ncid2, &grptwo))) return retval; - if (!(type2 = nclistget(grptwo->nc4_info->alltypes, typeid2))) + if (!(type2 = nclistget(grptwo->nc4_info->alltypes, (size_t)typeid2))) return NC_EBADTYPE; #ifdef LOOK diff --git a/libnczarr/zutil.c b/libnczarr/zutil.c index 5e10ffaf91..bca5cafeac 100644 --- a/libnczarr/zutil.c +++ b/libnczarr/zutil.c @@ -11,6 +11,7 @@ */ #include "zincludes.h" +#include #undef DEBUG @@ -125,7 +126,7 @@ NCZ_grpkey(const NC_GRP_INFO_T* grp, char** pathp) NClist* segments = nclistnew(); NCbytes* path = NULL; NC_GRP_INFO_T* parent = NULL; - int i; + size_t i; nclistinsert(segments,0,(void*)grp); parent = grp->parent; @@ -475,7 +476,8 @@ Note: need to test with "/", "", and with and without trailing "/". int NCZ_subobjects(NCZMAP* map, const char* prefix, const char* tag, char dimsep, NClist* objlist) { - int i,stat=NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* matches = nclistnew(); NCbytes* path = ncbytesnew(); diff --git a/libnczarr/zxcache.c b/libnczarr/zxcache.c index 7a469587a3..f997a14791 100644 --- a/libnczarr/zxcache.c +++ b/libnczarr/zxcache.c @@ -14,6 +14,7 @@ #include "zcache.h" #include "ncxcache.h" #include "zfilter.h" +#include #undef DEBUG @@ -423,7 +424,7 @@ constraincache(NCZChunkCache* cache, size64_t needed) /* Flush from LRU end if we are at capacity */ while(nclistlength(cache->mru) > cache->params.nelems || cache->used > final_size) { - int i; + size_t i; void* ptr; NCZCacheEntry* e = ncxcachelast(cache->xcache); /* last entry is the least recently used */ if(e == NULL) break; @@ -773,7 +774,7 @@ get_chunk(NCZChunkCache* cache, NCZCacheEntry* entry) case NC_EEMPTY: empty = 1; stat = NC_NOERR;break; default: goto done; } - entry->isfiltered = FILTERED(cache); /* Is the data being read filtered? */ + entry->isfiltered = (int)FILTERED(cache); /* Is the data being read filtered? 
*/ if(tid == NC_STRING) entry->isfixedstring = 1; /* fill cache is in char[maxstrlen] format */ } @@ -890,7 +891,7 @@ NCZ_printxcache(NCZChunkCache* cache) static char xs[20000]; NCbytes* buf = ncbytesnew(); char s[8192]; - int i; + size_t i; ncbytescat(buf,"NCZChunkCache:\n"); snprintf(s,sizeof(s),"\tvar=%s\n\tndims=%u\n\tchunksize=%u\n\tchunkcount=%u\n\tfillchunk=%p\n", @@ -916,7 +917,7 @@ NCZ_printxcache(NCZChunkCache* cache) ncbytescat(buf,"\t\t\n"); for(i=0;imru);i++) { NCZCacheEntry* e = (NCZCacheEntry*)nclistget(cache->mru,i); - snprintf(s,sizeof(s),"\t\t[%d] ",i); + snprintf(s,sizeof(s),"\t\t[%zu] ", i); ncbytescat(buf,s); if(e == NULL) ncbytescat(buf,""); diff --git a/libsrc4/nc4internal.c b/libsrc4/nc4internal.c index 2c6e64d1b3..c07bae8123 100644 --- a/libsrc4/nc4internal.c +++ b/libsrc4/nc4internal.c @@ -24,6 +24,7 @@ #include "ncdispatch.h" /* from libdispatch */ #include "ncutf8.h" #include +#include #include "ncrc.h" /** @internal Number of reserved attributes. These attributes are @@ -507,7 +508,7 @@ nc4_find_dim(NC_GRP_INFO_T *grp, int dimid, NC_DIM_INFO_T **dim, LOG((4, "%s: dimid %d", __func__, dimid)); /* Find the dim info. */ - if (!((*dim) = nclistget(grp->nc4_info->alldims, dimid))) + if (!((*dim) = nclistget(grp->nc4_info->alldims, (size_t)dimid))) return NC_EBADDIM; /* Give the caller the group the dimension is in. */ @@ -597,7 +598,7 @@ nc4_find_type(const NC_FILE_INFO_T *h5, nc_type typeid, NC_TYPE_INFO_T **type) return NC_NOERR; /* Find the type. */ - if (!(*type = nclistget(h5->alltypes,typeid))) + if (!(*type = nclistget(h5->alltypes, (size_t)typeid))) return NC_EBADTYPID; return NC_NOERR; @@ -1252,7 +1253,7 @@ field_free(NC_FIELD_INFO_T *field) int nc4_type_free(NC_TYPE_INFO_T *type) { - int i; + size_t i; assert(type && type->rc && type->hdr.name); diff --git a/libsrc4/nc4type.c b/libsrc4/nc4type.c index 1aef7ea0e7..b0361a55a5 100644 --- a/libsrc4/nc4type.c +++ b/libsrc4/nc4type.c @@ -13,6 +13,7 @@ */ #include "nc4internal.h" #include "nc4dispatch.h" +#include #if 0 #ifdef ENABLE_DAP4 @@ -187,7 +188,7 @@ NC4_inq_type(int ncid, nc_type typeid1, char *name, size_t *size) return retval; /* Find this type. */ - if (!(type = nclistget(grp->nc4_info->alltypes, typeid1))) + if (!(type = nclistget(grp->nc4_info->alltypes, (size_t)typeid1))) return NC_EBADTYPE; if (name) @@ -237,7 +238,7 @@ NC4_inq_user_type(int ncid, nc_type typeid1, char *name, size_t *size, return retval; /* Find this type. */ - if (!(type = nclistget(grp->nc4_info->alltypes, typeid1))) + if (!(type = nclistget(grp->nc4_info->alltypes, (size_t)typeid1))) return NC_EBADTYPE; /* Count the number of fields. */ @@ -316,11 +317,11 @@ NC4_inq_compound_field(int ncid, nc_type typeid1, int fieldid, char *name, return retval; /* Find this type. */ - if (!(type = nclistget(grp->nc4_info->alltypes, typeid1))) + if (!(type = nclistget(grp->nc4_info->alltypes, (size_t)typeid1))) return NC_EBADTYPE; /* Find the field. 
*/ - if (!(field = nclistget(type->u.c.field,fieldid))) + if (!(field = nclistget(type->u.c.field, (size_t)fieldid))) return NC_EBADFIELD; if (name) @@ -360,7 +361,7 @@ NC4_inq_compound_fieldindex(int ncid, nc_type typeid1, const char *name, int *fi NC_FIELD_INFO_T *field; char norm_name[NC_MAX_NAME + 1]; int retval; - int i; + size_t i; LOG((2, "nc_inq_compound_fieldindex: ncid 0x%x typeid %d name %s", ncid, typeid1, name)); @@ -422,7 +423,7 @@ NC4_inq_enum_ident(int ncid, nc_type xtype, long long value, char *identifier) NC_TYPE_INFO_T *type; NC_ENUM_MEMBER_INFO_T *enum_member; long long ll_val; - int i; + size_t i; int retval; int found; @@ -433,7 +434,7 @@ NC4_inq_enum_ident(int ncid, nc_type xtype, long long value, char *identifier) return retval; /* Find this type. */ - if (!(type = nclistget(grp->nc4_info->alltypes, xtype))) + if (!(type = nclistget(grp->nc4_info->alltypes, (size_t)xtype))) return NC_EBADTYPE; /* Complain if they are confused about the type. */ @@ -525,7 +526,7 @@ NC4_inq_enum_member(int ncid, nc_type typeid1, int idx, char *identifier, return retval; /* Find this type. */ - if (!(type = nclistget(grp->nc4_info->alltypes, typeid1))) + if (!(type = nclistget(grp->nc4_info->alltypes, (size_t)typeid1))) return NC_EBADTYPE; /* Complain if they are confused about the type. */ @@ -533,7 +534,7 @@ NC4_inq_enum_member(int ncid, nc_type typeid1, int idx, char *identifier, return NC_EBADTYPE; /* Move to the desired enum member in the list. */ - if (!(enum_member = nclistget(type->u.e.enum_member, idx))) + if (!(enum_member = nclistget(type->u.e.enum_member, (size_t)idx))) return NC_EINVAL; /* Give the people what they want. */ diff --git a/libsrc4/ncindex.c b/libsrc4/ncindex.c index 8f1aa5cfd8..250aa6c5ab 100644 --- a/libsrc4/ncindex.c +++ b/libsrc4/ncindex.c @@ -14,6 +14,7 @@ */ /* Define this for debug so that table sizes are small */ +#include #undef SMALLTABLE #undef NCNOHASH @@ -369,7 +370,7 @@ sortname(NC_SORT sort) void printindexlist(NClist* lm) { - int i; + size_t i; if(lm == NULL) { fprintf(stderr,"\n"); return; @@ -377,10 +378,10 @@ printindexlist(NClist* lm) for(i=0;i\n",(unsigned long)i); + fprintf(stderr,"[%zu] \n",i); else - fprintf(stderr,"[%ld] sort=%s name=|%s| id=%lu\n", - (unsigned long)i,sortname(o->sort),o->name,(unsigned long)o->id); + fprintf(stderr,"[%zu] sort=%s name=|%s| id=%lu\n", + i,sortname(o->sort),o->name,(unsigned long)o->id); } } diff --git a/ncdump/nc4printer.c b/ncdump/nc4printer.c index 10de3e7856..1569788e76 100644 --- a/ncdump/nc4printer.c +++ b/ncdump/nc4printer.c @@ -10,6 +10,7 @@ */ #include "config.h" +#include #include #include #include @@ -146,7 +147,7 @@ NC4print(NCbytes* buf, int ncid) static void freeNC4Printer(NC4printer* out) { - int i; + size_t i; if(out == NULL) return; @@ -612,17 +613,17 @@ getNumericValue(union NUMVALUE numvalue, nc_type base) static NCID* findType(NC4printer* out, nc_type t) { - int len = nclistlength(out->types); + size_t len = nclistlength(out->types); if(t >= len) abort(); - return (NCID*)nclistget(out->types,t); + return (NCID*)nclistget(out->types, (size_t)t); } static NCID* findDim(NC4printer* out, int dimid) { if(nclistlength(out->dims) <= dimid) abort(); - return (NCID*)nclistget(out->dims,dimid); + return (NCID*)nclistget(out->dims, (size_t)dimid); } static void diff --git a/nczarr_test/ut_map.c b/nczarr_test/ut_map.c index 005cd59c39..6bee606aff 100644 --- a/nczarr_test/ut_map.c +++ b/nczarr_test/ut_map.c @@ -322,7 +322,8 @@ readdata(void) static int searchR(NCZMAP* map, int depth, const char* 
prefix0, NClist* objects) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* matches = nclistnew(); char prefix[4096]; /* only ok because we know testdata */ size_t prefixlen; @@ -360,7 +361,8 @@ searchR(NCZMAP* map, int depth, const char* prefix0, NClist* objects) static int search(void) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NCZMAP* map = NULL; NClist* objects = nclistnew(); @@ -375,7 +377,7 @@ search(void) /* Print out the list */ for(i=0;i #undef DEBUG @@ -342,7 +343,8 @@ simpledata(void) static int searchR(NCZMAP* map, int depth, const char* prefix0, NClist* objects) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NClist* matches = nclistnew(); char prefix[4096]; /* only ok because we know testdata */ size_t prefixlen; @@ -382,7 +384,8 @@ searchR(NCZMAP* map, int depth, const char* prefix0, NClist* objects) static int search(void) { - int i,stat = NC_NOERR; + size_t i; + int stat = NC_NOERR; NCZMAP* map = NULL; NClist* objects = nclistnew(); @@ -398,7 +401,7 @@ search(void) /* Print out the list */ for(i=0;i #ifdef HAVE_CONFIG_H #include "config.h" #endif @@ -249,7 +250,7 @@ implfor(const char* path) NCURI* uri = NULL; const char* mode = NULL; NClist* segments = nclistnew(); - int i; + size_t i; NCZM_IMPL impl = NCZM_UNDEF; ncuriparse(path,&uri); @@ -318,7 +319,7 @@ objdump(void) NClist* stack = nclistnew(); char* obj = NULL; char* content = NULL; - int depth; + size_t depth; if((stat=nczmap_open(dumpoptions.impl, dumpoptions.infile, NC_NOCLOBBER, 0, NULL, &map))) goto done; @@ -327,10 +328,10 @@ objdump(void) if((stat = breadthfirst(map,"/",stack))) goto done; if(dumpoptions.debug) { - int i; + size_t i; fprintf(stderr,"stack:\n"); for(i=0;itypesize); } - printf("[%d] %s : (%llu)",depth,obj,len); + printf("[%zu] %s : (%llu)",depth,obj,len); if(kind == OK_CHUNK && dumpoptions.nctype->nctype != NC_STRING) printf(" (%s)",dumpoptions.nctype->typename); printf(" |"); @@ -378,10 +379,10 @@ objdump(void) } printf("|\n"); } else { - printf("[%d] %s : (%llu) ||\n",depth,obj,len); + printf("[%zu] %s : (%llu) ||\n",depth,obj,len); } } else { - printf("[%d] %s\n",depth,obj); + printf("[%zu] %s\n",depth,obj); } } done: diff --git a/oc2/dapparse.c b/oc2/dapparse.c index 8d039ad557..446045a812 100644 --- a/oc2/dapparse.c +++ b/oc2/dapparse.c @@ -5,6 +5,7 @@ #include "config.h" #include "dapparselex.h" #include "dapy.h" +#include /* Forward */ @@ -403,7 +404,7 @@ static NClist* scopeduplicates(NClist* list) { unsigned int i,j; - unsigned int len = nclistlength(list); + size_t len = nclistlength(list); NClist* dups = NULL; for(i=0;i static OCerror mergedas1(OCnode* dds, OCnode* das); static OCerror mergedods1(OCnode* dds, OCnode* das); @@ -417,7 +418,7 @@ static OCerror mergeother(OCnode* ddsroot, NClist* dasnodes) { OCerror stat = OC_NOERR; - int i; + size_t i; for(i=0;ifullname,das->etype,das->att.values); nclistpush(root->attributes,(void*)att); } else if(das->octype == OC_Attributeset) { - int i; + size_t i; /* Recurse */ for(i=0;isubnodes);i++) { OCnode* sub = (OCnode*)nclistget(das->subnodes,i); From b05b9be9a196ca3be267cd565e36ac249862b24a Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Mon, 27 Nov 2023 14:59:48 +0000 Subject: [PATCH 03/33] Try to more consistently use `size_t` for argument of `nclistlength` --- libdap2/cache.c | 4 +++- libdap2/cdf.c | 2 +- libdap2/constraints.c | 2 +- libdap2/daputil.c | 3 ++- libdap4/d4meta.c | 17 +++++++---------- oc2/dapparse.c | 4 ++-- oc2/ocnode.c | 8 ++++---- 7 files changed, 20 insertions(+), 20 
deletions(-) diff --git a/libdap2/cache.c b/libdap2/cache.c index aa0ddd97d2..d481704e45 100644 --- a/libdap2/cache.c +++ b/libdap2/cache.c @@ -25,7 +25,9 @@ static int iscacheableconstraint(DCEconstraint* con); int iscached(NCDAPCOMMON* nccomm, CDFnode* target, NCcachenode** cachenodep) { - int i,j,found,index; + int i, found; + size_t j; + size_t index; NCcache* cache; NCcachenode* cachenode; diff --git a/libdap2/cdf.c b/libdap2/cdf.c index 8db0bc845b..f1bb335213 100644 --- a/libdap2/cdf.c +++ b/libdap2/cdf.c @@ -71,7 +71,7 @@ computecdfnodesets(NCDAPCOMMON* nccomm, CDFtree* tree) NCerror computevarnodes(NCDAPCOMMON* nccomm, NClist* allnodes, NClist* varnodes) { - unsigned int i,len; + size_t i, len; NClist* allvarnodes = nclistnew(); for(i=0;incfullname,dumpsegments(segments)); break; default: { CDFnode* minnode = NULL; - int minpath = 0; + size_t minpath = 0; int nmin = 0; /* to catch multiple ones with same short path */ /* ok, see if one of the matches has a path that is shorter then all the others */ diff --git a/libdap2/daputil.c b/libdap2/daputil.c index 74c1791766..d679408334 100644 --- a/libdap2/daputil.c +++ b/libdap2/daputil.c @@ -255,7 +255,8 @@ nclistconcat(NClist* l1, NClist* l2) int nclistminus(NClist* l1, NClist* l2) { - unsigned int i,len,found; + size_t i, len; + int found; len = nclistlength(l2); found = 0; for(i=0;ivars);i++) { - int rank; int dimsizes[NC_MAX_VAR_DIMS]; NCD4node* field = (NCD4node*)nclistget(cmpdtype->vars,i); - rank = nclistlength(field->dims); + size_t rank = nclistlength(field->dims); if(rank == 0) { /* scalar */ NCCHECK((nc_insert_compound(group->meta.id, cmpdtype->meta.id, field->name, field->meta.offset, field->basetype->meta.id))); - } else if(rank > 0) { /* array */ + } else { /* array */ int idimsizes[NC_MAX_VAR_DIMS]; int j; getDimsizes(field,dimsizes); @@ -513,7 +512,7 @@ buildCompound(NCD4meta* builder, NCD4node* cmpdtype, NCD4node* group, char* name NCCHECK((nc_insert_array_compound(group->meta.id, cmpdtype->meta.id, field->name, field->meta.offset, field->basetype->meta.id, - rank, idimsizes))); + (int)rank, idimsizes))); } } @@ -551,14 +550,13 @@ buildStructure(NCD4meta* builder, NCD4node* structvar) { int ret = NC_NOERR; NCD4node* group; - int rank; int dimids[NC_MAX_VAR_DIMS]; /* Step 1: define the variable */ - rank = nclistlength(structvar->dims); + size_t rank = nclistlength(structvar->dims); getDimrefs(structvar,dimids); group = NCD4_groupFor(structvar); - NCCHECK((nc_def_var(group->meta.id,structvar->name,structvar->basetype->meta.id,rank,dimids,&structvar->meta.id))); + NCCHECK((nc_def_var(group->meta.id,structvar->name,structvar->basetype->meta.id,(int)rank,dimids,&structvar->meta.id))); /* Tag the var */ savevarbyid(group,structvar); @@ -575,13 +573,12 @@ buildSequence(NCD4meta* builder, NCD4node* seq) int ret = NC_NOERR; NCD4node* group; - int rank; int dimids[NC_MAX_VAR_DIMS]; - rank = nclistlength(seq->dims); + size_t rank = nclistlength(seq->dims); getDimrefs(seq,dimids); group = NCD4_groupFor(seq); - NCCHECK((nc_def_var(group->meta.id,seq->name,seq->basetype->meta.id,rank,dimids,&seq->meta.id))); + NCCHECK((nc_def_var(group->meta.id,seq->name,seq->basetype->meta.id,(int)rank,dimids,&seq->meta.id))); savevarbyid(group,seq); /* Build attributes and map attributes WRT the variable */ diff --git a/oc2/dapparse.c b/oc2/dapparse.c index 446045a812..3612b6616a 100644 --- a/oc2/dapparse.c +++ b/oc2/dapparse.c @@ -236,8 +236,8 @@ isnumber(const char* text) static void dimension(OCnode* node, NClist* dimensions) { - unsigned int i; 
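The libdap4/d4meta.c hunks above show the other half of the picture: rank is read from nclistlength() as a size_t and narrowed with a single explicit (int) cast only where the public netCDF API insists on an int ndims. A minimal sketch of that pattern; define_var_sketch() is a hypothetical helper, while nc_def_var, NC_MAX_VAR_DIMS and NC_EINVAL are the standard netCDF-C API:

#include "netcdf.h"
#include "nclist.h"

/* Sketch only: keep the natural width internally, narrow once at the API boundary. */
static int define_var_sketch(int ncid, const char* name, NClist* dimnodes, const int* dimids)
{
    size_t rank = nclistlength(dimnodes);
    int varid;
    if (rank > NC_MAX_VAR_DIMS) return NC_EINVAL;   /* guard before the narrowing cast */
    return nc_def_var(ncid, name, NC_INT, (int)rank, dimids, &varid);
}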
- unsigned int rank = nclistlength(dimensions); + size_t i; + size_t rank = nclistlength(dimensions); node->array.dimensions = (NClist*)dimensions; node->array.rank = rank; for(i=0;icontainer == NULL || node->name == NULL) continue; slen += strlen(node->name); } slen += ((len-1)*strlen(separator)); slen += 1; /* for null terminator*/ - pathname = (char*)ocmalloc((size_t)slen); + pathname = (char*)ocmalloc(slen); MEMCHECK(pathname,NULL); pathname[0] = '\0'; for(i=0;icontainer == NULL || node->name == NULL) continue; if(strlen(pathname) > 0) strcat(pathname,separator); strcat(pathname,node->name); From 4e1ff160e10bd3336b8ae54ebad98be2615564b2 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Tue, 28 Nov 2023 16:27:40 +0000 Subject: [PATCH 04/33] Change signature of `nczm_sortenvv` to take `size_t` Always called with a `size_t` and passes `n` to `qsort` which expects a `size_t` anyway --- libnczarr/zmap.c | 2 +- libnczarr/zmap.h | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/libnczarr/zmap.c b/libnczarr/zmap.c index 611b2fbb49..337a47f3e7 100644 --- a/libnczarr/zmap.c +++ b/libnczarr/zmap.c @@ -518,7 +518,7 @@ nczm_compare(const void* arg1, const void* arg2) /* quick sort a list of strings */ void -nczm_sortenvv(int n, char** envv) +nczm_sortenvv(size_t n, char** envv) { if(n <= 1) return; qsort(envv, n, sizeof(char*), nczm_compare); diff --git a/libnczarr/zmap.h b/libnczarr/zmap.h index 451959bcbf..c8e022c31f 100644 --- a/libnczarr/zmap.h +++ b/libnczarr/zmap.h @@ -136,6 +136,7 @@ of the implementation. #define ZMAP_H #include "ncexternl.h" +#include #define NCZM_SEP "/" @@ -369,7 +370,7 @@ EXTERNL int nczm_lastsegment(const char* path, char** lastp); /* bubble sorts (note arguments) */ EXTERNL void nczm_sortlist(struct NClist* l); -EXTERNL void nczm_sortenvv(int n, char** envv); +EXTERNL void nczm_sortenvv(size_t n, char** envv); EXTERNL void NCZ_freeenvv(int n, char** envv); #ifdef __cplusplus From 3f4967f216e0dc5b98250cff1ba261bce06cc43f Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 30 Nov 2023 13:19:42 +0000 Subject: [PATCH 05/33] Use built-in `printf` hex format code --- nczarr_test/zhex.c | 10 +--------- nczarr_test/zmapio.c | 9 +-------- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/nczarr_test/zhex.c b/nczarr_test/zhex.c index 972407cb46..8c300c385c 100644 --- a/nczarr_test/zhex.c +++ b/nczarr_test/zhex.c @@ -14,13 +14,10 @@ #undef DEBUG -static char hex[16] = "0123456789abcdef"; - int main(int argc, char** argv) { unsigned char c; - unsigned int c0,c1; FILE* f = NULL; if(argc > 1) { @@ -33,12 +30,7 @@ main(int argc, char** argv) for(;;) { int ret = fread(&c, 1, 1, f); if(ret != 1) break; - c1 = c; - c0 = c1 & 0xf; - c1 = (c1 >> 4); - c0 = hex[c0]; - c1 = hex[c1]; - printf("%c%c",(char)c1,(char)c0); + printf("%.2hhx", c); } if(f != stdin) fclose(f); return 0; diff --git a/nczarr_test/zmapio.c b/nczarr_test/zmapio.c index d261f8fcc9..719e6ca18d 100644 --- a/nczarr_test/zmapio.c +++ b/nczarr_test/zmapio.c @@ -441,13 +441,11 @@ breadthfirst(NCZMAP* map, const char* key, NClist* stack) return stat; } -static char hex[16] = "0123456789abcdef"; static void printcontent(size64_t len, const char* content, OBJKIND kind) { size64_t i, count; - unsigned int c0,c1; const char* format = NULL; int strlen = 1; @@ -487,12 +485,7 @@ printcontent(size64_t len, const char* content, OBJKIND kind) printf("%c",content[i]); break; default: - c1 = (unsigned char)(content[i]); - c0 = c1 & 0xf; - c1 = (c1 >> 4); - c0 = hex[c0]; - c1 = hex[c1]; - 
printf("%c%c",(char)c1,(char)c0); + printf("%.2hhx", content[i]); } } } From 507c8ab9668def8cd6039bde85ab9434547cdeab Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 30 Nov 2023 13:36:27 +0000 Subject: [PATCH 06/33] Remove some unneeded `#ifdef` guards `chunk_size` is only used if `PRINT_CHUNK_WASTE_REPORT` is defined. Also move the declaration of `chunk_size` inside the `#ifdef` to silence `set-but-unused` warning --- nczarr_test/test_zchunks2.c | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/nczarr_test/test_zchunks2.c b/nczarr_test/test_zchunks2.c index 54963761c8..d2ac669bf4 100644 --- a/nczarr_test/test_zchunks2.c +++ b/nczarr_test/test_zchunks2.c @@ -23,7 +23,6 @@ calculate_waste(int ndims, size_t *dimlen, size_t *chunksize, float *waste) int d; float chunked = 1, unchunked = 1; size_t *num_chunks; - size_t chunk_size = 1; assert(waste && dimlen && chunksize && ndims); if (!(num_chunks = calloc(ndims, sizeof(size_t)))) ERR; @@ -56,16 +55,13 @@ calculate_waste(int ndims, size_t *dimlen, size_t *chunksize, float *waste) #ifdef PRINT_CHUNK_WASTE_REPORT printf("\ndimlen\tchunksize\tnum_chunks\n"); -#endif + size_t chunk_size = 1; for (d = 0; d < ndims; d++) { -#ifdef PRINT_CHUNK_WASTE_REPORT printf("%ld\t%ld\t\t%ld\n", (long int)dimlen[d], (long int)chunksize[d], (long int)num_chunks[d]); -#endif chunk_size *= chunksize[d]; } -#ifdef PRINT_CHUNK_WASTE_REPORT printf("size of chunk: %ld elements; wasted space: %2.2f percent\n", (long int)chunk_size, *waste); #endif From fdff1eb1238864b4293a21a8a352b9dce5c93d74 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 30 Nov 2023 13:46:58 +0000 Subject: [PATCH 07/33] Silence conversion warnings in nczarr tests --- nczarr_test/ncdumpchunks.c | 11 ++++++----- nczarr_test/test_unlim_io.c | 7 ++++--- nczarr_test/test_utils.c | 8 ++++---- nczarr_test/test_utils.h | 6 +++--- nczarr_test/test_zchunks.c | 6 +++--- nczarr_test/test_zchunks2.c | 21 +++++++++++---------- nczarr_test/test_zchunks3.c | 2 +- nczarr_test/testfilter.c | 2 +- nczarr_test/testfilter_misc.c | 10 +++++----- nczarr_test/testfilter_multi.c | 2 +- nczarr_test/testfilter_order.c | 10 +++++----- nczarr_test/testfilter_repeat.c | 10 +++++----- nczarr_test/ut_map.c | 5 +++-- nczarr_test/ut_util.c | 2 +- nczarr_test/ut_util.h | 3 ++- nczarr_test/zhex.c | 3 ++- nczarr_test/zisjson.c | 5 +++-- nczarr_test/zmapio.c | 7 ++++--- 18 files changed, 64 insertions(+), 56 deletions(-) diff --git a/nczarr_test/ncdumpchunks.c b/nczarr_test/ncdumpchunks.c index ba2660465a..7381e9514c 100755 --- a/nczarr_test/ncdumpchunks.c +++ b/nczarr_test/ncdumpchunks.c @@ -1,3 +1,4 @@ +#include #ifdef HAVE_CONFIG_H #include "config.h" #endif @@ -59,7 +60,7 @@ typedef struct Format { } Format; typedef struct Odometer { - size_t rank; /*rank */ + int rank; /*rank */ size_t start[NC_MAX_VAR_DIMS]; size_t stop[NC_MAX_VAR_DIMS]; size_t max[NC_MAX_VAR_DIMS]; /* max size of ith index */ @@ -74,7 +75,7 @@ static int ncap = 0; extern int nc__testurl(const char*,char**); -Odometer* odom_new(size_t rank, const size_t* stop, const size_t* max); +Odometer* odom_new(int rank, const size_t* stop, const size_t* max); void odom_free(Odometer* odom); int odom_more(Odometer* odom); int odom_next(Odometer* odom); @@ -119,7 +120,7 @@ cleanup(void) } Odometer* -odom_new(size_t rank, const size_t* stop, const size_t* max) +odom_new(int rank, const size_t* stop, const size_t* max) { int i; Odometer* odom = NULL; @@ -150,7 +151,7 @@ odom_more(Odometer* odom) int odom_next(Odometer* odom) { - size_t i; + int i; 
for(i=odom->rank-1;i>=0;i--) { odom->index[i]++; if(odom->index[i] < odom->stop[i]) break; @@ -265,7 +266,7 @@ printchunk(Format* format, int* chunkdata, size_t indent) { size_t k[3]; int rank = format->rank; - unsigned cols[3], pos; + size_t cols[3], pos; size_t* chl = format->chunklens; memset(cols,0,sizeof(cols)); diff --git a/nczarr_test/test_unlim_io.c b/nczarr_test/test_unlim_io.c index 20a32a90bb..fa5aee5848 100644 --- a/nczarr_test/test_unlim_io.c +++ b/nczarr_test/test_unlim_io.c @@ -1,3 +1,4 @@ +#include #ifdef HAVE_CONFIG_H #include "config.h" #endif @@ -22,15 +23,15 @@ #define NDATA MAX_DATA static int data[NDATA]; -static unsigned chunkprod; -static unsigned dimprod; +static size_t chunkprod; +static size_t dimprod; static size_t datasize = 0; static int writedata(void) { int ret = NC_NOERR; - size_t i; + int i; for(i=0;idata == 0x7fffffff ? i: options->data); diff --git a/nczarr_test/test_utils.c b/nczarr_test/test_utils.c index 3afc074b0b..970fb692ea 100644 --- a/nczarr_test/test_utils.c +++ b/nczarr_test/test_utils.c @@ -193,7 +193,7 @@ getoptions(int* argcp, char*** argvp) #ifndef _WIN32 if(options->wdebug) { char s[64]; - snprintf(s,sizeof(s),"%u",options->wdebug); + snprintf(s,sizeof(s),"%d",options->wdebug); setenv("NCZ_WDEBUG",s,1); } if(options->optimize) { @@ -404,7 +404,7 @@ parsedata(const char* s0, int* data) p = strchr(q,','); if(p == NULL) {p = q+strlen(q); done=1;} *p++ = '\0'; - data[i++] = (size_t)atoi(q); + data[i++] = atoi(q); } if(s) free(s); return i; @@ -440,7 +440,7 @@ printvector64(int rank, const size64_t* vec) } Odometer* -odom_new(size_t rank, const size_t* start, const size_t* stop, const size_t* stride, const size_t* max) +odom_new(int rank, const size_t* start, const size_t* stop, const size_t* stride, const size_t* max) { size_t i; Odometer* odom = NULL; @@ -473,7 +473,7 @@ odom_more(Odometer* odom) int odom_next(Odometer* odom) { - size_t i; + int i; for(i=odom->rank-1;i>=0;i--) { odom->index[i] += odom->stride[i]; if(odom->index[i] < odom->stop[i]) break; diff --git a/nczarr_test/test_utils.h b/nczarr_test/test_utils.h index 1140cde22e..0e2d227704 100644 --- a/nczarr_test/test_utils.h +++ b/nczarr_test/test_utils.h @@ -21,7 +21,7 @@ typedef enum Op {None=0, Create=1, Read=2, Write=3, Wholechunk=4, Odom=5, Extend typedef struct Options { unsigned debug; - unsigned wdebug; + int wdebug; int optimize; int wholechunk; Op create; @@ -49,7 +49,7 @@ typedef struct Metadata { } Metadata; typedef struct Odometer { - size_t rank; /*rank */ + int rank; /*rank */ size_t start[NC_MAX_VAR_DIMS]; size_t edges[NC_MAX_VAR_DIMS]; size_t stride[NC_MAX_VAR_DIMS]; @@ -60,7 +60,7 @@ typedef struct Odometer { extern void usage(int); -EXTERNL Odometer* odom_new(size_t rank, const size_t* start, const size_t* stop, const size_t* stride, const size_t* max); +EXTERNL Odometer* odom_new(int rank, const size_t* start, const size_t* stop, const size_t* stride, const size_t* max); EXTERNL void odom_free(Odometer* odom); EXTERNL int odom_more(Odometer* odom); EXTERNL int odom_next(Odometer* odom); diff --git a/nczarr_test/test_zchunks.c b/nczarr_test/test_zchunks.c index 7307910aff..6a7df2ad3f 100644 --- a/nczarr_test/test_zchunks.c +++ b/nczarr_test/test_zchunks.c @@ -9,6 +9,7 @@ #include "ut_includes.h" #include "test_nczarr_utils.h" +#include #define DEBUGNOFILL #undef PRINT_DEFAULT_CHUNKSIZE_TABLE @@ -94,8 +95,7 @@ main(int argc, char **argv) #define NUM_DIM 4 #define NUM_TYPE 2 int ncid; - int dim_len[NUM_DIM] = {1, 100, 1000, 2000}; -// int dim_len[NUM_DIM] = {1, 50, 
100, 200}; + size_t dim_len[NUM_DIM] = {1, 100, 1000, 2000}; size_t chunksize_in[NUM_DIM]; int type_id[NUM_TYPE] = {NC_BYTE, NC_INT}; int dimid[NUM_DIM], varid[NUM_TYPE]; @@ -110,7 +110,7 @@ main(int argc, char **argv) for (d = 0; d < NUM_DIM; d++) { - sprintf(dim_name, "dim_%d", dim_len[d]); + sprintf(dim_name, "dim_%zu", dim_len[d]); #ifdef PRINT_DEFAULT_CHUNKSIZE_TABLE printf("creating dim[%d] %s = %d\n", d, dim_name, dim_len[d]); #endif diff --git a/nczarr_test/test_zchunks2.c b/nczarr_test/test_zchunks2.c index d2ac669bf4..5c22c0036d 100644 --- a/nczarr_test/test_zchunks2.c +++ b/nczarr_test/test_zchunks2.c @@ -9,6 +9,7 @@ #include "ut_includes.h" #include "test_nczarr_utils.h" +#include #define FILE_NAME "tst_chunks2" #define MAX_WASTE 25.0 @@ -37,13 +38,13 @@ calculate_waste(int ndims, size_t *dimlen, size_t *chunksize, float *waste) for (num_chunks[d] = 0; (num_chunks[d] * chunksize[d]) < (dimlen[d] ? dimlen[d] : 1); num_chunks[d]++) ; - chunked *= (num_chunks[d] * chunksize[d]); + chunked *= (float)(num_chunks[d] * chunksize[d]); } /* Calculate the minimum space required for this data * (i.e. unchunked) or one record of it. */ for (d = 0; d < ndims; d++) - unchunked *= (dimlen[d] ? dimlen[d] : 1); + unchunked *= (dimlen[d] ? (float)dimlen[d] : 1); #ifdef PRINT_CHUNK_WASTE_REPORT printf("size for unchunked %g elements; size for chunked %g elements\n", @@ -51,7 +52,7 @@ calculate_waste(int ndims, size_t *dimlen, size_t *chunksize, float *waste) #endif /* Percent of the chunked file that is wasted space. */ - *waste = ((float)(chunked - unchunked) / (float)chunked) * 100.0f; + *waste = ((chunked - unchunked) / chunked) * 100.0f; #ifdef PRINT_CHUNK_WASTE_REPORT printf("\ndimlen\tchunksize\tnum_chunks\n"); @@ -359,7 +360,7 @@ main(int argc, char **argv) /* Create a few dimensions. */ for (d = 0; d < NDIMS3; d++) { - dim_len[d] = rand(); + dim_len[d] = (size_t)rand(); sprintf(dim_name, "dim_%d", d); if (nc_def_dim(ncid, dim_name, dim_len[d], &dimids[d])) ERR; } @@ -393,9 +394,9 @@ main(int argc, char **argv) { if (nc_create(itoptions.path, NC_NETCDF4 | NC_CLOBBER, &ncid)) ERR; - dim_len[0] = rand(); - dim_len[1] = rand(); - dim_len[2] = rand() % 1000; + dim_len[0] = (size_t)rand(); + dim_len[1] = (size_t)rand(); + dim_len[2] = (size_t)rand() % 1000; /* Create a few dimensions. */ for (d = 0; d < NDIMS3; d++) { @@ -432,9 +433,9 @@ main(int argc, char **argv) { if (nc_create(itoptions.path, NC_NETCDF4 | NC_CLOBBER, &ncid)) ERR; - dim_len[0] = rand(); - dim_len[1] = rand() % 1000; - dim_len[2] = rand() % 1000; + dim_len[0] = (size_t)rand(); + dim_len[1] = (size_t)rand() % 1000; + dim_len[2] = (size_t)rand() % 1000; /* Create a few dimensions. 
*/ for (d = 0; d < NDIMS3; d++) { diff --git a/nczarr_test/test_zchunks3.c b/nczarr_test/test_zchunks3.c index fa0c575194..0f84eccc12 100644 --- a/nczarr_test/test_zchunks3.c +++ b/nczarr_test/test_zchunks3.c @@ -66,7 +66,7 @@ main(int argc, char** argv) /* fvar is unchanged */ for(i=0; i < NVALS; i++) { - fvar_data[i] = NVALS - i; + fvar_data[i] = (float)(NVALS - i); } if ((ret=nc_put_var(ncid, fvarid, fvar_data))) LERR; diff --git a/nczarr_test/testfilter.c b/nczarr_test/testfilter.c index e459f28738..ee599a0542 100644 --- a/nczarr_test/testfilter.c +++ b/nczarr_test/testfilter.c @@ -65,7 +65,7 @@ netcdf bzip2 { static size_t dimsize = DIMSIZE; static size_t chunksize = CHUNKSIZE; -static size_t actualdims = NDIMS; +static int actualdims = NDIMS; static size_t actualproduct = 1; /* x-product over dim sizes */ static size_t chunkproduct = 1; /* x-product over chunksizes */ diff --git a/nczarr_test/testfilter_misc.c b/nczarr_test/testfilter_misc.c index 85fdf3df9d..b54c9864fc 100644 --- a/nczarr_test/testfilter_misc.c +++ b/nczarr_test/testfilter_misc.c @@ -51,7 +51,7 @@ static size_t dimsize[NDIMS] = {4,4,4,4}; static size_t chunksize[NDIMS] = {4,4,4,4}; #endif -static size_t ndims = NDIMS; +static int ndims = NDIMS; static size_t totalproduct = 1; /* x-product over max dims */ static size_t actualproduct = 1; /* x-product over actualdims */ @@ -513,8 +513,8 @@ odom_offset(void) int i; int offset = 0; for(i=0;i extern int parseslices(const char* s0, int* nslicesp, NCZSlice* slices); extern int parsedimdef(const char* s0, Dimdef** defp); extern int parsevardef(const char* s0, NClist* dimdefs, Vardef** varp); @@ -17,7 +18,7 @@ extern void freeranges(NCZChunkRange* ranges); extern void freeslices(NCZSlice* slices); extern void freestringvec(char** vec); extern void freeprojvector(int rank, NCZProjection** vec); -extern int ut_typesize(nc_type t); +extern size_t ut_typesize(nc_type t); extern nc_type ut_typeforname(const char* tname); extern NCZM_IMPL kind2impl(const char* kind); extern const char* impl2kind(NCZM_IMPL impl); diff --git a/nczarr_test/zhex.c b/nczarr_test/zhex.c index 8c300c385c..97f0f141e3 100644 --- a/nczarr_test/zhex.c +++ b/nczarr_test/zhex.c @@ -7,6 +7,7 @@ #include "stdlib.h" #include "stdio.h" +#include #ifdef HAVE_UNISTD_H #include @@ -28,7 +29,7 @@ main(int argc, char** argv) f = stdin; for(;;) { - int ret = fread(&c, 1, 1, f); + size_t ret = fread(&c, 1, 1, f); if(ret != 1) break; printf("%.2hhx", c); } diff --git a/nczarr_test/zisjson.c b/nczarr_test/zisjson.c index e13c69f23f..256dad82ff 100644 --- a/nczarr_test/zisjson.c +++ b/nczarr_test/zisjson.c @@ -8,6 +8,7 @@ */ +#include #ifdef HAVE_UNISTD_H #include #endif @@ -93,7 +94,7 @@ main(int argc, char** argv) int stat = NC_NOERR; char text[MAXREAD+1]; NCjson* json = NULL; - int i, red, c; + int i, c; FILE* f = NULL; nc_initialize(); @@ -127,7 +128,7 @@ main(int argc, char** argv) /* Read json from stdin */ for(i=0;;i++) { unsigned char c; - red = fread(&c, 1, 1, f); + size_t red = fread(&c, 1, 1, f); if(red != 1) break; if(i < MAXREAD) text[i] = (char)c; } diff --git a/nczarr_test/zmapio.c b/nczarr_test/zmapio.c index 719e6ca18d..53a1612b42 100644 --- a/nczarr_test/zmapio.c +++ b/nczarr_test/zmapio.c @@ -3,6 +3,8 @@ * See netcdf/COPYRIGHT file for copying and redistribution conditions. 
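A related detail from the zhex.c and zisjson.c hunks above: fread() returns a size_t, so its result is now stored unsigned and compared against the requested element count, which is the sort of mismatch the earlier `int ret = fread(...)` was being warned about. A standalone sketch combining that with the %.2hhx formatting introduced in patch 05:

#include <stdio.h>

int main(void)
{
    unsigned char c;
    /* fread() returns a size_t element count; keep it unsigned and compare it
     * against the count requested instead of checking for a negative value. */
    while (fread(&c, 1, 1, stdin) == 1)
        printf("%.2hhx", c);   /* two hex digits per byte, as in the zhex.c hunk */
    putchar('\n');
    return 0;
}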
*/ +#include "ncconfigure.h" +#include #ifdef HAVE_CONFIG_H #include "config.h" #endif @@ -54,7 +56,7 @@ static struct Mops { static struct Type { const char* typename; nc_type nctype; - int typesize; + size_t typesize; const char format[16]; } types[] = { {"ubyte",NC_UBYTE,1,"%u"}, @@ -448,12 +450,11 @@ printcontent(size64_t len, const char* content, OBJKIND kind) size64_t i, count; const char* format = NULL; - int strlen = 1; + size64_t strlen = (size64_t)dumpoptions.strlen; format = dumpoptions.nctype->format; if(dumpoptions.format[0] != '\0') format = dumpoptions.format; - strlen = dumpoptions.strlen; count = len; #ifdef DEBUG From 5f6def8b6bca3424533c50dfa398e833b7b16061 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 30 Nov 2023 14:01:34 +0000 Subject: [PATCH 08/33] Fix conditional that will always be true `options->file` is allocated on the stack and so will never be `NULL`, making this conditional always true. Instead, we want to check the value of `file`. As `options` is allocated with `calloc`, we know that `file` will be zero-initialised, and it's safe to just check the first element --- nczarr_test/test_utils.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nczarr_test/test_utils.c b/nczarr_test/test_utils.c index 970fb692ea..d13b880186 100644 --- a/nczarr_test/test_utils.c +++ b/nczarr_test/test_utils.c @@ -165,7 +165,7 @@ getoptions(int* argcp, char*** argvp) } /* Figure out the FORMATX for this file */ - if(options->file) { + if(options->file[0]) { NCURI* uri = NULL; ncuriparse(options->file,&uri); if(uri == NULL) { /* not a url */ From 3e374d8c71ee4ccf8e587b0c60157a2071943a43 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 30 Nov 2023 14:07:07 +0000 Subject: [PATCH 09/33] Remove some unused code from `ut_util` library --- nczarr_test/ut_test.c | 3 --- nczarr_test/ut_test.h | 1 - nczarr_test/ut_util.c | 38 -------------------------------------- nczarr_test/ut_util.h | 1 - 4 files changed, 43 deletions(-) diff --git a/nczarr_test/ut_test.c b/nczarr_test/ut_test.c index 0a4cdefe6a..8092eea15c 100755 --- a/nczarr_test/ut_test.c +++ b/nczarr_test/ut_test.c @@ -94,9 +94,6 @@ ut_init(int argc, char** argv, struct UTOptions * options) case 's': /*slices*/ if((stat=parseslices(optarg,&options->nslices,options->slices))) usage(THROW(stat)); break; - case 'W': /*walk data*/ - options->idatalen = parseintvector(optarg,4,(void**)&options->idata); - break; case '?': fprintf(stderr,"unknown option: '%c'\n",c); stat = NC_EINVAL; diff --git a/nczarr_test/ut_test.h b/nczarr_test/ut_test.h index efe26c3658..01dcf79c90 100644 --- a/nczarr_test/ut_test.h +++ b/nczarr_test/ut_test.h @@ -53,7 +53,6 @@ struct UTOptions { NCZSlice slices[NC_MAX_VAR_DIMS]; NClist* dimdefs; /*List */ NClist* vardefs; /*List */ - size_t idatalen; int* idata; }; diff --git a/nczarr_test/ut_util.c b/nczarr_test/ut_util.c index a956693d4a..2d566811f0 100644 --- a/nczarr_test/ut_util.c +++ b/nczarr_test/ut_util.c @@ -199,44 +199,6 @@ parsestringvector(const char* s0, int stopchar, char*** namesp) return nelems; } -int -parseintvector(const char* s0, int typelen, void** vectorp) -{ - int count,nchars,nelems,index; - const char* s = NULL; - void* vector = NULL; - - /* First, compute number of elements */ - for(s=s0,nelems=1;*s;s++) { - if(*s == ',') nelems++; - } - - vector = calloc(nelems,typelen); - - /* Extract the elements of the vector */ - /* Skip any leading bracketchar */ - s=s0; - if(strchr(OPEN,*s0) != NULL) s++; - for(index=0;*s;index++) { - long long elem; - nchars = -1; - count = 
sscanf(s,"%lld%n",&elem,&nchars); - if(nchars == -1 || count != 1) return THROW(NC_EINVAL); - s += nchars; - if(*s == ',') s++; - switch (typelen) { - case 1: ((char*)vector)[index] = (char)elem; break; - case 2: ((short*)vector)[index] = (short)elem; break; - case 4: ((int*)vector)[index] = (int)elem; break; - case 8: ((long long*)vector)[index] = (long long)elem; break; - default: abort(); - } - } - assert(nelems == index); - if(vectorp) *vectorp = vector; - return nelems; -} - void freedimdefs(NClist* defs) { diff --git a/nczarr_test/ut_util.h b/nczarr_test/ut_util.h index 3454a512ee..51b3eab279 100644 --- a/nczarr_test/ut_util.h +++ b/nczarr_test/ut_util.h @@ -11,7 +11,6 @@ extern int parseslices(const char* s0, int* nslicesp, NCZSlice* slices); extern int parsedimdef(const char* s0, Dimdef** defp); extern int parsevardef(const char* s0, NClist* dimdefs, Vardef** varp); extern int parsestringvector(const char* s0, int stopchar, char*** namesp); -extern int parseintvector(const char* s0, int typelen, void** vectorp); extern void freedimdefs(NClist* defs); extern void freevardefs(NClist* defs); extern void freeranges(NCZChunkRange* ranges); From e995c2e6969aba82e2ce3f4e713f78b6f8a1d853 Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Fri, 5 Jan 2024 10:06:32 -0600 Subject: [PATCH 10/33] moving the version into the project command in cmake --- CMakeLists.txt | 6 ++---- include/netcdf_meta.h.in | 6 +++--- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a925a88d51..6330f21e92 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -13,6 +13,7 @@ project(netCDF LANGUAGES C CXX HOMEPAGE_URL "https://www.unidata.ucar.edu/software/netcdf/" DESCRIPTION "NetCDF is a set of software libraries and machine-independent data formats that support the creation, access, and sharing of array-oriented scientific data." +VERSION 4.9.3 ) set(PACKAGE "netCDF" CACHE STRING "") @@ -35,11 +36,8 @@ endif () # http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning ##### -SET(NC_VERSION_MAJOR 4) -SET(NC_VERSION_MINOR 9) -SET(NC_VERSION_PATCH 3) SET(NC_VERSION_NOTE "-development") -SET(netCDF_VERSION ${NC_VERSION_MAJOR}.${NC_VERSION_MINOR}.${NC_VERSION_PATCH}${NC_VERSION_NOTE}) +SET(netCDF_VERSION ${PROJECT_VERSION}${NC_VERSION_NOTE}) SET(VERSION ${netCDF_VERSION}) SET(NC_VERSION ${netCDF_VERSION}) SET(PACKAGE_VERSION ${VERSION}) diff --git a/include/netcdf_meta.h.in b/include/netcdf_meta.h.in index 8f24e759f9..eda9081d51 100644 --- a/include/netcdf_meta.h.in +++ b/include/netcdf_meta.h.in @@ -23,9 +23,9 @@ #ifndef NETCDF_META_H #define NETCDF_META_H -#define NC_VERSION_MAJOR @NC_VERSION_MAJOR@ /*!< netcdf-c major version. */ -#define NC_VERSION_MINOR @NC_VERSION_MINOR@ /*!< netcdf-c minor version. */ -#define NC_VERSION_PATCH @NC_VERSION_PATCH@ /*!< netcdf-c patch version. */ +#define NC_VERSION_MAJOR @netCDF_VERSION_MAJOR@ /*!< netcdf-c major version. */ +#define NC_VERSION_MINOR @netCDF_VERSION_MINOR@ /*!< netcdf-c minor version. */ +#define NC_VERSION_PATCH @netCDF_VERSION_PATCH@ /*!< netcdf-c patch version. */ #define NC_VERSION_NOTE "@NC_VERSION_NOTE@" /*!< netcdf-c note. May be blank. */ /*! netcdf-c version string. 
From ce2d5d9045dec457ab637172fb10f2f0335629ee Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Mon, 8 Jan 2024 10:29:14 -0600 Subject: [PATCH 11/33] matching cmake variables in autotools configuration --- configure.ac | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/configure.ac b/configure.ac index 3c5b7e0a9e..740ba7fce0 100644 --- a/configure.ac +++ b/configure.ac @@ -19,9 +19,9 @@ AC_INIT([netCDF],[4.9.3-development],[support-netcdf@unidata.ucar.edu],[netcdf-c ## : ${CFLAGS=""} -AC_SUBST([NC_VERSION_MAJOR]) NC_VERSION_MAJOR=4 -AC_SUBST([NC_VERSION_MINOR]) NC_VERSION_MINOR=9 -AC_SUBST([NC_VERSION_PATCH]) NC_VERSION_PATCH=3 +AC_SUBST([netCDF_VERSION_MAJOR]) netCDF_VERSION_MAJOR=4 +AC_SUBST([netCDF_VERSION_MINOR]) netCDF_VERSION_MINOR=9 +AC_SUBST([netCDF_VERSION_PATCH]) netCDF_VERSION_PATCH=3 AC_SUBST([NC_VERSION_NOTE]) NC_VERSION_NOTE="-development" ## From 9b5eccc636a4bf7f5e6db62e504616b34f2f6775 Mon Sep 17 00:00:00 2001 From: Anthony Islas Date: Mon, 8 Jan 2024 22:36:13 +0000 Subject: [PATCH 12/33] Define USE_SZIP variable for nc-config.cmake.in --- CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index a925a88d51..dac3adee30 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1158,6 +1158,7 @@ ENDIF() set(STD_FILTERS "deflate") # Always have deflate*/ set_std_filter(Szip) SET(HAVE_SZ ${Szip_FOUND}) +SET(USE_SZIP ${HAVE_SZ}) set_std_filter(Blosc) IF(Zstd_FOUND) set_std_filter(Zstd) From efc20c64d2655e8602d7d46af62bdef649a8360e Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 10 Jan 2024 15:18:15 -0600 Subject: [PATCH 13/33] putting dependencies into separate file --- CMakeLists.txt | 593 ----------------------------------- cmake/dependencies.cmake | 652 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 652 insertions(+), 593 deletions(-) create mode 100644 cmake/dependencies.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index a925a88d51..9d53ad0228 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -129,9 +129,7 @@ INCLUDE(CheckCSourceCompiles) INCLUDE(TestBigEndian) INCLUDE(CheckSymbolExists) INCLUDE(GetPrerequisites) - INCLUDE(CheckCCompilerFlag) -FIND_PACKAGE(PkgConfig QUIET) # A check to see if the system is big endian TEST_BIG_ENDIAN(BIGENDIAN) @@ -168,11 +166,6 @@ MACRO(CHECK_C_LINKER_FLAG M_FLAG M_RESULT) SET(CMAKE_REQUIRED_FLAGS "${T_REQ_FLAG}") ENDMACRO() -# Enable 'dist and distcheck'. -# File adapted from http://ensc.de/cmake/FindMakeDist.cmake -FIND_PACKAGE(MakeDist) -# End 'enable dist and distcheck' - # Set the build type. IF(NOT CMAKE_BUILD_TYPE) SET(CMAKE_BUILD_TYPE DEBUG CACHE STRING "Choose the type of build, options are: None, Debug, Release." @@ -515,65 +508,6 @@ SET(ENABLE_DAP OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP=NO" SET(ENABLE_DAP4 OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP4=NO" FORCE) ENDIF() -IF(ENABLE_HDF4) - SET(USE_HDF4 ON) - # Check for include files, libraries. 
- - FIND_PATH(MFHDF_H_INCLUDE_DIR mfhdf.h) - IF(NOT MFHDF_H_INCLUDE_DIR) - MESSAGE(FATAL_ERROR "HDF4 Support specified, cannot find file mfhdf.h") - ELSE() - INCLUDE_DIRECTORIES(${MFHDF_H_INCLUDE_DIR}) - ENDIF() - - FIND_LIBRARY(HDF4_DF_LIB NAMES df libdf hdf) - IF(NOT HDF4_DF_LIB) - MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 df library.") - ENDIF() - - FIND_LIBRARY(HDF4_MFHDF_LIB NAMES mfhdf libmfhdf) - IF(NOT HDF4_MFHDF_LIB) - MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 mfhdf library.") - ENDIF() - - SET(HAVE_LIBMFHDF TRUE) - - SET(HDF4_LIBRARIES ${HDF4_DF_LIB} ${HDF4_MFHDF_LIB}) - # End include files, libraries. - MESSAGE(STATUS "HDF4 libraries: ${HDF4_DF_LIB}, ${HDF4_MFHDF_LIB}") - - MESSAGE(STATUS "Seeking HDF4 jpeg dependency.") - - # Look for the jpeglib.h header file. - FIND_PATH(JPEGLIB_H_INCLUDE_DIR jpeglib.h) - IF(NOT JPEGLIB_H_INCLUDE_DIR) - MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find jpeglib.h") - ELSE() - SET(HAVE_JPEGLIB_H ON CACHE BOOL "") - SET(HAVE_LIBJPEG TRUE) - INCLUDE_DIRECTORIES(${JPEGLIB_H_INCLUDE_DIR}) - ENDIF() - - FIND_LIBRARY(JPEG_LIB NAMES jpeg libjpeg) - IF(NOT JPEG_LIB) - MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find libjpeg") - ENDIF() - SET(HDF4_LIBRARIES ${JPEG_LIB} ${HDF4_LIBRARIES}) - MESSAGE(STATUS "Found JPEG libraries: ${JPEG_LIB}") - - # Option to enable HDF4 file tests. - OPTION(ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." ON) - IF(ENABLE_HDF4_FILE_TESTS) - FIND_PROGRAM(PROG_CURL NAMES curl) - IF(PROG_CURL) - SET(USE_HDF4_FILE_TESTS ON) - ELSE() - MESSAGE(STATUS "Unable to locate 'curl'. Disabling hdf4 file tests.") - SET(USE_HDF4_FILE_TESTS OFF) - ENDIF() - ENDIF() -ENDIF() - # Option to Build DLL IF(WIN32) OPTION(ENABLE_DLL "Build a Windows DLL." ${BUILD_SHARED_LIBS}) @@ -629,424 +563,7 @@ ENDIF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING) # 3. is nczarr enabled? # We need separate flags for cases 1 and 2 -## -# Option to Enable HDF5 -# -# The HDF5 cmake variables differ between platform (linux/osx and Windows), -# as well as between HDF5 versions. As a result, this section is a bit convoluted. -# -# Note that the behavior seems much more stable across HDF5 versions under linux, -# so we do not have to do as much version-based tweaking. -# -# At the end of it, we should have the following defined: -# -# * HDF5_C_LIBRARY -# * HDF5_HL_LIBRARY -# * HDF5_LIBRARIES -# * HDF5_INCLUDE_DIR -# * -## SET(USE_HDF5 ${ENABLE_HDF5}) -IF(USE_HDF5) - - ## - # Assert HDF5 version meets minimum required version. - ## - SET(HDF5_VERSION_REQUIRED 1.8.10) - - - ## - # Accommodate developers who have hdf5 libraries and - # headers on their system, but do not have a the hdf - # .cmake files. If this is the case, they should - # specify HDF5_HL_LIBRARY, HDF5_LIBRARY, HDF5_INCLUDE_DIR manually. - # - # This script will attempt to determine the version of the HDF5 library programatically. 
- ## - IF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) - SET(HDF5_LIBRARIES ${HDF5_C_LIBRARY} ${HDF5_HL_LIBRARY}) - SET(HDF5_C_LIBRARIES ${HDF5_C_LIBRARY}) - SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_LIBRARY}) - SET(HDF5_HL_LIBRARIES ${HDF5_HL_LIBRARY}) - INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR}) - MESSAGE(STATUS "Using HDF5 C Library: ${HDF5_C_LIBRARY}") - MESSAGE(STATUS "Using HDF5 HL LIbrary: ${HDF5_HL_LIBRARY}") - if (EXISTS "${HDF5_INCLUDE_DIR}/H5pubconf.h") - file(READ "${HDF5_INCLUDE_DIR}/H5pubconf.h" _hdf5_version_lines - REGEX "#define[ \t]+H5_VERSION") - string(REGEX REPLACE ".*H5_VERSION .*\"\(.*\)\".*" "\\1" _hdf5_version "${_hdf5_version_lines}") - set(HDF5_VERSION "${_hdf5_version}" CACHE STRING "") - unset(_hdf5_version) - unset(_hdf5_version_lines) - endif () - MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") - ### - # If HDF5_VERSION is still empty, we have a problem. - # Error out. - ### - IF("${HDF5_VERSION}" STREQUAL "") - MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") - ENDIF() - - ### - # Now that we know HDF5_VERSION isn't empty, we can check for minimum required version, - # and toggle various options. - ### - IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) - MESSAGE(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") - ENDIF() - - ELSE(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) # We are seeking out HDF5 with Find Package. - ### - # For now we assume that if we are building netcdf - # as a shared library, we will use hdf5 as a shared - # library. If we are building netcdf statically, - # we will use a static library. This can be toggled - # by explicitly modifying NC_FIND_SHARED_LIBS. - ## - IF(NC_FIND_SHARED_LIBS) - SET(NC_HDF5_LINK_TYPE "shared") - SET(NC_HDF5_LINK_TYPE_UPPER "SHARED") - ADD_DEFINITIONS(-DH5_BUILT_AS_DYNAMIC_LIB) - ELSE(NC_FIND_SHARED_LIBS) - SET(NC_HDF5_LINK_TYPE "static") - SET(NC_HDF5_LINK_TYPE_UPPER "STATIC") - ADD_DEFINITIONS(-DH5_BUILT_AS_STATIC_LIB) - ENDIF(NC_FIND_SHARED_LIBS) - - ##### - # First, find the C and HL libraries. - # - # This has been updated to reflect what is in the hdf5 - # examples, even though the previous version of what we - # had worked. - ##### - IF(MSVC) - SET(SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME}) - FIND_PACKAGE(HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS C HL CONFIG REQUIRED ${NC_HDF5_LINK_TYPE}) - ELSE(MSVC) - FIND_PACKAGE(HDF5 COMPONENTS C HL REQUIRED) - ENDIF(MSVC) - - ## - # Next, check the HDF5 version. This will inform which - # HDF5 variables we need to munge. - ## - - # Some versions of HDF5 set HDF5_VERSION_STRING instead of HDF5_VERSION - IF(HDF5_VERSION_STRING AND NOT HDF5_VERSION) - SET(HDF5_VERSION ${HDF5_VERSION_STRING}) - ENDIF() - - - ### - # If HDF5_VERSION is undefined, attempt to determine it programatically. - ### - IF("${HDF5_VERSION}" STREQUAL "") - MESSAGE(STATUS "HDF5_VERSION not detected. 
Attempting to determine programatically.") - IF (EXISTS "${HDF5_INCLUDE_DIR}/H5pubconf.h") - file(READ "${HDF5_INCLUDE_DIR}/H5pubconf.h" _hdf5_version_lines - REGEX "#define[ \t]+H5_VERSION") - string(REGEX REPLACE ".*H5_VERSION .*\"\(.*\)\".*" "\\1" _hdf5_version "${_hdf5_version_lines}") - set(HDF5_VERSION "${_hdf5_version}" CACHE STRING "") - unset(_hdf5_version) - unset(_hdf5_version_lines) - MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") - ENDIF() - ELSE() - SET(HDF5_VERSION ${HDF5_VERSION} CACHE STRING "") - ENDIF() - - ### - # If HDF5_VERSION is still empty, we have a problem. - # Error out. - ### - IF("${HDF5_VERSION}" STREQUAL "") - MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") - ENDIF() - - ### - # Now that we know HDF5_VERSION isn't empty, we can check for minimum required version, - # and toggle various options. - ### - - IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) - MESSAGE(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") - ENDIF() - - - - ## - # Include the HDF5 include directory. - ## - IF(HDF5_INCLUDE_DIRS AND NOT HDF5_INCLUDE_DIR) - SET(HDF5_INCLUDE_DIR ${HDF5_INCLUDE_DIRS}) - ENDIF() - MESSAGE(STATUS "Using HDF5 include dir: ${HDF5_INCLUDE_DIR}") - INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR}) - - ### - # This is the block where we figure out what the appropriate - # variables are, and we ensure that we end up with - # HDF5_C_LIBRARY, HDF5_HL_LIBRARY and HDF5_LIBRARIES. - ### - IF(MSVC) - #### - # Environmental variables in Windows when using MSVC - # are a hot mess between versions. - #### - - ## - # HDF5 1.8.15 defined HDF5_LIBRARIES. - ## - IF(${HDF5_VERSION} VERSION_LESS "1.8.16") - SET(HDF5_C_LIBRARY hdf5) - SET(HDF5_C_LIBRARY_hdf5 hdf5) - ENDIF(${HDF5_VERSION} VERSION_LESS "1.8.16") - - IF(${HDF5_VERSION} VERSION_GREATER "1.8.15") - IF(NOT HDF5_LIBRARIES AND HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY AND HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY) - SET(HDF5_C_LIBRARY ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY}) - SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY}) - SET(HDF5_HL_LIBRARY ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY}) - - SET(HDF5_LIBRARIES ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY}) - ENDIF() - ENDIF(${HDF5_VERSION} VERSION_GREATER "1.8.15") - - ELSE(MSVC) - - # Depending on the install, either HDF5_hdf_library or - # HDF5_C_LIBRARIES may be defined. We must check for either. - IF(HDF5_C_LIBRARIES AND NOT HDF5_hdf5_LIBRARY) - SET(HDF5_hdf5_LIBRARY ${HDF5_C_LIBRARIES}) - ENDIF() - - # Some versions of FIND_PACKAGE set HDF5_C_LIBRARIES, but not HDF5_C_LIBRARY - # We use HDF5_C_LIBRARY below, so need to make sure it is set. - IF(HDF5_C_LIBRARIES AND NOT HDF5_C_LIBRARY) - SET(HDF5_C_LIBRARY ${HDF5_C_LIBRARIES}) - ENDIF() - - # Same issue as above... 
- IF(HDF5_HL_LIBRARIES AND NOT HDF5_HL_LIBRARY) - SET(HDF5_HL_LIBRARY ${HDF5_HL_LIBRARIES}) - ENDIF() - - ENDIF(MSVC) - IF(NOT HDF5_C_LIBRARY) - SET(HDF5_C_LIBRARY hdf5) - ENDIF() - - ENDIF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) - - FIND_PACKAGE(Threads) - - # There is a missing case in the above code so default it - IF(NOT HDF5_C_LIBRARY_hdf5 OR "${HDF5_C_LIBRARY_hdf5}" STREQUAL "" ) - SET(HDF5_C_LIBRARY_hdf5 "${HDF5_C_LIBRARY}") - ENDIF() - - FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) - IF(NOT HAVE_HDF5_H) - MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.") - ELSE(NOT HAVE_HDF5_H) - INCLUDE_DIRECTORIES(${HAVE_HDF5_H}) - ENDIF(NOT HAVE_HDF5_H) - - set (CMAKE_REQUIRED_INCLUDES ${HDF5_INCLUDE_DIR}) - - # Check to ensure that HDF5 was built with zlib. - # This needs to be near the beginning since we - # need to know whether to add "-lz" to the symbol - # tests below. - CHECK_C_SOURCE_COMPILES("#include - #if !H5_HAVE_ZLIB_H - #error - #endif - int main() { - int x = 1;}" HAVE_HDF5_ZLIB) - IF(NOT HAVE_HDF5_ZLIB) - MESSAGE(FATAL_ERROR "HDF5 was built without zlib. Rebuild HDF5 with zlib.") - ELSE() - # If user has specified the `ZLIB_LIBRARY`, use it; otherwise try to find... - IF(NOT ZLIB_LIBRARY) - find_package(ZLIB) - IF(ZLIB_FOUND) - SET(ZLIB_LIBRARY ${ZLIB_LIBRARIES}) - ELSE() - MESSAGE(FATAL_ERROR "HDF5 Requires ZLIB, but cannot find libz.") - ENDIF() - ENDIF() - SET(CMAKE_REQUIRED_LIBRARIES ${ZLIB_LIBRARY} ${CMAKE_REQUIRED_LIBRARIES}) - MESSAGE(STATUS "HDF5 has zlib.") - ENDIF() - - #Check to see if H5Z_SZIP exists in HDF5_Libraries. If so, we must use szip library. - CHECK_C_SOURCE_COMPILES("#include - #if !H5_HAVE_FILTER_SZIP - #error - #endif - int main() { - int x = 1;}" USE_HDF5_SZIP) - IF(USE_HDF5_SZIP) - SET(HAVE_H5Z_SZIP yes) - ENDIF() - - #### - # Check to see if HDF5 library is 1.10.6 or greater. - # Used to control path name conversion - #### - IF(${HDF5_VERSION} VERSION_GREATER "1.10.5") - SET(HDF5_UTF8_PATHS ON) - ELSE() - SET(HDF5_UTF8_PATHS OFF) - ENDIF() - - MESSAGE("-- HDF5_UTF8_PATHS (HDF5 version 1.10.6+): ${HDF5_UTF8_PATHS}") - - # Find out if HDF5 was built with parallel support. - # Do that by checking for the targets H5Pget_fapl_mpiposx and - # H5Pget_fapl_mpio in ${HDF5_LIB}. - - # H5Pset_fapl_mpiposix and H5Pget_fapl_mpiposix have been removed since HDF5 1.8.12. - # Use H5Pset_fapl_mpio and H5Pget_fapl_mpio, instead. - # CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpiposix "" HDF5_IS_PARALLEL_MPIPOSIX) - - CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpio "" HDF5_IS_PARALLEL_MPIO) - IF(HDF5_IS_PARALLEL_MPIO) - SET(HDF5_PARALLEL ON) - ELSE() - SET(HDF5_PARALLEL OFF) - ENDIF() - - #Check to see if HDF5 library has collective metadata APIs, (HDF5 >= 1.10.0) - CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_all_coll_metadata_ops "" HDF5_HAS_COLL_METADATA_OPS) - - IF(HDF5_PARALLEL) - SET(HDF5_CC h5pcc) - ELSE() - SET(HDF5_CC h5cc) - ENDIF() - - # Check to see if H5Dread_chunk is available - CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Dread_chunk "" HAS_READCHUNKS) - - # Check to see if H5Pset_fapl_ros3 is available - CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_fapl_ros3 "" HAS_HDF5_ROS3) - - # Check to see if this is hdf5-1.10.3 or later. 
- IF(HAS_READCHUNKS) - SET(HDF5_SUPPORTS_PAR_FILTERS ON) - SET(ENABLE_NCDUMPCHUNKS ON) - ENDIF() - - # Record if ROS3 Driver is available - IF(HAS_HDF5_ROS3) - SET(ENABLE_HDF5_ROS3 ON) - ENDIF() - - IF (HDF5_SUPPORTS_PAR_FILTERS) - SET(HDF5_HAS_PAR_FILTERS TRUE CACHE BOOL "") - SET(HAS_PAR_FILTERS yes CACHE STRING "") - ELSE() - SET(HDF5_HAS_PAR_FILTERS FALSE CACHE BOOL "") - SET(HAS_PAR_FILTERS no CACHE STRING "") - ENDIF() - - FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) - IF(NOT HAVE_HDF5_H) - MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.") - ELSE(NOT HAVE_HDF5_H) - INCLUDE_DIRECTORIES(${HAVE_HDF5_H}) - ENDIF(NOT HAVE_HDF5_H) - - #option to include HDF5 High Level header file (hdf5_hl.h) in case we are not doing a make install - INCLUDE_DIRECTORIES(${HDF5_HL_INCLUDE_DIR}) - -ENDIF(USE_HDF5) - -# See if we have libcurl -FIND_PACKAGE(CURL) -ADD_DEFINITIONS(-DCURL_STATICLIB=1) -INCLUDE_DIRECTORIES(${CURL_INCLUDE_DIRS}) - -# Define a test flag for have curl library -IF(CURL_LIBRARIES OR CURL_LIBRARY) - SET(FOUND_CURL TRUE) -ELSE() - SET(FOUND_CURL FALSE) -ENDIF() - -# Start disabling if curl not found -IF(NOT FOUND_CURL) - MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling") - SET(ENABLE_REMOTE_FUNCTIONALITY OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling" FORCE) -ENDIF() - -set (CMAKE_REQUIRED_INCLUDES ${CURL_INCLUDE_DIRS}) -# Check to see if we have libcurl 7.66 or later -CHECK_C_SOURCE_COMPILES(" -#include -int main() { -#if LIBCURL_VERSION_NUM < 0x074200 - choke me; -#endif -}" HAVE_LIBCURL_766) - -IF (HAVE_LIBCURL_766) - # If libcurl version is >= 7.66, then can skip tests - # for these symbols which were added in an earlier version - set(HAVE_CURLOPT_USERNAME TRUE) - set(HAVE_CURLOPT_PASSWORD TRUE) - set(HAVE_CURLOPT_KEYPASSWD TRUE) - set(HAVE_CURLINFO_RESPONSE_CODE TRUE) - set(HAVE_CURLINFO_HTTP_CONNECTCODE TRUE) - set(HAVE_CURLOPT_BUFFERSIZE TRUE) - set(HAVE_CURLOPT_KEEPALIVE TRUE) -ELSE() - # Check to see if CURLOPT_USERNAME is defined. - # It is present starting version 7.19.1. - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLOPT_USERNAME;}" HAVE_CURLOPT_USERNAME) - - # Check to see if CURLOPT_PASSWORD is defined. - # It is present starting version 7.19.1. - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLOPT_PASSWORD;}" HAVE_CURLOPT_PASSWORD) - - # Check to see if CURLOPT_KEYPASSWD is defined. - # It is present starting version 7.16.4. - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLOPT_KEYPASSWD;}" HAVE_CURLOPT_KEYPASSWD) - - # Check to see if CURLINFO_RESPONSE_CODE is defined. - # It showed up in curl 7.10.7. - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLINFO_RESPONSE_CODE;}" HAVE_CURLINFO_RESPONSE_CODE) - - # Check to see if CURLINFO_HTTP_CONNECTCODE is defined. - # It showed up in curl 7.10.7. - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLINFO_HTTP_CONNECTCODE;}" HAVE_CURLINFO_HTTP_CONNECTCODE) - - # Check to see if CURLOPT_BUFFERSIZE is defined. - # It is present starting version 7.59 - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLOPT_BUFFERSIZE;}" HAVE_CURLOPT_BUFFERSIZE) - - # Check to see if CURLOPT_TCP_KEEPALIVE is defined. 
- # It is present starting version 7.25 - CHECK_C_SOURCE_COMPILES(" - #include - int main() {int x = CURLOPT_TCP_KEEPALIVE;}" HAVE_CURLOPT_KEEPALIVE) -ENDIF() IF(ENABLE_DAP) SET(USE_DAP ON CACHE BOOL "") @@ -1072,22 +589,6 @@ MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_BYTERANGE=NO") SET(ENABLE_BYTERANGE OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_BYTERANGE=NO" FORCE) ENDIF() - -# Check for the math library so it can be explicitly linked. -IF(NOT WIN32) - FIND_LIBRARY(HAVE_LIBM NAMES math m libm) - IF(NOT HAVE_LIBM) - CHECK_FUNCTION_EXISTS(exp HAVE_LIBM_FUNC) - IF(NOT HAVE_LIBM_FUNC) - MESSAGE(FATAL_ERROR "Unable to find the math library.") - ELSE(NOT HAVE_LIBM_FUNC) - SET(HAVE_LIBM "") - ENDIF() - ELSE(NOT HAVE_LIBM) - MESSAGE(STATUS "Found Math library: ${HAVE_LIBM}") - ENDIF() -ENDIF() - # Option to Enable DAP long tests, remote tests. OPTION(ENABLE_DAP_REMOTE_TESTS "Enable DAP remote tests." ON) OPTION(ENABLE_EXTERNAL_SERVER_TESTS "Enable external Server remote tests." OFF) @@ -1102,16 +603,6 @@ ENDIF() SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") -# See if we have zlib -FIND_PACKAGE(ZLIB) - -# Define a test flag for have zlib library -IF(ZLIB_FOUND) - INCLUDE_DIRECTORIES(${ZLIB_INCLUDE_DIRS}) - SET(ENABLE_ZLIB TRUE) -ELSE() - SET(ENABLE_ZLIB FALSE) -ENDIF() macro(set_std_filter filter) # Upper case the filter name @@ -1139,39 +630,6 @@ OPTION(ENABLE_FILTER_SZIP "Enable use of Szip compression library if it is avai OPTION(ENABLE_FILTER_BZ2 "Enable use of Bz2 compression library if it is available." ON) OPTION(ENABLE_FILTER_BLOSC "Enable use of blosc compression library if it is available." ON) OPTION(ENABLE_FILTER_ZSTD "Enable use of Zstd compression library if it is available." ON) -IF (ENABLE_FILTER_SZIP) - FIND_PACKAGE(Szip) -ELSEIF(ENABLE_NCZARR) - FIND_PACKAGE(Szip) -ENDIF() -IF (ENABLE_FILTER_BZ2) - FIND_PACKAGE(Bz2) -ENDIF() -IF (ENABLE_FILTER_BLOSC) - FIND_PACKAGE(Blosc) -ENDIF() -IF (ENABLE_FILTER_ZSTD) - FIND_PACKAGE(Zstd) -ENDIF() - -# Accumulate standard filters -set(STD_FILTERS "deflate") # Always have deflate*/ -set_std_filter(Szip) -SET(HAVE_SZ ${Szip_FOUND}) -set_std_filter(Blosc) -IF(Zstd_FOUND) - set_std_filter(Zstd) - SET(HAVE_ZSTD ON) -ENDIF() -IF(Bz2_FOUND) - set_std_filter(Bz2) -ELSE() - # The reason we use a local version is to support a more comples test case - MESSAGE("libbz2 not found using built-in version") - SET(HAVE_LOCAL_BZ2 ON) - SET(HAVE_BZ2 ON CACHE BOOL "") - set(STD_FILTERS "${STD_FILTERS} bz2") -ENDIF() # If user wants, then install selected plugins (default on) SET(PLUGIN_INSTALL_DIR "NO" CACHE STRING "Whether and where we should install plugins; defaults to yes") @@ -1237,10 +695,6 @@ ENDIF() # Try to enable NCZarr zip support OPTION(ENABLE_NCZARR_ZIP "Enable NCZarr ZIP support." OFF) -IF (ENABLE_NCZARR_ZIP) - FIND_PACKAGE(Zip REQUIRED) - INCLUDE_DIRECTORIES(${Zip_INCLUDE_DIRS}) -ENDIF () # libdl is always available; built-in in Windows and OSX OPTION(ENABLE_PLUGINS "Enable dynamically loaded plugins (default on)." ON) @@ -1326,36 +780,6 @@ IF(NOT ENABLE_REMOTE_FUNCTIONALITY) SET(WITH_S3_TESTING OFF CACHE STRING "" FORCE) ENDIF() -# Note we check for the library after checking for enable_s3 -# because for some reason this screws up if we unconditionally test for sdk -# and it is not available. 
Fix someday -IF(ENABLE_S3) - IF(NOT ENABLE_S3_INTERNAL) - # See if aws-s3-sdk is available - find_package(AWSSDK REQUIRED COMPONENTS s3;transfer) - IF(AWSSDK_FOUND) - SET(ENABLE_S3_AWS ON CACHE BOOL "S3 AWS" FORCE) - INCLUDE_DIRECTORIES(${AWSSDK_INCLUDE_DIR}) - ELSE(AWSSDK_FOUND) - SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) - ENDIF(AWSSDK_FOUND) - ELSE(NOT ENABLE_S3_INTERNAL) - # Find crypto libraries required with testing with the internal s3 api. - #FIND_LIBRARY(SSL_LIB NAMES ssl openssl) - find_package(OpenSSL REQUIRED) - IF(NOT OpenSSL_FOUND) - MESSAGE(FATAL_ERROR "Can't find an ssl library, required by S3_INTERNAL") - ENDIF(NOT OpenSSL_FOUND) - - #find_package(Crypto REQUIRED) - #IF(NOT CRYPTO_LIB) - # MESSAGE(FATAL_ERROR "Can't find a crypto library, required by S3_INTERNAL") - #ENDIF(NOT CRYPTO_LIB) - - ENDIF(NOT ENABLE_S3_INTERNAL) -ELSE() - SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) -ENDIF() IF(ENABLE_S3) IF(NOT ENABLE_S3_AWS AND NOT ENABLE_S3_INTERNAL) @@ -1378,19 +802,6 @@ ENDIF() OPTION(ENABLE_LIBXML2 "Link against libxml2 if it is available, use the packaged tinyxml2 parser otherwise." ON) SET(XMLPARSER "tinyxml2 (bundled)") -# see if we have libxml2 - -IF(ENABLE_LIBXML2) - find_package(LibXml2) - IF(LibXml2_FOUND) - SET(HAVE_LIBXML2 TRUE) - INCLUDE_DIRECTORIES(${LIBXML2_INCLUDE_DIRS}) - SET(XMLPARSER "libxml2") - ELSE() - SET(HAVE_LIBXML2 FALSE) - ENDIF() -ENDIF(ENABLE_LIBXML2) - IF(NOT ENABLE_BYTERANGE AND ENABLE_HDF5_ROS3) MESSAGE(WARNING "ROS3 support requires ENABLE_BYTERANGE=TRUE; disabling ROS3 support") SET(ENABLE_HDF5_ROS3 OFF CACHE BOOL "ROS3 support" FORCE) @@ -1515,7 +926,6 @@ IF(ENABLE_PARALLEL4 AND ENABLE_HDF5) SET(USE_PARALLEL OFF CACHE BOOL "") MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. Disabling parallel build.") ELSE() - FIND_PACKAGE(MPI REQUIRED) SET(HDF5_PARALLEL ON CACHE BOOL "") SET(USE_PARALLEL ON CACHE BOOL "") SET(USE_PARALLEL4 ON CACHE BOOL "") @@ -1540,8 +950,6 @@ IF(ENABLE_PNETCDF) # Check for ncmpi_create in libpnetcdf, define USE_PNETCDF # Does the user want to turn on PnetCDF read ability? SET(USE_PNETCDF ON CACHE BOOL "") - FIND_LIBRARY(PNETCDF NAMES pnetcdf) - FIND_PATH(PNETCDF_INCLUDE_DIR pnetcdf.h) IF(NOT PNETCDF) MESSAGE(STATUS "Cannot find PnetCDF library. Disabling PnetCDF support.") SET(USE_PNETCDF OFF CACHE BOOL "") @@ -1695,7 +1103,6 @@ SET(ENABLE_CLIENTSIDE_FILTERS OFF) # Determine whether or not to generate documentation. OPTION(ENABLE_DOXYGEN "Enable generation of doxygen-based documentation." OFF) IF(ENABLE_DOXYGEN) - FIND_PACKAGE(Doxygen REQUIRED) # Offer the option to build internal documentation. OPTION(ENABLE_INTERNAL_DOCS "Build internal documentation. This is of interest to developers only." OFF) IF(ENABLE_INTERNAL_DOCS) diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake new file mode 100644 index 0000000000..3616331a5c --- /dev/null +++ b/cmake/dependencies.cmake @@ -0,0 +1,652 @@ +################################ +# PkgConfig +################################ +FIND_PACKAGE(PkgConfig QUIET) + +################################ +# MakeDist +################################ +# Enable 'dist and distcheck'. +# File adapted from http://ensc.de/cmake/FindMakeDist.cmake +FIND_PACKAGE(MakeDist) +# End 'enable dist and distcheck' + +################################ +# HDF4 +################################ +IF(ENABLE_HDF4) + SET(USE_HDF4 ON ) + # Check for include files, libraries. 
+ + FIND_PATH(MFHDF_H_INCLUDE_DIR mfhdf.h) + IF(NOT MFHDF_H_INCLUDE_DIR) + MESSAGE(FATAL_ERROR "HDF4 Support specified, cannot find file mfhdf.h") + ELSE() + INCLUDE_DIRECTORIES(${MFHDF_H_INCLUDE_DIR}) + ENDIF() + + FIND_LIBRARY(HDF4_DF_LIB NAMES df libdf hdf) + IF(NOT HDF4_DF_LIB) + MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 df library.") + ENDIF() + + FIND_LIBRARY(HDF4_MFHDF_LIB NAMES mfhdf libmfhdf) + IF(NOT HDF4_MFHDF_LIB) + MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 mfhdf library.") + ENDIF() + + SET(HAVE_LIBMFHDF TRUE ) + + SET(HDF4_LIBRARIES ${HDF4_DF_LIB} ${HDF4_MFHDF_LIB} ) + # End include files, libraries. + MESSAGE(STATUS "HDF4 libraries: ${HDF4_DF_LIB}, ${HDF4_MFHDF_LIB}") + + MESSAGE(STATUS "Seeking HDF4 jpeg dependency.") + + # Look for the jpeglib.h header file. + FIND_PATH(JPEGLIB_H_INCLUDE_DIR jpeglib.h) + IF(NOT JPEGLIB_H_INCLUDE_DIR) + MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find jpeglib.h") + ELSE() + SET(HAVE_JPEGLIB_H ON CACHE BOOL "") + SET(HAVE_LIBJPEG TRUE ) + INCLUDE_DIRECTORIES(${JPEGLIB_H_INCLUDE_DIR}) + ENDIF() + + FIND_LIBRARY(JPEG_LIB NAMES jpeg libjpeg) + IF(NOT JPEG_LIB) + MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find libjpeg") + ENDIF() + SET(HDF4_LIBRARIES ${JPEG_LIB} ${HDF4_LIBRARIES} ) + MESSAGE(STATUS "Found JPEG libraries: ${JPEG_LIB}") + + # Option to enable HDF4 file tests. + OPTION(ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." ON) + IF(ENABLE_HDF4_FILE_TESTS) + FIND_PROGRAM(PROG_CURL NAMES curl) + IF(PROG_CURL) + SET(USE_HDF4_FILE_TESTS ON ) + ELSE() + MESSAGE(STATUS "Unable to locate 'curl'. Disabling hdf4 file tests.") + SET(USE_HDF4_FILE_TESTS OFF ) + ENDIF() + SET(USE_HDF4_FILE_TESTS ${USE_HDF4_FILE_TESTS} ) + ENDIF() +ENDIF() + +################################ +# HDF5 +################################ +## +# Option to Enable HDF5 +# +# The HDF5 cmake variables differ between platform (linux/osx and Windows), +# as well as between HDF5 versions. As a result, this section is a bit convoluted. +# +# Note that the behavior seems much more stable across HDF5 versions under linux, +# so we do not have to do as much version-based tweaking. +# +# At the end of it, we should have the following defined: +# +# * HDF5_C_LIBRARY +# * HDF5_HL_LIBRARY +# * HDF5_LIBRARIES +# * HDF5_INCLUDE_DIR +# * +## +IF(USE_HDF5) + + ## + # Assert HDF5 version meets minimum required version. + ## + SET(HDF5_VERSION_REQUIRED 1.8.10) + + + ## + # Accommodate developers who have hdf5 libraries and + # headers on their system, but do not have a the hdf + # .cmake files. If this is the case, they should + # specify HDF5_HL_LIBRARY, HDF5_LIBRARY, HDF5_INCLUDE_DIR manually. + # + # This script will attempt to determine the version of the HDF5 library programatically. 
+ ## + IF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) + SET(HDF5_LIBRARIES ${HDF5_C_LIBRARY} ${HDF5_HL_LIBRARY} ) + SET(HDF5_C_LIBRARIES ${HDF5_C_LIBRARY} ) + SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_LIBRARY} ) + SET(HDF5_HL_LIBRARIES ${HDF5_HL_LIBRARY} ) + INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR}) + MESSAGE(STATUS "Using HDF5 C Library: ${HDF5_C_LIBRARY}") + MESSAGE(STATUS "Using HDF5 HL LIbrary: ${HDF5_HL_LIBRARY}") + if (EXISTS "${HDF5_INCLUDE_DIR}/H5pubconf.h") + file(READ "${HDF5_INCLUDE_DIR}/H5pubconf.h" _hdf5_version_lines + REGEX "#define[ \t]+H5_VERSION") + string(REGEX REPLACE ".*H5_VERSION .*\"\(.*\)\".*" "\\1" _hdf5_version "${_hdf5_version_lines}") + set(HDF5_VERSION "${_hdf5_version}" CACHE STRING "") + set(HDF5_VERSION ${HDF5_VERSION} ) + unset(_hdf5_version) + unset(_hdf5_version_lines) + endif () + MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") + ### + # If HDF5_VERSION is still empty, we have a problem. + # Error out. + ### + IF("${HDF5_VERSION}" STREQUAL "") + MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") + ENDIF() + + ### + # Now that we know HDF5_VERSION isn't empty, we can check for minimum required version, + # and toggle various options. + ### + IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) + MESSAGE(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") + ENDIF() + + ELSE(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) # We are seeking out HDF5 with Find Package. + ### + # For now we assume that if we are building netcdf + # as a shared library, we will use hdf5 as a shared + # library. If we are building netcdf statically, + # we will use a static library. This can be toggled + # by explicitly modifying NC_FIND_SHARED_LIBS. + ## + IF(NC_FIND_SHARED_LIBS) + SET(NC_HDF5_LINK_TYPE "shared") + SET(NC_HDF5_LINK_TYPE_UPPER "SHARED") + ADD_DEFINITIONS(-DH5_BUILT_AS_DYNAMIC_LIB) + ELSE(NC_FIND_SHARED_LIBS) + SET(NC_HDF5_LINK_TYPE "static") + SET(NC_HDF5_LINK_TYPE_UPPER "STATIC") + ADD_DEFINITIONS(-DH5_BUILT_AS_STATIC_LIB ) + ENDIF(NC_FIND_SHARED_LIBS) + + ##### + # First, find the C and HL libraries. + # + # This has been updated to reflect what is in the hdf5 + # examples, even though the previous version of what we + # had worked. + ##### + IF(MSVC) + SET(SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME}) + FIND_PACKAGE(HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS C HL CONFIG REQUIRED ${NC_HDF5_LINK_TYPE}) + ELSE(MSVC) + FIND_PACKAGE(HDF5 COMPONENTS C HL REQUIRED) + ENDIF(MSVC) + + ## + # Next, check the HDF5 version. This will inform which + # HDF5 variables we need to munge. + ## + + # Some versions of HDF5 set HDF5_VERSION_STRING instead of HDF5_VERSION + IF(HDF5_VERSION_STRING AND NOT HDF5_VERSION) + SET(HDF5_VERSION ${HDF5_VERSION_STRING}) + ENDIF() + + + ### + # If HDF5_VERSION is undefined, attempt to determine it programatically. + ### + IF("${HDF5_VERSION}" STREQUAL "") + MESSAGE(STATUS "HDF5_VERSION not detected. 
Attempting to determine programatically.") + IF (EXISTS "${HDF5_INCLUDE_DIR}/H5pubconf.h") + file(READ "${HDF5_INCLUDE_DIR}/H5pubconf.h" _hdf5_version_lines + REGEX "#define[ \t]+H5_VERSION") + string(REGEX REPLACE ".*H5_VERSION .*\"\(.*\)\".*" "\\1" _hdf5_version "${_hdf5_version_lines}") + set(HDF5_VERSION "${_hdf5_version}" CACHE STRING "") + unset(_hdf5_version) + unset(_hdf5_version_lines) + MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") + ENDIF() + ELSE() + SET(HDF5_VERSION ${HDF5_VERSION} CACHE STRING "") + ENDIF() + + ### + # If HDF5_VERSION is still empty, we have a problem. + # Error out. + ### + IF("${HDF5_VERSION}" STREQUAL "") + MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") + ENDIF() + + ### + # Now that we know HDF5_VERSION isn't empty, we can check for minimum required version, + # and toggle various options. + ### + + IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) + MESSAGE(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") + ENDIF() + + + + ## + # Include the HDF5 include directory. + ## + IF(HDF5_INCLUDE_DIRS AND NOT HDF5_INCLUDE_DIR) + SET(HDF5_INCLUDE_DIR ${HDF5_INCLUDE_DIRS} ) + ENDIF() + MESSAGE(STATUS "Using HDF5 include dir: ${HDF5_INCLUDE_DIR}") + INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR}) + + ### + # This is the block where we figure out what the appropriate + # variables are, and we ensure that we end up with + # HDF5_C_LIBRARY, HDF5_HL_LIBRARY and HDF5_LIBRARIES. + ### + IF(MSVC) + #### + # Environmental variables in Windows when using MSVC + # are a hot mess between versions. + #### + + ## + # HDF5 1.8.15 defined HDF5_LIBRARIES. + ## + IF(${HDF5_VERSION} VERSION_LESS "1.8.16") + SET(HDF5_C_LIBRARY hdf5 ) + SET(HDF5_C_LIBRARY_hdf5 hdf5 ) + ENDIF(${HDF5_VERSION} VERSION_LESS "1.8.16") + + IF(${HDF5_VERSION} VERSION_GREATER "1.8.15") + IF(NOT HDF5_LIBRARIES AND HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY AND HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY) + SET(HDF5_C_LIBRARY ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + SET(HDF5_HL_LIBRARY ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + + SET(HDF5_LIBRARIES ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + ENDIF() + ENDIF(${HDF5_VERSION} VERSION_GREATER "1.8.15") + + ELSE(MSVC) + + # Depending on the install, either HDF5_hdf_library or + # HDF5_C_LIBRARIES may be defined. We must check for either. + IF(HDF5_C_LIBRARIES AND NOT HDF5_hdf5_LIBRARY) + SET(HDF5_hdf5_LIBRARY ${HDF5_C_LIBRARIES} ) + ENDIF() + + # Some versions of FIND_PACKAGE set HDF5_C_LIBRARIES, but not HDF5_C_LIBRARY + # We use HDF5_C_LIBRARY below, so need to make sure it is set. + IF(HDF5_C_LIBRARIES AND NOT HDF5_C_LIBRARY) + SET(HDF5_C_LIBRARY ${HDF5_C_LIBRARIES} ) + ENDIF() + + # Same issue as above... 
+ IF(HDF5_HL_LIBRARIES AND NOT HDF5_HL_LIBRARY) + SET(HDF5_HL_LIBRARY ${HDF5_HL_LIBRARIES} ) + ENDIF() + + ENDIF(MSVC) + IF(NOT HDF5_C_LIBRARY) + SET(HDF5_C_LIBRARY hdf5 ) + ENDIF() + + ENDIF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) + + FIND_PACKAGE(Threads) + + # There is a missing case in the above code so default it + IF(NOT HDF5_C_LIBRARY_hdf5 OR "${HDF5_C_LIBRARY_hdf5}" STREQUAL "" ) + SET(HDF5_C_LIBRARY_hdf5 "${HDF5_C_LIBRARY}" ) + ENDIF() + + FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) + IF(NOT HAVE_HDF5_H) + MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.") + ELSE(NOT HAVE_HDF5_H) + INCLUDE_DIRECTORIES(${HAVE_HDF5_H}) + ENDIF(NOT HAVE_HDF5_H) + + set (CMAKE_REQUIRED_INCLUDES ${HDF5_INCLUDE_DIR}) + + # Check to ensure that HDF5 was built with zlib. + # This needs to be near the beginning since we + # need to know whether to add "-lz" to the symbol + # tests below. + CHECK_C_SOURCE_COMPILES("#include + #if !H5_HAVE_ZLIB_H + #error + #endif + int main() { + int x = 1;}" HAVE_HDF5_ZLIB) + IF(NOT HAVE_HDF5_ZLIB) + MESSAGE(FATAL_ERROR "HDF5 was built without zlib. Rebuild HDF5 with zlib.") + ELSE() + # If user has specified the `ZLIB_LIBRARY`, use it; otherwise try to find... + IF(NOT ZLIB_LIBRARY) + find_package(ZLIB) + IF(ZLIB_FOUND) + SET(ZLIB_LIBRARY ${ZLIB_LIBRARIES} ) + ELSE() + MESSAGE(FATAL_ERROR "HDF5 Requires ZLIB, but cannot find libz.") + ENDIF() + ENDIF() + SET(CMAKE_REQUIRED_LIBRARIES ${ZLIB_LIBRARY} ${CMAKE_REQUIRED_LIBRARIES} ) + MESSAGE(STATUS "HDF5 has zlib.") + ENDIF() + + #Check to see if H5Z_SZIP exists in HDF5_Libraries. If so, we must use szip library. + CHECK_C_SOURCE_COMPILES("#include + #if !H5_HAVE_FILTER_SZIP + #error + #endif + int main() { + int x = 1;}" USE_HDF5_SZIP) + IF(USE_HDF5_SZIP) + SET(HAVE_H5Z_SZIP yes ) + ENDIF() + + #### + # Check to see if HDF5 library is 1.10.6 or greater. + # Used to control path name conversion + #### + IF(${HDF5_VERSION} VERSION_GREATER "1.10.5") + SET(HDF5_UTF8_PATHS ON ) + ELSE() + SET(HDF5_UTF8_PATHS OFF ) + ENDIF() + + MESSAGE("-- HDF5_UTF8_PATHS (HDF5 version 1.10.6+): ${HDF5_UTF8_PATHS}") + + # Find out if HDF5 was built with parallel support. + # Do that by checking for the targets H5Pget_fapl_mpiposx and + # H5Pget_fapl_mpio in ${HDF5_LIB}. + + # H5Pset_fapl_mpiposix and H5Pget_fapl_mpiposix have been removed since HDF5 1.8.12. + # Use H5Pset_fapl_mpio and H5Pget_fapl_mpio, instead. + # CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpiposix "" HDF5_IS_PARALLEL_MPIPOSIX) + + CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpio "" HDF5_IS_PARALLEL_MPIO) + IF(HDF5_IS_PARALLEL_MPIO) + SET(HDF5_PARALLEL ON) + ELSE() + SET(HDF5_PARALLEL OFF) + ENDIF() + SET(HDF5_PARALLEL ${HDF5_PARALLEL} ) + + #Check to see if HDF5 library has collective metadata APIs, (HDF5 >= 1.10.0) + CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_all_coll_metadata_ops "" HDF5_HAS_COLL_METADATA_OPS) + + IF(HDF5_PARALLEL) + SET(HDF5_CC h5pcc ) + ELSE() + SET(HDF5_CC h5cc ) + ENDIF() + + # Check to see if H5Dread_chunk is available + CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Dread_chunk "" HAS_READCHUNKS) + + # Check to see if H5Pset_fapl_ros3 is available + CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_fapl_ros3 "" HAS_HDF5_ROS3) + + # Check to see if this is hdf5-1.10.3 or later. 
+  IF(HAS_READCHUNKS)
+    SET(HDF5_SUPPORTS_PAR_FILTERS ON )
+    SET(ENABLE_NCDUMPCHUNKS ON )
+  ENDIF()
+
+  # Record if ROS3 Driver is available
+  IF(HAS_HDF5_ROS3)
+    SET(ENABLE_HDF5_ROS3 ON )
+  ENDIF()
+
+  IF (HDF5_SUPPORTS_PAR_FILTERS)
+    SET(HDF5_HAS_PAR_FILTERS TRUE CACHE BOOL "" )
+    SET(HAS_PAR_FILTERS yes CACHE STRING "" )
+  ELSE()
+    SET(HDF5_HAS_PAR_FILTERS FALSE CACHE BOOL "" )
+    SET(HAS_PAR_FILTERS no CACHE STRING "" )
+  ENDIF()
+
+  FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH)
+  IF(NOT HAVE_HDF5_H)
+    MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.")
+  ELSE(NOT HAVE_HDF5_H)
+    INCLUDE_DIRECTORIES(${HAVE_HDF5_H})
+  ENDIF(NOT HAVE_HDF5_H)
+
+  #option to include HDF5 High Level header file (hdf5_hl.h) in case we are not doing a make install
+  INCLUDE_DIRECTORIES(${HDF5_HL_INCLUDE_DIR})
+
+ENDIF(USE_HDF5)
+
+################################
+# Curl
+################################
+# See if we have libcurl
+FIND_PACKAGE(CURL)
+ADD_DEFINITIONS(-DCURL_STATICLIB=1)
+INCLUDE_DIRECTORIES(${CURL_INCLUDE_DIRS})
+
+# Define a test flag for have curl library
+IF(CURL_LIBRARIES OR CURL_LIBRARY)
+  SET(FOUND_CURL TRUE)
+ELSE()
+  SET(FOUND_CURL FALSE)
+ENDIF()
+SET(FOUND_CURL ${FOUND_CURL} TRUE )
+
+# Start disabling if curl not found
+IF(NOT FOUND_CURL)
+  MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling")
+  SET(ENABLE_REMOTE_FUNCTIONALITY OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling" FORCE )
+ENDIF()
+
+set (CMAKE_REQUIRED_INCLUDES ${CURL_INCLUDE_DIRS})
+# Check to see if we have libcurl 7.66 or later
+CHECK_C_SOURCE_COMPILES("
+#include <curl/curl.h>
+int main() {
+#if LIBCURL_VERSION_NUM < 0x074200
+ choke me;
+#endif
+}" HAVE_LIBCURL_766)
+
+IF (HAVE_LIBCURL_766)
+  # If libcurl version is >= 7.66, then can skip tests
+  # for these symbols which were added in an earlier version
+  set(HAVE_CURLOPT_USERNAME TRUE)
+  set(HAVE_CURLOPT_PASSWORD TRUE)
+  set(HAVE_CURLOPT_KEYPASSWD TRUE)
+  set(HAVE_CURLINFO_RESPONSE_CODE TRUE)
+  set(HAVE_CURLINFO_HTTP_CONNECTCODE TRUE)
+  set(HAVE_CURLOPT_BUFFERSIZE TRUE)
+  set(HAVE_CURLOPT_KEEPALIVE TRUE)
+ELSE()
+  # Check to see if CURLOPT_USERNAME is defined.
+  # It is present starting version 7.19.1.
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLOPT_USERNAME;}" HAVE_CURLOPT_USERNAME)
+
+  # Check to see if CURLOPT_PASSWORD is defined.
+  # It is present starting version 7.19.1.
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLOPT_PASSWORD;}" HAVE_CURLOPT_PASSWORD)
+
+  # Check to see if CURLOPT_KEYPASSWD is defined.
+  # It is present starting version 7.16.4.
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLOPT_KEYPASSWD;}" HAVE_CURLOPT_KEYPASSWD)
+
+  # Check to see if CURLINFO_RESPONSE_CODE is defined.
+  # It showed up in curl 7.10.7.
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLINFO_RESPONSE_CODE;}" HAVE_CURLINFO_RESPONSE_CODE)
+
+  # Check to see if CURLINFO_HTTP_CONNECTCODE is defined.
+  # It showed up in curl 7.10.7.
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLINFO_HTTP_CONNECTCODE;}" HAVE_CURLINFO_HTTP_CONNECTCODE)
+
+  # Check to see if CURLOPT_BUFFERSIZE is defined.
+  # It is present starting version 7.59
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLOPT_BUFFERSIZE;}" HAVE_CURLOPT_BUFFERSIZE)
+
+  # Check to see if CURLOPT_TCP_KEEPALIVE is defined.
+  # It is present starting version 7.25
+  CHECK_C_SOURCE_COMPILES("
+  #include <curl/curl.h>
+  int main() {int x = CURLOPT_TCP_KEEPALIVE;}" HAVE_CURLOPT_KEEPALIVE)
+ENDIF()
+
+################################
+# Math
+################################
+# Check for the math library so it can be explicitly linked.
+IF(NOT WIN32)
+  FIND_LIBRARY(HAVE_LIBM NAMES math m libm)
+  IF(NOT HAVE_LIBM)
+    CHECK_FUNCTION_EXISTS(exp HAVE_LIBM_FUNC)
+    IF(NOT HAVE_LIBM_FUNC)
+      MESSAGE(FATAL_ERROR "Unable to find the math library.")
+    ELSE(NOT HAVE_LIBM_FUNC)
+      SET(HAVE_LIBM "")
+    ENDIF()
+  ELSE(NOT HAVE_LIBM)
+    MESSAGE(STATUS "Found Math library: ${HAVE_LIBM}")
+  ENDIF()
+ENDIF()
+
+################################
+# zlib
+################################
+# See if we have zlib
+FIND_PACKAGE(ZLIB)
+
+# Define a test flag for have zlib library
+IF(ZLIB_FOUND)
+  INCLUDE_DIRECTORIES(${ZLIB_INCLUDE_DIRS})
+  SET(ENABLE_ZLIB TRUE)
+ELSE()
+  SET(ENABLE_ZLIB FALSE)
+ENDIF()
+
+################################
+# Zips
+################################
+IF (ENABLE_FILTER_SZIP)
+  FIND_PACKAGE(Szip)
+ELSEIF(ENABLE_NCZARR)
+  FIND_PACKAGE(Szip)
+ENDIF()
+IF (ENABLE_FILTER_BZ2)
+  FIND_PACKAGE(Bz2)
+ENDIF()
+IF (ENABLE_FILTER_BLOSC)
+  FIND_PACKAGE(Blosc)
+ENDIF()
+IF (ENABLE_FILTER_ZSTD)
+  FIND_PACKAGE(Zstd)
+ENDIF()
+
+# Accumulate standard filters
+set(STD_FILTERS "deflate") # Always have deflate*/
+set_std_filter(Szip)
+SET(HAVE_SZ ${Szip_FOUND})
+set_std_filter(Blosc)
+IF(Zstd_FOUND)
+  set_std_filter(Zstd)
+  SET(HAVE_ZSTD ON)
+ENDIF()
+IF(Bz2_FOUND)
+  set_std_filter(Bz2)
+ELSE()
+  # The reason we use a local version is to support a more comples test case
+  MESSAGE("libbz2 not found using built-in version")
+  SET(HAVE_LOCAL_BZ2 ON)
+  SET(HAVE_BZ2 ON CACHE BOOL "")
+  set(STD_FILTERS "${STD_FILTERS} bz2")
+ENDIF()
+
+IF (ENABLE_NCZARR_ZIP)
+  FIND_PACKAGE(Zip REQUIRED)
+  INCLUDE_DIRECTORIES(${Zip_INCLUDE_DIRS})
+ENDIF ()
+
+################################
+# S3
+################################
+# Note we check for the library after checking for enable_s3
+# because for some reason this screws up if we unconditionally test for sdk
+# and it is not available. Fix someday
+IF(ENABLE_S3)
+  IF(NOT ENABLE_S3_INTERNAL)
+    # See if aws-s3-sdk is available
+    find_package(AWSSDK REQUIRED COMPONENTS s3;transfer)
+    IF(AWSSDK_FOUND)
+      SET(ENABLE_S3_AWS ON CACHE BOOL "S3 AWS" FORCE)
+      INCLUDE_DIRECTORIES(${AWSSDK_INCLUDE_DIR})
+    ELSE(AWSSDK_FOUND)
+      SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE)
+    ENDIF(AWSSDK_FOUND)
+  ELSE(NOT ENABLE_S3_INTERNAL)
+    # Find crypto libraries required with testing with the internal s3 api.
+ #FIND_LIBRARY(SSL_LIB NAMES ssl openssl) + find_package(OpenSSL REQUIRED) + IF(NOT OpenSSL_FOUND) + MESSAGE(FATAL_ERROR "Can't find an ssl library, required by S3_INTERNAL") + ENDIF(NOT OpenSSL_FOUND) + + #find_package(Crypto REQUIRED) + #IF(NOT CRYPTO_LIB) + # MESSAGE(FATAL_ERROR "Can't find a crypto library, required by S3_INTERNAL") + #ENDIF(NOT CRYPTO_LIB) + + ENDIF(NOT ENABLE_S3_INTERNAL) +ELSE() + SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) +ENDIF() + +################################ +# LibXML +################################ +# see if we have libxml2 +IF(ENABLE_LIBXML2) + find_package(LibXml2) + IF(LibXml2_FOUND) + SET(HAVE_LIBXML2 TRUE) + INCLUDE_DIRECTORIES(${LIBXML2_INCLUDE_DIRS}) + SET(XMLPARSER "libxml2") + ELSE() + SET(HAVE_LIBXML2 FALSE) + ENDIF() +ENDIF(ENABLE_LIBXML2) + +################################ +# MPI +################################ +OPTION(ENABLE_PARALLEL4 "Build netCDF-4 with parallel IO" "${HDF5_PARALLEL}") +IF(ENABLE_PARALLEL4 AND ENABLE_HDF5 AND HDF5_PARALLEL) + FIND_PACKAGE(MPI REQUIRED) +ENDIF() + +################################ +# parallel IO +################################ +IF(ENABLE_PNETCDF) + FIND_LIBRARY(PNETCDF NAMES pnetcdf) + FIND_PATH(PNETCDF_INCLUDE_DIR pnetcdf.h) + IF(NOT PNETCDF) + MESSAGE(STATUS "Cannot find PnetCDF library. Disabling PnetCDF support.") + SET(USE_PNETCDF OFF CACHE BOOL "") + ENDIF +ENDIF() + +################################ +# Doxygen +################################ +IF(ENABLE_DOXYGEN) + FIND_PACKAGE(Doxygen REQUIRED) +ENDIF() \ No newline at end of file From ca850af099d0410de7264a3ac0405462de8ed82e Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 10 Jan 2024 16:11:00 -0600 Subject: [PATCH 14/33] actually adding the dependencies file... --- CMakeLists.txt | 50 ++++++++++++----------------- cmake/dependencies.cmake | 7 ++-- cmake/netcdf_functions_macros.cmake | 20 ++++++++++++ 3 files changed, 45 insertions(+), 32 deletions(-) create mode 100644 cmake/netcdf_functions_macros.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 9d53ad0228..f6f74ed8a4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,10 +10,16 @@ cmake_minimum_required(VERSION 3.12.0) #Project Name project(netCDF -LANGUAGES C CXX -HOMEPAGE_URL "https://www.unidata.ucar.edu/software/netcdf/" -DESCRIPTION "NetCDF is a set of software libraries and machine-independent data formats that support the creation, access, and sharing of array-oriented scientific data." + LANGUAGES C CXX + HOMEPAGE_URL "https://www.unidata.ucar.edu/software/netcdf/" + DESCRIPTION "NetCDF is a set of software libraries and machine-independent data formats that support the creation, access, and sharing of array-oriented scientific data." ) + +#Add custom CMake Module + +set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules/;${PROJECT_SOURCE_DIR}/cmake" + CACHE INTERNAL "Location of our custom CMake modules.") + set(PACKAGE "netCDF" CACHE STRING "") # Backport of built-in `PROJECT_IS_TOP_LEVEL` from CMake 3.21 @@ -114,10 +120,6 @@ IF(MSVC) ADD_COMPILE_OPTIONS("/utf-8") ENDIF() -#Add custom CMake Module -SET(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules/" - CACHE INTERNAL "Location of our custom CMake modules.") - # auto-configure style checks, other CMake modules. 
INCLUDE(CheckLibraryExists) INCLUDE(CheckIncludeFile) @@ -201,7 +203,6 @@ SET(EXTRA_DEPS "") # End Project Properties ################################ - ################################ # Set CTest Properties ################################ @@ -603,28 +604,6 @@ ENDIF() SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") - -macro(set_std_filter filter) -# Upper case the filter name -string(TOUPPER "${filter}" upfilter) -string(TOLOWER "${filter}" downfilter) -if(ENABLE_FILTER_${upfilter}) -# Define a test flag for filter - IF(${filter}_FOUND) - INCLUDE_DIRECTORIES(${${filter}_INCLUDE_DIRS}) - SET(ENABLE_${upfilter} TRUE) - SET(HAVE_${upfilter} ON) - SET(STD_FILTERS "${STD_FILTERS} ${downfilter}") - MESSAGE(">>> Standard Filter: ${downfilter}") - ELSE() - SET(ENABLE_${upfilter} FALSE) - SET(HAVE_${upfilter} OFF) - ENDIF() -ELSE() - SET(HAVE_${upfilter} OFF) -ENDIF() -endmacro(set_std_filter) - # Locate some compressors OPTION(ENABLE_FILTER_SZIP "Enable use of Szip compression library if it is available. Required if ENABLE_NCZARR is true." ON) OPTION(ENABLE_FILTER_BZ2 "Enable use of Bz2 compression library if it is available." ON) @@ -1188,6 +1167,17 @@ MARK_AS_ADVANCED(ENABLE_DAP_REMOTE_TESTS ENABLE_DAP_LONG_TESTS USE_REMOTE_CDASH MARK_AS_ADVANCED(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS DOXYGEN_ENABLE_TASKS ENABLE_DOXYGEN_SERVER_SIDE_SEARCH) MARK_AS_ADVANCED(ENABLE_SHARED_LIBRARY_VERSION) +################################## +# Dependencies +################################## + +include(cmake/dependencies.cmake) +message(STATUS "HDF5VERSION: ${HDF5_VERSION}") + +################################ +# End Dependencies +################################ + ################################ # Option checks ################################ diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 3616331a5c..e08f296654 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -1,3 +1,5 @@ +include(netcdf_functions_macros) + ################################ # PkgConfig ################################ @@ -123,7 +125,7 @@ IF(USE_HDF5) REGEX "#define[ \t]+H5_VERSION") string(REGEX REPLACE ".*H5_VERSION .*\"\(.*\)\".*" "\\1" _hdf5_version "${_hdf5_version_lines}") set(HDF5_VERSION "${_hdf5_version}" CACHE STRING "") - set(HDF5_VERSION ${HDF5_VERSION} ) + set(HDF5_VERSION ${HDF5_VERSION} PARENT_SCOPE) unset(_hdf5_version) unset(_hdf5_version_lines) endif () @@ -132,6 +134,7 @@ IF(USE_HDF5) # If HDF5_VERSION is still empty, we have a problem. # Error out. ### + message(STATUS "Thing: ${HDF5_VERSION}") IF("${HDF5_VERSION}" STREQUAL "") MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") ENDIF() @@ -641,7 +644,7 @@ IF(ENABLE_PNETCDF) IF(NOT PNETCDF) MESSAGE(STATUS "Cannot find PnetCDF library. 
Disabling PnetCDF support.") SET(USE_PNETCDF OFF CACHE BOOL "") - ENDIF + ENDIF() ENDIF() ################################ diff --git a/cmake/netcdf_functions_macros.cmake b/cmake/netcdf_functions_macros.cmake new file mode 100644 index 0000000000..9832d0bba4 --- /dev/null +++ b/cmake/netcdf_functions_macros.cmake @@ -0,0 +1,20 @@ +macro(set_std_filter filter) +# Upper case the filter name +string(TOUPPER "${filter}" upfilter) +string(TOLOWER "${filter}" downfilter) +if(ENABLE_FILTER_${upfilter}) +# Define a test flag for filter + IF(${filter}_FOUND) + INCLUDE_DIRECTORIES(${${filter}_INCLUDE_DIRS}) + SET(ENABLE_${upfilter} TRUE) + SET(HAVE_${upfilter} ON) + SET(STD_FILTERS "${STD_FILTERS} ${downfilter}") + MESSAGE(">>> Standard Filter: ${downfilter}") + ELSE() + SET(ENABLE_${upfilter} FALSE) + SET(HAVE_${upfilter} OFF) + ENDIF() +ELSE() + SET(HAVE_${upfilter} OFF) +ENDIF() +endmacro(set_std_filter) \ No newline at end of file From 330f911ecf2f935e0f2f546da4ee01ffb4a06b01 Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 10 Jan 2024 16:12:04 -0600 Subject: [PATCH 15/33] removing debug messages --- CMakeLists.txt | 1 - cmake/dependencies.cmake | 1 - 2 files changed, 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f6f74ed8a4..de485bb228 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1172,7 +1172,6 @@ MARK_AS_ADVANCED(ENABLE_SHARED_LIBRARY_VERSION) ################################## include(cmake/dependencies.cmake) -message(STATUS "HDF5VERSION: ${HDF5_VERSION}") ################################ # End Dependencies diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index e08f296654..ce4f8b4e43 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -134,7 +134,6 @@ IF(USE_HDF5) # If HDF5_VERSION is still empty, we have a problem. # Error out. ### - message(STATUS "Thing: ${HDF5_VERSION}") IF("${HDF5_VERSION}" STREQUAL "") MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") ENDIF() From 5537a608850002cedd98c3a2db48d4e7c843248f Mon Sep 17 00:00:00 2001 From: Julien Schueller Date: Fri, 12 Jan 2024 10:18:29 +0100 Subject: [PATCH 16/33] CMake: Add support for UNITY_BUILD --- libdap2/CMakeLists.txt | 4 ++++ libdap4/CMakeLists.txt | 4 ++++ libdispatch/CMakeLists.txt | 4 ++++ libsrc/CMakeLists.txt | 4 ++++ nc_test/CMakeLists.txt | 1 + ncdump/CMakeLists.txt | 4 ++++ ncgen/CMakeLists.txt | 4 ++++ nctest/CMakeLists.txt | 1 + 8 files changed, 26 insertions(+) diff --git a/libdap2/CMakeLists.txt b/libdap2/CMakeLists.txt index e58da10e63..eae80d0acd 100644 --- a/libdap2/CMakeLists.txt +++ b/libdap2/CMakeLists.txt @@ -6,6 +6,10 @@ # See netcdf-c/COPYRIGHT file for more info. SET(dap2_SOURCES constraints.c dapcvt.c dapodom.c daputil.c ncdaperr.c cdf.c cache.c dapdump.c dapdebug.c dapattr.c ncd2dispatch.c getvara.c dceconstraints.c dcetab.c dceparse.c dcelex.c) +set_property(SOURCE ncd2dispatch.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + add_library(dap2 OBJECT ${dap2_SOURCES}) IF(STATUS_PARALLEL) diff --git a/libdap4/CMakeLists.txt b/libdap4/CMakeLists.txt index 4b914de59f..1f3d0a9ffc 100644 --- a/libdap4/CMakeLists.txt +++ b/libdap4/CMakeLists.txt @@ -6,6 +6,10 @@ # See netcdf-c/COPYRIGHT file for more info. 
SET(dap4_SOURCES d4curlfunctions.c d4fix.c d4data.c d4file.c d4parser.c d4meta.c d4varx.c d4dump.c d4swap.c d4chunk.c d4printer.c d4read.c d4http.c d4util.c d4odom.c d4cvt.c d4debug.c ncd4dispatch.c) +set_property(SOURCE d4meta.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + add_library(dap4 OBJECT ${dap4_SOURCES}) IF(STATUS_PARALLEL) diff --git a/libdispatch/CMakeLists.txt b/libdispatch/CMakeLists.txt index 1d867369b8..9a5a705a10 100644 --- a/libdispatch/CMakeLists.txt +++ b/libdispatch/CMakeLists.txt @@ -8,6 +8,10 @@ SET(libdispatch_SOURCES dcopy.c dfile.c ddim.c datt.c dattinq.c dattput.c dattge daux.c dinstance.c dinstance_intern.c dcrc32.c dcrc32.h dcrc64.c ncexhash.c ncxcache.c ncjson.c ds3util.c dparallel.c dmissing.c) +set_property(SOURCE dinstance_intern.c dinstance.c dvarput.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + # Netcdf-4 only functions. Must be defined even if not used SET(libdispatch_SOURCES ${libdispatch_SOURCES} dgroup.c dvlen.c dcompound.c dtype.c denum.c dopaque.c dfilter.c) diff --git a/libsrc/CMakeLists.txt b/libsrc/CMakeLists.txt index 533b64c7bd..9654422225 100644 --- a/libsrc/CMakeLists.txt +++ b/libsrc/CMakeLists.txt @@ -4,6 +4,10 @@ SET(libsrc_SOURCES v1hpg.c putget.c attr.c nc3dispatch.c nc3internal.c var.c dim.c ncx.c lookup3.c ncio.c) +set_property(SOURCE httpio.c posixio.c mmapio.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + # Process these files with m4. SET(m4_SOURCES attr ncx putget) foreach (f ${m4_SOURCES}) diff --git a/nc_test/CMakeLists.txt b/nc_test/CMakeLists.txt index 61bff28074..3ea7113ca5 100644 --- a/nc_test/CMakeLists.txt +++ b/nc_test/CMakeLists.txt @@ -34,6 +34,7 @@ TARGET_LINK_LIBRARIES(nc_test netcdf ${HAVE_LIBM} ) +set_property(TARGET nc_test PROPERTY UNITY_BUILD OFF) # Some extra stand-alone tests SET(TESTS t_nc tst_small tst_misc tst_norm tst_names tst_nofill tst_nofill2 tst_nofill3 tst_meta tst_inq_type tst_utf8_phrases tst_global_fillval tst_max_var_dims tst_formats tst_def_var_fill tst_err_enddef tst_default_format) diff --git a/ncdump/CMakeLists.txt b/ncdump/CMakeLists.txt index c0869b0362..5ca76c2a37 100644 --- a/ncdump/CMakeLists.txt +++ b/ncdump/CMakeLists.txt @@ -24,6 +24,10 @@ SET(ncpathcvt_FILES ncpathcvt.c ${XGETOPTSRC}) SET(ncfilteravail_FILES ncfilteravail.c ${XGETOPTSRC}) SET(nchdf5version_FILES nchdf5version.c) +set_property(SOURCE dumplib.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + ADD_EXECUTABLE(ncdump ${ncdump_FILES}) ADD_EXECUTABLE(nccopy ${nccopy_FILES}) ADD_EXECUTABLE(ncvalidator ${ncvalidator_FILES}) diff --git a/ncgen/CMakeLists.txt b/ncgen/CMakeLists.txt index 4edc88b145..4d218d3e46 100644 --- a/ncgen/CMakeLists.txt +++ b/ncgen/CMakeLists.txt @@ -22,6 +22,10 @@ util.c bytebuffer.h data.h debug.h dump.h generate.h generr.h genlib.h includes.h list.h ncgen.h ncgeny.h util.h ${XGETOPTSRC}) +set_property(SOURCE data.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + # Obsolete SET(OBSOLETE odom.c odom.h jdatastd.c jdatajni.c genjni.c cdfdata.c cmldata.c) diff --git a/nctest/CMakeLists.txt b/nctest/CMakeLists.txt index ada220f2ab..e4c5ea210f 100644 --- a/nctest/CMakeLists.txt +++ b/nctest/CMakeLists.txt @@ -16,6 +16,7 @@ FILE(COPY ${COPY_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) ADD_EXECUTABLE(nctest ${nctest_SRC}) TARGET_LINK_LIBRARIES(nctest netcdf) ADD_TEST(nctest ${EXECUTABLE_OUTPUT_PATH}/nctest) +set_property(TARGET nctest PROPERTY UNITY_BUILD OFF) add_bin_test_no_prefix(tst_rename) IF(BUILD_UTILITIES) From 76f4cda69a5e46ee05d59fc4c7140525fb8244ac Mon Sep 17 00:00:00 2001 From: Peter Hill Date: 
Mon, 15 Jan 2024 10:20:52 +0000 Subject: [PATCH 17/33] Ensure `config.h` is included first --- nczarr_test/ncdumpchunks.c | 2 +- nczarr_test/test_unlim_io.c | 2 +- nczarr_test/zmapio.c | 5 +++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/nczarr_test/ncdumpchunks.c b/nczarr_test/ncdumpchunks.c index 7381e9514c..17bcc76c27 100644 --- a/nczarr_test/ncdumpchunks.c +++ b/nczarr_test/ncdumpchunks.c @@ -1,8 +1,8 @@ -#include #ifdef HAVE_CONFIG_H #include "config.h" #endif +#include #include #include #include diff --git a/nczarr_test/test_unlim_io.c b/nczarr_test/test_unlim_io.c index fa5aee5848..15f10723aa 100644 --- a/nczarr_test/test_unlim_io.c +++ b/nczarr_test/test_unlim_io.c @@ -1,7 +1,7 @@ -#include #ifdef HAVE_CONFIG_H #include "config.h" #endif +#include #include #include #include diff --git a/nczarr_test/zmapio.c b/nczarr_test/zmapio.c index 53a1612b42..fb8c8cfb4f 100644 --- a/nczarr_test/zmapio.c +++ b/nczarr_test/zmapio.c @@ -3,8 +3,6 @@ * See netcdf/COPYRIGHT file for copying and redistribution conditions. */ -#include "ncconfigure.h" -#include #ifdef HAVE_CONFIG_H #include "config.h" #endif @@ -20,6 +18,9 @@ #include "XGetopt.h" #endif +#include + +#include "ncconfigure.h" #include "zincludes.h" #include "ncpathmgr.h" #include "nclog.h" From d51f80cdedf2af02656d7dd679188f98743c4797 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Mon, 15 Jan 2024 10:42:13 +0000 Subject: [PATCH 18/33] Fix some more conversion warnings in nczarr tests --- nczarr_test/test_zchunks2.c | 4 ++-- nczarr_test/ut_map.c | 10 +++++----- nczarr_test/ut_mapapi.c | 10 +++++----- nczarr_test/zmapio.c | 17 +++++++---------- 4 files changed, 19 insertions(+), 22 deletions(-) diff --git a/nczarr_test/test_zchunks2.c b/nczarr_test/test_zchunks2.c index e07412da08..4766f8ac65 100644 --- a/nczarr_test/test_zchunks2.c +++ b/nczarr_test/test_zchunks2.c @@ -349,7 +349,7 @@ main(int argc, char **argv) size_t dim_len[NDIMS3]; int storage = 0; size_t chunksizes[NDIMS3]; - int d, t; + int t; char dim_name[NC_MAX_NAME + 1]; float waste; @@ -358,7 +358,7 @@ main(int argc, char **argv) if (nc_create(itoptions.path, NC_NETCDF4 | NC_CLOBBER, &ncid)) ERR; /* Create a few dimensions. 
*/ - for (d = 0; d < NDIMS3; d++) + for (size_t d = 0; d < NDIMS3; d++) { dim_len[d] = (size_t)rand(); snprintf(dim_name, sizeof(dim_name), "dim_%zu", d); diff --git a/nczarr_test/ut_map.c b/nczarr_test/ut_map.c index ca1d1be82d..f0c9f10df4 100644 --- a/nczarr_test/ut_map.c +++ b/nczarr_test/ut_map.c @@ -323,7 +323,7 @@ readdata(void) static int searchR(NCZMAP* map, int depth, const char* prefix0, NClist* objects) { - int i,stat = NC_NOERR; + int stat = NC_NOERR; NClist* matches = nclistnew(); char prefix[4096]; /* only ok because we know testdata */ size_t prefixlen; @@ -341,7 +341,7 @@ searchR(NCZMAP* map, int depth, const char* prefix0, NClist* objects) default: goto done; } /* recurse */ - for(i=0;itypesize); } - printf("[%d] %s : (%llu)",depth,obj,len); + printf("[%zu] %s : (%llu)",depth,obj,len); if(kind == OK_CHUNK && dumpoptions.nctype->nctype != NC_STRING) printf(" (%s)",dumpoptions.nctype->typename); printf(" |"); @@ -381,10 +378,10 @@ objdump(void) } printf("|\n"); } else { - printf("[%d] %s : (%llu) ||\n",depth,obj,len); + printf("[%zu] %s : (%llu) ||\n",depth,obj,len); } } else { - printf("[%d] %s\n",depth,obj); + printf("[%zu] %s\n",depth,obj); } } done: From f266313cb78c471528f5c70ede6c02f0b300f88c Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Mon, 15 Jan 2024 10:42:45 +0000 Subject: [PATCH 19/33] Fix warning about mismatched indentation between if/else statements --- nczarr_test/zmapio.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nczarr_test/zmapio.c b/nczarr_test/zmapio.c index bed4669fa2..836da67f33 100644 --- a/nczarr_test/zmapio.c +++ b/nczarr_test/zmapio.c @@ -369,10 +369,11 @@ objdump(void) printcontent(len,content,kind); break; case OK_CHUNK: - if(dumpoptions.meta_only) + if(dumpoptions.meta_only) { printf("..."); - else + } else { printcontent(len,content,kind); + } break; default: break; } From a7552b4b83f9161e210c6a991ebf018f7bcc40d4 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Mon, 15 Jan 2024 13:57:22 +0000 Subject: [PATCH 20/33] Fix a couple of conversion warnings in shared nczarr/ncdump test --- ncdump/tst_chunking.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ncdump/tst_chunking.c b/ncdump/tst_chunking.c index 46c9ddc5ba..728b274cc5 100644 --- a/ncdump/tst_chunking.c +++ b/ncdump/tst_chunking.c @@ -109,7 +109,7 @@ main(int argc, char **argv) /* Fill in the data */ if(option_unlimited) { - int nvals = UNLIM_SIZE * dim_lens[0]; + size_t nvals = UNLIM_SIZE * dim_lens[0]; size_t start[2] = {0,0}; size_t count[2]; for(i=0;i Date: Tue, 16 Jan 2024 11:20:43 -0600 Subject: [PATCH 21/33] moving the dependencies inclusion --- CMakeLists.txt | 22 +++++++++++----------- cmake/dependencies.cmake | 3 +-- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2bd180313d..b69ee7a0dd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -894,6 +894,16 @@ IF(NOT ENABLE_HDF5 AND ENABLE_EXAMPLE_TESTS) SET(ENABLE_EXAMPLE_TESTS OFF) ENDIF() +################################## +# Dependencies +################################## + +include(cmake/dependencies.cmake) + +################################ +# End Dependencies +################################ + # Enable Parallel IO with netCDF-4/HDF5 files using HDF5 parallel I/O. 
SET(STATUS_PARALLEL "OFF") set(IMPORT_MPI "") @@ -1165,16 +1175,6 @@ MARK_AS_ADVANCED(ENABLE_DAP_REMOTE_TESTS ENABLE_DAP_LONG_TESTS USE_REMOTE_CDASH MARK_AS_ADVANCED(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS DOXYGEN_ENABLE_TASKS ENABLE_DOXYGEN_SERVER_SIDE_SEARCH) MARK_AS_ADVANCED(ENABLE_SHARED_LIBRARY_VERSION) -################################## -# Dependencies -################################## - -include(cmake/dependencies.cmake) - -################################ -# End Dependencies -################################ - ################################ # Option checks ################################ @@ -1700,7 +1700,7 @@ configure_file("${netCDF_SOURCE_DIR}/config.h.cmake.in" "${netCDF_BINARY_DIR}/config.h") ADD_DEFINITIONS(-DHAVE_CONFIG_H) INCLUDE_DIRECTORIES(${netCDF_BINARY_DIR}) -# End autotools-style checs for config.h +# End autotools-style checks for config.h ##### # Set core names of the libraries. diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index ce4f8b4e43..43e8468280 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -629,8 +629,7 @@ ENDIF(ENABLE_LIBXML2) ################################ # MPI ################################ -OPTION(ENABLE_PARALLEL4 "Build netCDF-4 with parallel IO" "${HDF5_PARALLEL}") -IF(ENABLE_PARALLEL4 AND ENABLE_HDF5 AND HDF5_PARALLEL) +IF(ENABLE_PARALLEL4 OR HDF5_PARALLEL) FIND_PACKAGE(MPI REQUIRED) ENDIF() From e7c7221f289bcb63d9a8ec18573e6b286043d51c Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 17 Jan 2024 15:17:50 -0600 Subject: [PATCH 22/33] moving functions and macros to a file --- CMakeLists.txt | 304 +-------------------------- cmake/dependencies.cmake | 2 - cmake/netcdf_functions_macros.cmake | 315 +++++++++++++++++++++++++++- 3 files changed, 316 insertions(+), 305 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index b69ee7a0dd..37d1b3e5ef 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -23,6 +23,8 @@ set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_SOURCE_DIR}/cmake/mo set(PACKAGE "netCDF" CACHE STRING "") +include(netcdf_functions_macros) + # Backport of built-in `PROJECT_IS_TOP_LEVEL` from CMake 3.21 if (NOT DEFINED NETCDF_IS_TOP_LEVEL) set(NETCDF_IS_TOP_LEVEL OFF) @@ -60,9 +62,6 @@ SET(NC_DISPATCH_VERSION 5) # will be used when committing to CDash. find_program(UNAME NAMES uname) IF(UNAME) - macro(getuname name flag) - execute_process(COMMAND "${UNAME}" "${flag}" OUTPUT_VARIABLE "${name}" OUTPUT_STRIP_TRAILING_WHITESPACE) - endmacro(getuname) getuname(osname -s) getuname(osrel -r) getuname(cpu -m) @@ -145,26 +144,6 @@ ENDIF(${BIGENDIAN}) SET(TRUELIST "on;yes;y;true") SET(FALSELIST "off;no;n;false;0;ignore;notfound") -function(booleanize VALUE RETVAR) - # force case - STRING(TOLOWER "${VALUE}" LCVALUE) - # Now do all the comparisons - if(LCVALUE IN_LIST TRUELIST OR LCVALUE GREATER 0) - SET(${RETVAR} TRUE PARENT_SCOPE) - elseif(LCVALUE IN_LIST FALSELIST OR LCVALUE MATCHES ".*-notfound" OR LCVALUE STREQUAL "") - SET(${RETVAR} FALSE PARENT_SCOPE) - else() - SET(${RETVAR} NOTFOUND PARENT_SCOPE) - endif() -endfunction() - -# A macro to check if a C linker supports a particular flag. -MACRO(CHECK_C_LINKER_FLAG M_FLAG M_RESULT) - SET(T_REQ_FLAG "${CMAKE_REQUIRED_FLAGS}") - SET(CMAKE_REQUIRED_FLAGS "${M_FLAG}") - CHECK_C_SOURCE_COMPILES("int main() {return 0;}" ${M_RESULT}) - SET(CMAKE_REQUIRED_FLAGS "${T_REQ_FLAG}") -ENDMACRO() # Set the build type. 
IF(NOT CMAKE_BUILD_TYPE) @@ -384,35 +363,6 @@ IF(${CMAKE_SYSTEM_NAME} EQUAL "Darwin") SET(CMAKE_OSX_ARCHITECTURES i386;x86_64) ENDIF(${CMAKE_SYSTEM_NAME} EQUAL "Darwin") -# Macro for replacing '/MD' with '/MT'. -# Used only on Windows, /MD tells VS to use the shared -# CRT libs, MT tells VS to use the static CRT libs. -# -# Taken From: -# http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F -# -MACRO(specify_static_crt_flag) - SET(vars - CMAKE_C_FLAGS - CMAKE_C_FLAGS_DEBUG - CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL - CMAKE_C_FLAGS_RELWITHDEBINFO - CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG - CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_MINSIZEREL - CMAKE_CXX_FLAGS_RELWITHDEBINFO) - - FOREACH(flag_var ${vars}) - IF(${flag_var} MATCHES "/MD") - STRING(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") - ENDIF() - ENDFOREACH() - - FOREACH(flag_var ${vars}) - MESSAGE(STATUS " '${flag_var}': ${${flag_var}}") - ENDFOREACH() - MESSAGE(STATUS "") -ENDMACRO() # Option to use Static Runtimes in MSVC IF(MSVC) @@ -1409,19 +1359,6 @@ ENDIF() # End system inspection checks. ##### -################################ -# Define Utility Macros -################################ - -# Macro to append files to the EXTRA_DIST files. -# Note: can only be used in subdirectories because of the use of PARENT_SCOPE -SET(EXTRA_DIST "") -MACRO(ADD_EXTRA_DIST files) - FOREACH(F ${files}) - SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/${F}) - SET(EXTRA_DIST ${EXTRA_DIST} PARENT_SCOPE) - ENDFOREACH() -ENDMACRO() # A basic script used to convert m4 files FIND_PROGRAM(NC_M4 NAMES m4 m4.exe) @@ -1433,200 +1370,6 @@ ELSE() SET(HAVE_M4 FALSE) ENDIF() -MACRO(GEN_m4 filename) - - set(fallbackdest "${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c") - set(dest "${CMAKE_CURRENT_BINARY_DIR}/${filename}.c") - - # If m4 isn't present, and the generated file doesn't exist, - # it cannot be generated and an error should be thrown. - IF(NOT HAVE_M4) - IF(NOT EXISTS ${fallbackdest}) - MESSAGE(FATAL_ERROR "m4 is required to generate ${filename}.c. Please install m4 so that it is on the PATH and try again.") - ELSE() - SET(dest ${fallbackdest}) - ENDIF() - ELSE() - ADD_CUSTOM_COMMAND( - OUTPUT ${dest} - COMMAND ${NC_M4} - ARGS ${M4FLAGS} ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.m4 > ${dest} - VERBATIM - ) - - ENDIF() -ENDMACRO(GEN_m4) - -# Binary tests, but ones which depend on value of 'TEMP_LARGE' being defined. -MACRO(add_bin_env_temp_large_test prefix F) - ADD_EXECUTABLE(${prefix}_${F} ${F}.c) - TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf) - IF(MSVC) - SET_TARGET_PROPERTIES(${prefix}_${F} - PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" - ) - ENDIF() - - ADD_TEST(${prefix}_${F} bash "-c" "TEMP_LARGE=${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") - IF(MSVC) - SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO - ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() - - -# Tests which are binary, but depend on a particular environmental variable. 
-MACRO(add_bin_env_test prefix F) - ADD_EXECUTABLE(${prefix}_${F} ${F}.c) - TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf) - IF(MSVC) - SET_TARGET_PROPERTIES(${prefix}_${F} - PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" - ) - ENDIF() - - ADD_TEST(${prefix}_${F} bash "-c" "TOPSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") - IF(MSVC) - SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") - ENDIF() -ENDMACRO() - -# Build a binary used by a script, but don't make a test out of it. -MACRO(build_bin_test F) - if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c") - ADD_EXECUTABLE(${F} "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c" ${ARGN}) - else() - # File should have been copied to the binary directory - ADD_EXECUTABLE(${F} "${CMAKE_CURRENT_BINARY_DIR}/${F}.c" ${ARGN}) - endif() - TARGET_LINK_LIBRARIES(${F} netcdf ${ALL_TLL_LIBS}) - IF(MSVC) - SET_TARGET_PROPERTIES(${F} - PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" - ) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE - ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() - -# Binary tests which are used by a script looking for a specific name. -MACRO(add_bin_test_no_prefix F) - build_bin_test(${F} ${ARGN}) - ADD_TEST(${F} ${EXECUTABLE_OUTPUT_PATH}/${F}) - IF(MSVC) - SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/") - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE - ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() - -# Binary tests which are used by a script looking for a specific name. -MACRO(build_bin_test_no_prefix F) - build_bin_test(${F}) - IF(MSVC) - #SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/") - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE - ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() - -# Build a test and add it to the test list. -MACRO(add_bin_test prefix F) - ADD_EXECUTABLE(${prefix}_${F} ${F}.c ${ARGN}) - TARGET_LINK_LIBRARIES(${prefix}_${F} - ${ALL_TLL_LIBS} - netcdf - ) - IF(MSVC) - SET_TARGET_PROPERTIES(${prefix}_${F} - PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" - ) - ENDIF() - ADD_TEST(${prefix}_${F} - ${EXECUTABLE_OUTPUT_PATH}/${prefix}_${F} - ) - IF(MSVC) - SET_PROPERTY(TEST ${prefix}_${F} PROPERTY FOLDER "tests/") - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG - ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE - ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() - -# A cmake script to print out information at the end of the configuration step. 
-MACRO(print_conf_summary) - MESSAGE("") - MESSAGE("") - MESSAGE("Configuration Summary:") - MESSAGE("") - MESSAGE(STATUS "Building Shared Libraries: ${BUILD_SHARED_LIBS}") - MESSAGE(STATUS "Building netCDF-4: ${ENABLE_NETCDF_4}") - MESSAGE(STATUS "Building DAP2 Support: ${ENABLE_DAP2}") - MESSAGE(STATUS "Building DAP4 Support: ${ENABLE_DAP4}") - MESSAGE(STATUS "Building Byte-range Support: ${ENABLE_BYTERANGE}") - MESSAGE(STATUS "Building Utilities: ${BUILD_UTILITIES}") - IF(CMAKE_PREFIX_PATH) - MESSAGE(STATUS "CMake Prefix Path: ${CMAKE_PREFIX_PATH}") - ENDIF() - MESSAGE("") - - IF(${STATUS_PNETCDF} OR ${STATUS_PARALLEL}) - MESSAGE("Building Parallel NetCDF") - MESSAGE(STATUS "Using PnetCDF: ${STATUS_PNETCDF}") - MESSAGE(STATUS "Using Parallel IO: ${STATUS_PARALLEL}") - MESSAGE("") - ENDIF() - - MESSAGE("Tests Enabled: ${ENABLE_TESTS}") - IF(ENABLE_TESTS) - MESSAGE(STATUS "DAP Remote Tests: ${ENABLE_DAP_REMOTE_TESTS}") - MESSAGE(STATUS "Extra Tests: ${ENABLE_EXTRA_TESTS}") - MESSAGE(STATUS "Coverage Tests: ${ENABLE_COVERAGE_TESTS}") - MESSAGE(STATUS "Parallel Tests: ${ENABLE_PARALLEL_TESTS}") - MESSAGE(STATUS "Large File Tests: ${ENABLE_LARGE_FILE_TESTS}") - MESSAGE(STATUS "Extreme Numbers: ${ENABLE_EXTREME_NUMBERS}") - MESSAGE(STATUS "Unit Tests: ${ENABLE_UNIT_TESTS}") - ENDIF() - - MESSAGE("") - MESSAGE("Compiler:") - MESSAGE("") - MESSAGE(STATUS "Build Type: ${CMAKE_BUILD_TYPE}") - MESSAGE(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") - MESSAGE(STATUS "CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}") - IF("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") - MESSAGE(STATUS "CMAKE_C_FLAGS_DEBUG: ${CMAKE_C_FLAGS_DEBUG}") - ENDIF() - IF("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") - MESSAGE(STATUS "CMAKE_C_FLAGS_RELEASE: ${CMAKE_C_FLAGS_RELEASE}") - ENDIF() - - MESSAGE(STATUS "Linking against: ${ALL_TLL_LIBS}") - - MESSAGE("") -ENDMACRO() ##specific # Shell script Macro ## @@ -1652,49 +1395,6 @@ ELSE(ENABLE_BASH_SCRIPT_TESTING) SET(HAVE_BASH "") ENDIF(ENABLE_BASH_SCRIPT_TESTING) -MACRO(add_sh_test prefix F) - IF(HAVE_BASH) - ADD_TEST(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};${CMAKE_CURRENT_BINARY_DIR}/${F}.sh ${ARGN}") - ENDIF() -ENDMACRO() - - - -# A function used to create autotools-style 'yes/no' definitions. -# If a variable is set, it 'yes' is returned. Otherwise, 'no' is -# returned. -# -# Also creates a version of the ret_val prepended with 'NC', -# when feature is true, which is used to generate netcdf_meta.h. -FUNCTION(is_enabled feature ret_val) - IF(${feature}) - SET(${ret_val} "yes" PARENT_SCOPE) - SET("NC_${ret_val}" 1 PARENT_SCOPE) - ELSE() - SET(${ret_val} "no" PARENT_SCOPE) - SET("NC_${ret_val}" 0 PARENT_SCOPE) - ENDIF(${feature}) -ENDFUNCTION() - -# A function used to create autotools-style 'yes/no' definitions. -# If a variable is set, it 'yes' is returned. Otherwise, 'no' is -# returned. -# -# Also creates a version of the ret_val prepended with 'NC', -# when feature is true, which is used to generate netcdf_meta.h. -FUNCTION(is_disabled feature ret_val) - IF(${feature}) - SET(${ret_val} "no" PARENT_SCOPE) - ELSE() - SET(${ret_val} "yes" PARENT_SCOPE) - SET("NC_${ret_val}" 1 PARENT_SCOPE) - ENDIF(${feature}) -ENDFUNCTION() - -################################ -# End Macro Definitions -################################ - # Create config.h file. 
configure_file("${netCDF_SOURCE_DIR}/config.h.cmake.in" "${netCDF_BINARY_DIR}/config.h") diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 6c9e9a62c5..3b74637ec7 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -1,5 +1,3 @@ -include(netcdf_functions_macros) - ################################ # PkgConfig ################################ diff --git a/cmake/netcdf_functions_macros.cmake b/cmake/netcdf_functions_macros.cmake index 9832d0bba4..2f7f65bf56 100644 --- a/cmake/netcdf_functions_macros.cmake +++ b/cmake/netcdf_functions_macros.cmake @@ -1,3 +1,7 @@ +################################ +# Macros +################################ + macro(set_std_filter filter) # Upper case the filter name string(TOUPPER "${filter}" upfilter) @@ -17,4 +21,313 @@ if(ENABLE_FILTER_${upfilter}) ELSE() SET(HAVE_${upfilter} OFF) ENDIF() -endmacro(set_std_filter) \ No newline at end of file +endmacro(set_std_filter) + +macro(getuname name flag) +execute_process(COMMAND "${UNAME}" "${flag}" OUTPUT_VARIABLE "${name}" OUTPUT_STRIP_TRAILING_WHITESPACE) +endmacro(getuname) + +# A macro to check if a C linker supports a particular flag. +MACRO(CHECK_C_LINKER_FLAG M_FLAG M_RESULT) + SET(T_REQ_FLAG "${CMAKE_REQUIRED_FLAGS}") + SET(CMAKE_REQUIRED_FLAGS "${M_FLAG}") + CHECK_C_SOURCE_COMPILES("int main() {return 0;}" ${M_RESULT}) + SET(CMAKE_REQUIRED_FLAGS "${T_REQ_FLAG}") +ENDMACRO() + +# Macro for replacing '/MD' with '/MT'. +# Used only on Windows, /MD tells VS to use the shared +# CRT libs, MT tells VS to use the static CRT libs. +# +# Taken From: +# http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F +# +MACRO(specify_static_crt_flag) + SET(vars + CMAKE_C_FLAGS + CMAKE_C_FLAGS_DEBUG + CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL + CMAKE_C_FLAGS_RELWITHDEBINFO + CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG + CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_MINSIZEREL + CMAKE_CXX_FLAGS_RELWITHDEBINFO) + + FOREACH(flag_var ${vars}) + IF(${flag_var} MATCHES "/MD") + STRING(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + ENDIF() + ENDFOREACH() + + FOREACH(flag_var ${vars}) + MESSAGE(STATUS " '${flag_var}': ${${flag_var}}") + ENDFOREACH() + MESSAGE(STATUS "") +ENDMACRO() + + +################################ +# Define Utility Macros +################################ + +# Macro to append files to the EXTRA_DIST files. +# Note: can only be used in subdirectories because of the use of PARENT_SCOPE +SET(EXTRA_DIST "") +MACRO(ADD_EXTRA_DIST files) + FOREACH(F ${files}) + SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/${F}) + SET(EXTRA_DIST ${EXTRA_DIST} PARENT_SCOPE) + ENDFOREACH() +ENDMACRO() + + +MACRO(GEN_m4 filename) + + set(fallbackdest "${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c") + set(dest "${CMAKE_CURRENT_BINARY_DIR}/${filename}.c") + + # If m4 isn't present, and the generated file doesn't exist, + # it cannot be generated and an error should be thrown. + IF(NOT HAVE_M4) + IF(NOT EXISTS ${fallbackdest}) + MESSAGE(FATAL_ERROR "m4 is required to generate ${filename}.c. Please install m4 so that it is on the PATH and try again.") + ELSE() + SET(dest ${fallbackdest}) + ENDIF() + ELSE() + ADD_CUSTOM_COMMAND( + OUTPUT ${dest} + COMMAND ${NC_M4} + ARGS ${M4FLAGS} ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.m4 > ${dest} + VERBATIM + ) + + ENDIF() +ENDMACRO(GEN_m4) + +# Binary tests, but ones which depend on value of 'TEMP_LARGE' being defined. 
+MACRO(add_bin_env_temp_large_test prefix F) + ADD_EXECUTABLE(${prefix}_${F} ${F}.c) + TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf) + IF(MSVC) + SET_TARGET_PROPERTIES(${prefix}_${F} + PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" + ) + ENDIF() + + ADD_TEST(${prefix}_${F} bash "-c" "TEMP_LARGE=${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") + IF(MSVC) + SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO + ${CMAKE_CURRENT_BINARY_DIR}) + ENDIF() +ENDMACRO() + + +# Tests which are binary, but depend on a particular environmental variable. +MACRO(add_bin_env_test prefix F) + ADD_EXECUTABLE(${prefix}_${F} ${F}.c) + TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf) + IF(MSVC) + SET_TARGET_PROPERTIES(${prefix}_${F} + PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" + ) + ENDIF() + + ADD_TEST(${prefix}_${F} bash "-c" "TOPSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") + IF(MSVC) + SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") + ENDIF() +ENDMACRO() + +# Build a binary used by a script, but don't make a test out of it. +MACRO(build_bin_test F) + if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c") + ADD_EXECUTABLE(${F} "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c" ${ARGN}) + else() + # File should have been copied to the binary directory + ADD_EXECUTABLE(${F} "${CMAKE_CURRENT_BINARY_DIR}/${F}.c" ${ARGN}) + endif() + TARGET_LINK_LIBRARIES(${F} netcdf ${ALL_TLL_LIBS}) + IF(MSVC) + SET_TARGET_PROPERTIES(${F} + PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" + ) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + ${CMAKE_CURRENT_BINARY_DIR}) + ENDIF() +ENDMACRO() + +# Binary tests which are used by a script looking for a specific name. +MACRO(add_bin_test_no_prefix F) + build_bin_test(${F} ${ARGN}) + ADD_TEST(${F} ${EXECUTABLE_OUTPUT_PATH}/${F}) + IF(MSVC) + SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/") + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + ${CMAKE_CURRENT_BINARY_DIR}) + ENDIF() +ENDMACRO() + +# Binary tests which are used by a script looking for a specific name. +MACRO(build_bin_test_no_prefix F) + build_bin_test(${F}) + IF(MSVC) + #SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/") + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + ${CMAKE_CURRENT_BINARY_DIR}) + ENDIF() +ENDMACRO() + +# Build a test and add it to the test list. 
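Taken together, these helpers are what the per-directory CMakeLists.txt files call to register tests. A typical, purely hypothetical set of invocations is sketched below; it assumes the corresponding .c files exist in the directory and that the netcdf target is already defined.

    # Builds tst_large.c as nc_test4_tst_large and runs it with TEMP_LARGE
    # pointing at the source tree.
    add_bin_env_temp_large_test(nc_test4 tst_large)

    # Builds tst_env.c as nc_test_tst_env and runs it with TOPSRCDIR set.
    add_bin_env_test(nc_test tst_env)

    # Builds a helper binary for a shell script without registering a test.
    build_bin_test(ref_ctest_helper)

    # Builds and registers a test whose executable keeps its bare name,
    # for scripts that look the binary up by that exact name.
    add_bin_test_no_prefix(tst_script_helper)

Each macro derives the target name from its arguments (the ${prefix}_${F} pattern in the definitions), which is also the name that shows up in CTest output; add_bin_test, defined next, completes the family.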
+MACRO(add_bin_test prefix F) + ADD_EXECUTABLE(${prefix}_${F} ${F}.c ${ARGN}) + TARGET_LINK_LIBRARIES(${prefix}_${F} + ${ALL_TLL_LIBS} + netcdf + ) + IF(MSVC) + SET_TARGET_PROPERTIES(${prefix}_${F} + PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" + ) + ENDIF() + ADD_TEST(${prefix}_${F} + ${EXECUTABLE_OUTPUT_PATH}/${prefix}_${F} + ) + IF(MSVC) + SET_PROPERTY(TEST ${prefix}_${F} PROPERTY FOLDER "tests/") + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + ${CMAKE_CURRENT_BINARY_DIR}) + SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + ${CMAKE_CURRENT_BINARY_DIR}) + ENDIF() +ENDMACRO() + +# A cmake script to print out information at the end of the configuration step. +MACRO(print_conf_summary) + MESSAGE("") + MESSAGE("") + MESSAGE("Configuration Summary:") + MESSAGE("") + MESSAGE(STATUS "Building Shared Libraries: ${BUILD_SHARED_LIBS}") + MESSAGE(STATUS "Building netCDF-4: ${ENABLE_NETCDF_4}") + MESSAGE(STATUS "Building DAP2 Support: ${ENABLE_DAP2}") + MESSAGE(STATUS "Building DAP4 Support: ${ENABLE_DAP4}") + MESSAGE(STATUS "Building Byte-range Support: ${ENABLE_BYTERANGE}") + MESSAGE(STATUS "Building Utilities: ${BUILD_UTILITIES}") + IF(CMAKE_PREFIX_PATH) + MESSAGE(STATUS "CMake Prefix Path: ${CMAKE_PREFIX_PATH}") + ENDIF() + MESSAGE("") + + IF(${STATUS_PNETCDF} OR ${STATUS_PARALLEL}) + MESSAGE("Building Parallel NetCDF") + MESSAGE(STATUS "Using PnetCDF: ${STATUS_PNETCDF}") + MESSAGE(STATUS "Using Parallel IO: ${STATUS_PARALLEL}") + MESSAGE("") + ENDIF() + + MESSAGE("Tests Enabled: ${ENABLE_TESTS}") + IF(ENABLE_TESTS) + MESSAGE(STATUS "DAP Remote Tests: ${ENABLE_DAP_REMOTE_TESTS}") + MESSAGE(STATUS "Extra Tests: ${ENABLE_EXTRA_TESTS}") + MESSAGE(STATUS "Coverage Tests: ${ENABLE_COVERAGE_TESTS}") + MESSAGE(STATUS "Parallel Tests: ${ENABLE_PARALLEL_TESTS}") + MESSAGE(STATUS "Large File Tests: ${ENABLE_LARGE_FILE_TESTS}") + MESSAGE(STATUS "Extreme Numbers: ${ENABLE_EXTREME_NUMBERS}") + MESSAGE(STATUS "Unit Tests: ${ENABLE_UNIT_TESTS}") + ENDIF() + + MESSAGE("") + MESSAGE("Compiler:") + MESSAGE("") + MESSAGE(STATUS "Build Type: ${CMAKE_BUILD_TYPE}") + MESSAGE(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") + MESSAGE(STATUS "CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}") + IF("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") + MESSAGE(STATUS "CMAKE_C_FLAGS_DEBUG: ${CMAKE_C_FLAGS_DEBUG}") + ENDIF() + IF("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") + MESSAGE(STATUS "CMAKE_C_FLAGS_RELEASE: ${CMAKE_C_FLAGS_RELEASE}") + ENDIF() + + MESSAGE(STATUS "Linking against: ${ALL_TLL_LIBS}") + + MESSAGE("") +ENDMACRO() + +MACRO(add_sh_test prefix F) + IF(HAVE_BASH) + ADD_TEST(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};${CMAKE_CURRENT_BINARY_DIR}/${F}.sh ${ARGN}") + ENDIF() +ENDMACRO() + + +################################ +# Functions +################################ + +function(booleanize VALUE RETVAR) + # force case + STRING(TOLOWER "${VALUE}" LCVALUE) + # Now do all the comparisons + if(LCVALUE IN_LIST TRUELIST OR LCVALUE GREATER 0) + SET(${RETVAR} TRUE PARENT_SCOPE) + elseif(LCVALUE IN_LIST FALSELIST OR LCVALUE MATCHES ".*-notfound" OR LCVALUE STREQUAL "") + SET(${RETVAR} FALSE PARENT_SCOPE) + else() + SET(${RETVAR} NOTFOUND PARENT_SCOPE) + endif() +endfunction() + + +# A function used to create autotools-style 'yes/no' definitions. +# If a variable is set, it 'yes' is returned. 
Otherwise, 'no' is +# returned. +# +# Also creates a version of the ret_val prepended with 'NC', +# when feature is true, which is used to generate netcdf_meta.h. +FUNCTION(is_enabled feature ret_val) + IF(${feature}) + SET(${ret_val} "yes" PARENT_SCOPE) + SET("NC_${ret_val}" 1 PARENT_SCOPE) + ELSE() + SET(${ret_val} "no" PARENT_SCOPE) + SET("NC_${ret_val}" 0 PARENT_SCOPE) + ENDIF(${feature}) +ENDFUNCTION() + +# A function used to create autotools-style 'yes/no' definitions. +# If a variable is set, it 'yes' is returned. Otherwise, 'no' is +# returned. +# +# Also creates a version of the ret_val prepended with 'NC', +# when feature is true, which is used to generate netcdf_meta.h. +FUNCTION(is_disabled feature ret_val) + IF(${feature}) + SET(${ret_val} "no" PARENT_SCOPE) + ELSE() + SET(${ret_val} "yes" PARENT_SCOPE) + SET("NC_${ret_val}" 1 PARENT_SCOPE) + ENDIF(${feature}) +ENDFUNCTION() \ No newline at end of file From cb28b1ba0c8106fa7f494922d446673a4a85c6af Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 17 Jan 2024 15:47:16 -0600 Subject: [PATCH 23/33] lowercase --- cmake/dependencies.cmake | 586 ++++++++++++++-------------- cmake/netcdf_functions_macros.cmake | 403 ++++++++++--------- 2 files changed, 491 insertions(+), 498 deletions(-) diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 3b74637ec7..f101c6de9c 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -1,78 +1,78 @@ ################################ # PkgConfig ################################ -FIND_PACKAGE(PkgConfig QUIET) +find_package(PkgConfig QUIET) ################################ # MakeDist ################################ # Enable 'dist and distcheck'. # File adapted from http://ensc.de/cmake/FindMakeDist.cmake -FIND_PACKAGE(MakeDist) +find_package(MakeDist) # End 'enable dist and distcheck' ################################ # HDF4 ################################ -IF(ENABLE_HDF4) - SET(USE_HDF4 ON ) +if(ENABLE_HDF4) + set(USE_HDF4 ON ) # Check for include files, libraries. - FIND_PATH(MFHDF_H_INCLUDE_DIR mfhdf.h) - IF(NOT MFHDF_H_INCLUDE_DIR) - MESSAGE(FATAL_ERROR "HDF4 Support specified, cannot find file mfhdf.h") - ELSE() - INCLUDE_DIRECTORIES(${MFHDF_H_INCLUDE_DIR}) - ENDIF() + find_path(MFHDF_H_INCLUDE_DIR mfhdf.h) + if(NOT MFHDF_H_INCLUDE_DIR) + message(FATAL_ERROR "HDF4 Support specified, cannot find file mfhdf.h") + else() + include_directories(${MFHDF_H_INCLUDE_DIR}) + endif() - FIND_LIBRARY(HDF4_DF_LIB NAMES df libdf hdf) - IF(NOT HDF4_DF_LIB) - MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 df library.") - ENDIF() + find_library(HDF4_DF_LIB NAMES df libdf hdf) + if(NOT HDF4_DF_LIB) + message(FATAL_ERROR "Can't find or link to the hdf4 df library.") + endif() - FIND_LIBRARY(HDF4_MFHDF_LIB NAMES mfhdf libmfhdf) - IF(NOT HDF4_MFHDF_LIB) - MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 mfhdf library.") - ENDIF() + find_library(HDF4_MFHDF_LIB NAMES mfhdf libmfhdf) + if(NOT HDF4_MFHDF_LIB) + message(FATAL_ERROR "Can't find or link to the hdf4 mfhdf library.") + endif() - SET(HAVE_LIBMFHDF TRUE ) + set(HAVE_LIBMFHDF TRUE ) - SET(HDF4_LIBRARIES ${HDF4_DF_LIB} ${HDF4_MFHDF_LIB} ) + set(HDF4_LIBRARIES ${HDF4_DF_LIB} ${HDF4_MFHDF_LIB} ) # End include files, libraries. - MESSAGE(STATUS "HDF4 libraries: ${HDF4_DF_LIB}, ${HDF4_MFHDF_LIB}") + message(STATUS "HDF4 libraries: ${HDF4_DF_LIB}, ${HDF4_MFHDF_LIB}") - MESSAGE(STATUS "Seeking HDF4 jpeg dependency.") + message(STATUS "Seeking HDF4 jpeg dependency.") # Look for the jpeglib.h header file. 
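Because find_path and find_library only search when their cache variables are unset, an HDF4 installation in a non-standard location can be handed to this logic through an initial-cache script or -D definitions. The paths below are placeholders, not values from the patch.

    # Preseed the HDF4 lookups so the find_* calls above accept these
    # locations instead of searching.
    set(MFHDF_H_INCLUDE_DIR "/opt/hdf4/include" CACHE PATH "")
    set(HDF4_DF_LIB "/opt/hdf4/lib/libdf.so" CACHE FILEPATH "")
    set(HDF4_MFHDF_LIB "/opt/hdf4/lib/libmfhdf.so" CACHE FILEPATH "")

The jpeglib.h and libjpeg lookups that follow can be preseeded in exactly the same way.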
- FIND_PATH(JPEGLIB_H_INCLUDE_DIR jpeglib.h) - IF(NOT JPEGLIB_H_INCLUDE_DIR) - MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find jpeglib.h") - ELSE() - SET(HAVE_JPEGLIB_H ON CACHE BOOL "") - SET(HAVE_LIBJPEG TRUE ) - INCLUDE_DIRECTORIES(${JPEGLIB_H_INCLUDE_DIR}) - ENDIF() - - FIND_LIBRARY(JPEG_LIB NAMES jpeg libjpeg) - IF(NOT JPEG_LIB) - MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find libjpeg") - ENDIF() - SET(HDF4_LIBRARIES ${JPEG_LIB} ${HDF4_LIBRARIES} ) - MESSAGE(STATUS "Found JPEG libraries: ${JPEG_LIB}") + find_path(JPEGLIB_H_INCLUDE_DIR jpeglib.h) + if(NOT JPEGLIB_H_INCLUDE_DIR) + message(FATAL_ERROR "HDF4 Support enabled but cannot find jpeglib.h") + else() + set(HAVE_JPEGLIB_H ON CACHE BOOL "") + set(HAVE_LIBJPEG TRUE ) + include_directories(${JPEGLIB_H_INCLUDE_DIR}) + endif() + + find_library(JPEG_LIB NAMES jpeg libjpeg) + if(NOT JPEG_LIB) + message(FATAL_ERROR "HDF4 Support enabled but cannot find libjpeg") + endif() + set(HDF4_LIBRARIES ${JPEG_LIB} ${HDF4_LIBRARIES} ) + message(STATUS "Found JPEG libraries: ${JPEG_LIB}") # Option to enable HDF4 file tests. OPTION(ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." ON) - IF(ENABLE_HDF4_FILE_TESTS) + if(ENABLE_HDF4_FILE_TESTS) FIND_PROGRAM(PROG_CURL NAMES curl) - IF(PROG_CURL) - SET(USE_HDF4_FILE_TESTS ON ) - ELSE() - MESSAGE(STATUS "Unable to locate 'curl'. Disabling hdf4 file tests.") - SET(USE_HDF4_FILE_TESTS OFF ) - ENDIF() - SET(USE_HDF4_FILE_TESTS ${USE_HDF4_FILE_TESTS} ) - ENDIF() -ENDIF() + if(PROG_CURL) + set(USE_HDF4_FILE_TESTS ON ) + else() + message(STATUS "Unable to locate 'curl'. Disabling hdf4 file tests.") + set(USE_HDF4_FILE_TESTS OFF ) + endif() + set(USE_HDF4_FILE_TESTS ${USE_HDF4_FILE_TESTS} ) + endif() +endif() ################################ # HDF5 @@ -94,12 +94,12 @@ ENDIF() # * HDF5_INCLUDE_DIR # * ## -IF(USE_HDF5) +if(USE_HDF5) ## # Assert HDF5 version meets minimum required version. ## - SET(HDF5_VERSION_REQUIRED 1.8.10) + set(HDF5_VERSION_REQUIRED 1.8.10) ## @@ -110,14 +110,14 @@ IF(USE_HDF5) # # This script will attempt to determine the version of the HDF5 library programatically. ## - IF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) - SET(HDF5_LIBRARIES ${HDF5_C_LIBRARY} ${HDF5_HL_LIBRARY} ) - SET(HDF5_C_LIBRARIES ${HDF5_C_LIBRARY} ) - SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_LIBRARY} ) - SET(HDF5_HL_LIBRARIES ${HDF5_HL_LIBRARY} ) - INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR}) - MESSAGE(STATUS "Using HDF5 C Library: ${HDF5_C_LIBRARY}") - MESSAGE(STATUS "Using HDF5 HL LIbrary: ${HDF5_HL_LIBRARY}") + if(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) + set(HDF5_LIBRARIES ${HDF5_C_LIBRARY} ${HDF5_HL_LIBRARY} ) + set(HDF5_C_LIBRARIES ${HDF5_C_LIBRARY} ) + set(HDF5_C_LIBRARY_hdf5 ${HDF5_C_LIBRARY} ) + set(HDF5_HL_LIBRARIES ${HDF5_HL_LIBRARY} ) + include_directories(${HDF5_INCLUDE_DIR}) + message(STATUS "Using HDF5 C Library: ${HDF5_C_LIBRARY}") + message(STATUS "Using HDF5 HL LIbrary: ${HDF5_HL_LIBRARY}") if (EXISTS "${HDF5_INCLUDE_DIR}/H5pubconf.h") file(READ "${HDF5_INCLUDE_DIR}/H5pubconf.h" _hdf5_version_lines REGEX "#define[ \t]+H5_VERSION") @@ -127,24 +127,24 @@ IF(USE_HDF5) unset(_hdf5_version) unset(_hdf5_version_lines) endif () - MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") + message(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") ### # If HDF5_VERSION is still empty, we have a problem. # Error out. 
### - IF("${HDF5_VERSION}" STREQUAL "") - MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") - ENDIF() + if("${HDF5_VERSION}" STREQUAL "") + message(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") + endif() ### # Now that we know HDF5_VERSION isn't empty, we can check for minimum required version, # and toggle various options. ### - IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) - MESSAGE(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") - ENDIF() + if(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) + message(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") + endif() - ELSE(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) # We are seeking out HDF5 with Find Package. + else(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) # We are seeking out HDF5 with Find Package. ### # For now we assume that if we are building netcdf # as a shared library, we will use hdf5 as a shared @@ -152,15 +152,15 @@ IF(USE_HDF5) # we will use a static library. This can be toggled # by explicitly modifying NC_FIND_SHARED_LIBS. ## - IF(NC_FIND_SHARED_LIBS) - SET(NC_HDF5_LINK_TYPE "shared") - SET(NC_HDF5_LINK_TYPE_UPPER "SHARED") + if(NC_FIND_SHARED_LIBS) + set(NC_HDF5_LINK_TYPE "shared") + set(NC_HDF5_LINK_TYPE_UPPER "SHARED") ADD_DEFINITIONS(-DH5_BUILT_AS_DYNAMIC_LIB) - ELSE(NC_FIND_SHARED_LIBS) - SET(NC_HDF5_LINK_TYPE "static") - SET(NC_HDF5_LINK_TYPE_UPPER "STATIC") + else(NC_FIND_SHARED_LIBS) + set(NC_HDF5_LINK_TYPE "static") + set(NC_HDF5_LINK_TYPE_UPPER "STATIC") ADD_DEFINITIONS(-DH5_BUILT_AS_STATIC_LIB ) - ENDIF(NC_FIND_SHARED_LIBS) + endif(NC_FIND_SHARED_LIBS) ##### # First, find the C and HL libraries. @@ -169,12 +169,12 @@ IF(USE_HDF5) # examples, even though the previous version of what we # had worked. ##### - IF(MSVC) - SET(SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME}) - FIND_PACKAGE(HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS C HL CONFIG REQUIRED ${NC_HDF5_LINK_TYPE}) - ELSE(MSVC) - FIND_PACKAGE(HDF5 COMPONENTS C HL REQUIRED) - ENDIF(MSVC) + if(MSVC) + set(SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME}) + find_package(HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS C HL CONFIG REQUIRED ${NC_HDF5_LINK_TYPE}) + else(MSVC) + find_package(HDF5 COMPONENTS C HL REQUIRED) + endif(MSVC) ## # Next, check the HDF5 version. This will inform which @@ -182,16 +182,16 @@ IF(USE_HDF5) ## # Some versions of HDF5 set HDF5_VERSION_STRING instead of HDF5_VERSION - IF(HDF5_VERSION_STRING AND NOT HDF5_VERSION) - SET(HDF5_VERSION ${HDF5_VERSION_STRING}) - ENDIF() + if(HDF5_VERSION_STRING AND NOT HDF5_VERSION) + set(HDF5_VERSION ${HDF5_VERSION_STRING}) + endif() ### # If HDF5_VERSION is undefined, attempt to determine it programatically. ### - IF("${HDF5_VERSION}" STREQUAL "") - MESSAGE(STATUS "HDF5_VERSION not detected. Attempting to determine programatically.") + if("${HDF5_VERSION}" STREQUAL "") + message(STATUS "HDF5_VERSION not detected. 
Attempting to determine programatically.") IF (EXISTS "${HDF5_INCLUDE_DIR}/H5pubconf.h") file(READ "${HDF5_INCLUDE_DIR}/H5pubconf.h" _hdf5_version_lines REGEX "#define[ \t]+H5_VERSION") @@ -199,46 +199,46 @@ IF(USE_HDF5) set(HDF5_VERSION "${_hdf5_version}" CACHE STRING "") unset(_hdf5_version) unset(_hdf5_version_lines) - MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") - ENDIF() - ELSE() - SET(HDF5_VERSION ${HDF5_VERSION} CACHE STRING "") - ENDIF() + message(STATUS "Found HDF5 libraries version ${HDF5_VERSION}") + endif() + else() + set(HDF5_VERSION ${HDF5_VERSION} CACHE STRING "") + endif() ### # If HDF5_VERSION is still empty, we have a problem. # Error out. ### - IF("${HDF5_VERSION}" STREQUAL "") - MESSAGE(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") - ENDIF() + if("${HDF5_VERSION}" STREQUAL "") + message(FATAL_ERR "Unable to determine HDF5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}. Please ensure that libhdf5 is installed and accessible.") + endif() ### # Now that we know HDF5_VERSION isn't empty, we can check for minimum required version, # and toggle various options. ### - IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) - MESSAGE(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") - ENDIF() + if(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED}) + message(FATAL_ERROR "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.") + endif() ## # Include the HDF5 include directory. ## - IF(HDF5_INCLUDE_DIRS AND NOT HDF5_INCLUDE_DIR) - SET(HDF5_INCLUDE_DIR ${HDF5_INCLUDE_DIRS} ) - ENDIF() - MESSAGE(STATUS "Using HDF5 include dir: ${HDF5_INCLUDE_DIR}") - INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR}) + if(HDF5_INCLUDE_DIRS AND NOT HDF5_INCLUDE_DIR) + set(HDF5_INCLUDE_DIR ${HDF5_INCLUDE_DIRS} ) + endif() + message(STATUS "Using HDF5 include dir: ${HDF5_INCLUDE_DIR}") + include_directories(${HDF5_INCLUDE_DIR}) ### # This is the block where we figure out what the appropriate # variables are, and we ensure that we end up with # HDF5_C_LIBRARY, HDF5_HL_LIBRARY and HDF5_LIBRARIES. ### - IF(MSVC) + if(MSVC) #### # Environmental variables in Windows when using MSVC # are a hot mess between versions. @@ -247,60 +247,60 @@ IF(USE_HDF5) ## # HDF5 1.8.15 defined HDF5_LIBRARIES. 
## - IF(${HDF5_VERSION} VERSION_LESS "1.8.16") - SET(HDF5_C_LIBRARY hdf5 ) - SET(HDF5_C_LIBRARY_hdf5 hdf5 ) - ENDIF(${HDF5_VERSION} VERSION_LESS "1.8.16") + if(${HDF5_VERSION} VERSION_LESS "1.8.16") + set(HDF5_C_LIBRARY hdf5 ) + set(HDF5_C_LIBRARY_hdf5 hdf5 ) + endif(${HDF5_VERSION} VERSION_LESS "1.8.16") - IF(${HDF5_VERSION} VERSION_GREATER "1.8.15") - IF(NOT HDF5_LIBRARIES AND HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY AND HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY) - SET(HDF5_C_LIBRARY ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) - SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) - SET(HDF5_HL_LIBRARY ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + if(${HDF5_VERSION} VERSION_GREATER "1.8.15") + if(NOT HDF5_LIBRARIES AND HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY AND HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY) + set(HDF5_C_LIBRARY ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + set(HDF5_C_LIBRARY_hdf5 ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + set(HDF5_HL_LIBRARY ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) - SET(HDF5_LIBRARIES ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) - ENDIF() - ENDIF(${HDF5_VERSION} VERSION_GREATER "1.8.15") + set(HDF5_LIBRARIES ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ) + endif() + endif(${HDF5_VERSION} VERSION_GREATER "1.8.15") - ELSE(MSVC) + else(MSVC) # Depending on the install, either HDF5_hdf_library or # HDF5_C_LIBRARIES may be defined. We must check for either. - IF(HDF5_C_LIBRARIES AND NOT HDF5_hdf5_LIBRARY) - SET(HDF5_hdf5_LIBRARY ${HDF5_C_LIBRARIES} ) - ENDIF() + if(HDF5_C_LIBRARIES AND NOT HDF5_hdf5_LIBRARY) + set(HDF5_hdf5_LIBRARY ${HDF5_C_LIBRARIES} ) + endif() - # Some versions of FIND_PACKAGE set HDF5_C_LIBRARIES, but not HDF5_C_LIBRARY + # Some versions of find_package set HDF5_C_LIBRARIES, but not HDF5_C_LIBRARY # We use HDF5_C_LIBRARY below, so need to make sure it is set. - IF(HDF5_C_LIBRARIES AND NOT HDF5_C_LIBRARY) - SET(HDF5_C_LIBRARY ${HDF5_C_LIBRARIES} ) - ENDIF() + if(HDF5_C_LIBRARIES AND NOT HDF5_C_LIBRARY) + set(HDF5_C_LIBRARY ${HDF5_C_LIBRARIES} ) + endif() # Same issue as above... - IF(HDF5_HL_LIBRARIES AND NOT HDF5_HL_LIBRARY) - SET(HDF5_HL_LIBRARY ${HDF5_HL_LIBRARIES} ) - ENDIF() + if(HDF5_HL_LIBRARIES AND NOT HDF5_HL_LIBRARY) + set(HDF5_HL_LIBRARY ${HDF5_HL_LIBRARIES} ) + endif() - ENDIF(MSVC) - IF(NOT HDF5_C_LIBRARY) - SET(HDF5_C_LIBRARY hdf5 ) - ENDIF() + endif(MSVC) + if(NOT HDF5_C_LIBRARY) + set(HDF5_C_LIBRARY hdf5 ) + endif() - ENDIF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) + endif(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) - FIND_PACKAGE(Threads) + find_package(Threads) # There is a missing case in the above code so default it - IF(NOT HDF5_C_LIBRARY_hdf5 OR "${HDF5_C_LIBRARY_hdf5}" STREQUAL "" ) - SET(HDF5_C_LIBRARY_hdf5 "${HDF5_C_LIBRARY}" ) - ENDIF() + if(NOT HDF5_C_LIBRARY_hdf5 OR "${HDF5_C_LIBRARY_hdf5}" STREQUAL "" ) + set(HDF5_C_LIBRARY_hdf5 "${HDF5_C_LIBRARY}" ) + endif() - FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) - IF(NOT HAVE_HDF5_H) - MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.") - ELSE(NOT HAVE_HDF5_H) - INCLUDE_DIRECTORIES(${HAVE_HDF5_H}) - ENDIF(NOT HAVE_HDF5_H) + find_path(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) + if(NOT HAVE_HDF5_H) + message(FATAL_ERROR "Compiling a test with hdf5 failed. 
Either hdf5.h cannot be found, or the log messages should be checked for another reason.") + else(NOT HAVE_HDF5_H) + include_directories(${HAVE_HDF5_H}) + endif(NOT HAVE_HDF5_H) set (CMAKE_REQUIRED_INCLUDES ${HDF5_INCLUDE_DIR}) @@ -314,21 +314,21 @@ IF(USE_HDF5) #endif int main() { int x = 1;}" HAVE_HDF5_ZLIB) - IF(NOT HAVE_HDF5_ZLIB) - MESSAGE(FATAL_ERROR "HDF5 was built without zlib. Rebuild HDF5 with zlib.") - ELSE() + if(NOT HAVE_HDF5_ZLIB) + message(FATAL_ERROR "HDF5 was built without zlib. Rebuild HDF5 with zlib.") + else() # If user has specified the `ZLIB_LIBRARY`, use it; otherwise try to find... - IF(NOT ZLIB_LIBRARY) + if(NOT ZLIB_LIBRARY) find_package(ZLIB) - IF(ZLIB_FOUND) - SET(ZLIB_LIBRARY ${ZLIB_LIBRARIES} ) - ELSE() - MESSAGE(FATAL_ERROR "HDF5 Requires ZLIB, but cannot find libz.") - ENDIF() - ENDIF() - SET(CMAKE_REQUIRED_LIBRARIES ${ZLIB_LIBRARY} ${CMAKE_REQUIRED_LIBRARIES} ) - MESSAGE(STATUS "HDF5 has zlib.") - ENDIF() + if(ZLIB_FOUND) + set(ZLIB_LIBRARY ${ZLIB_LIBRARIES} ) + else() + message(FATAL_ERROR "HDF5 Requires ZLIB, but cannot find libz.") + endif() + endif() + set(CMAKE_REQUIRED_LIBRARIES ${ZLIB_LIBRARY} ${CMAKE_REQUIRED_LIBRARIES} ) + message(STATUS "HDF5 has zlib.") + endif() #Check to see if H5Z_SZIP exists in HDF5_Libraries. If so, we must use szip library. CHECK_C_SOURCE_COMPILES("#include @@ -337,21 +337,21 @@ IF(USE_HDF5) #endif int main() { int x = 1;}" USE_HDF5_SZIP) - IF(USE_HDF5_SZIP) - SET(HAVE_H5Z_SZIP yes ) - ENDIF() + if(USE_HDF5_SZIP) + set(HAVE_H5Z_SZIP yes ) + endif() #### # Check to see if HDF5 library is 1.10.6 or greater. # Used to control path name conversion #### - IF(${HDF5_VERSION} VERSION_GREATER "1.10.5") - SET(HDF5_UTF8_PATHS ON ) - ELSE() - SET(HDF5_UTF8_PATHS OFF ) - ENDIF() + if(${HDF5_VERSION} VERSION_GREATER "1.10.5") + set(HDF5_UTF8_PATHS ON ) + else() + set(HDF5_UTF8_PATHS OFF ) + endif() - MESSAGE("-- HDF5_UTF8_PATHS (HDF5 version 1.10.6+): ${HDF5_UTF8_PATHS}") + message("-- HDF5_UTF8_PATHS (HDF5 version 1.10.6+): ${HDF5_UTF8_PATHS}") # Find out if HDF5 was built with parallel support. # Do that by checking for the targets H5Pget_fapl_mpiposx and @@ -362,21 +362,21 @@ IF(USE_HDF5) # CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpiposix "" HDF5_IS_PARALLEL_MPIPOSIX) CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpio "" HDF5_IS_PARALLEL_MPIO) - IF(HDF5_IS_PARALLEL_MPIO) - SET(HDF5_PARALLEL ON) - ELSE() - SET(HDF5_PARALLEL OFF) - ENDIF() - SET(HDF5_PARALLEL ${HDF5_PARALLEL} ) + if(HDF5_IS_PARALLEL_MPIO) + set(HDF5_PARALLEL ON) + else() + set(HDF5_PARALLEL OFF) + endif() + set(HDF5_PARALLEL ${HDF5_PARALLEL} ) #Check to see if HDF5 library has collective metadata APIs, (HDF5 >= 1.10.0) CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_all_coll_metadata_ops "" HDF5_HAS_COLL_METADATA_OPS) - IF(HDF5_PARALLEL) - SET(HDF5_CC h5pcc ) - ELSE() - SET(HDF5_CC h5cc ) - ENDIF() + if(HDF5_PARALLEL) + set(HDF5_CC h5pcc ) + else() + set(HDF5_CC h5cc ) + endif() # Check to see if H5Dread_chunk is available CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Dread_chunk "" HAS_READCHUNKS) @@ -385,57 +385,57 @@ IF(USE_HDF5) CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_fapl_ros3 "" HAS_HDF5_ROS3) # Check to see if this is hdf5-1.10.3 or later. 
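Several of the probes above lean on CheckLibraryExists, and the same idiom works for any symbol of interest. A minimal sketch (the result variable here is illustrative and not used elsewhere in the build):

    include(CheckLibraryExists)
    # True when the resolved HDF5 library exports the MPI-IO file-driver
    # setter; the answer is cached in HDF5_HAS_MPIO_EXAMPLE.
    check_library_exists("${HDF5_C_LIBRARY_hdf5}" H5Pset_fapl_mpio "" HDF5_HAS_MPIO_EXAMPLE)
    if(HDF5_HAS_MPIO_EXAMPLE)
      message(STATUS "HDF5 exposes H5Pset_fapl_mpio")
    endif()

The hdf5-1.10.3-or-later test that follows keys off exactly such a probe, namely the H5Dread_chunk check recorded in HAS_READCHUNKS.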
- IF(HAS_READCHUNKS) - SET(HDF5_SUPPORTS_PAR_FILTERS ON ) - SET(ENABLE_NCDUMPCHUNKS ON ) - ENDIF() + if(HAS_READCHUNKS) + set(HDF5_SUPPORTS_PAR_FILTERS ON ) + set(ENABLE_NCDUMPCHUNKS ON ) + endif() # Record if ROS3 Driver is available - IF(HAS_HDF5_ROS3) - SET(ENABLE_HDF5_ROS3 ON ) - ENDIF() + if(HAS_HDF5_ROS3) + set(ENABLE_HDF5_ROS3 ON ) + endif() IF (HDF5_SUPPORTS_PAR_FILTERS) - SET(HDF5_HAS_PAR_FILTERS TRUE CACHE BOOL "" ) - SET(HAS_PAR_FILTERS yes CACHE STRING "" ) - ELSE() - SET(HDF5_HAS_PAR_FILTERS FALSE CACHE BOOL "" ) - SET(HAS_PAR_FILTERS no CACHE STRING "" ) - ENDIF() - - FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) - IF(NOT HAVE_HDF5_H) - MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.") - ELSE(NOT HAVE_HDF5_H) - INCLUDE_DIRECTORIES(${HAVE_HDF5_H}) - ENDIF(NOT HAVE_HDF5_H) + set(HDF5_HAS_PAR_FILTERS TRUE CACHE BOOL "" ) + set(HAS_PAR_FILTERS yes CACHE STRING "" ) + else() + set(HDF5_HAS_PAR_FILTERS FALSE CACHE BOOL "" ) + set(HAS_PAR_FILTERS no CACHE STRING "" ) + endif() + + find_path(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH) + if(NOT HAVE_HDF5_H) + message(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.") + else(NOT HAVE_HDF5_H) + include_directories(${HAVE_HDF5_H}) + endif(NOT HAVE_HDF5_H) #option to include HDF5 High Level header file (hdf5_hl.h) in case we are not doing a make install - INCLUDE_DIRECTORIES(${HDF5_HL_INCLUDE_DIR}) + include_directories(${HDF5_HL_INCLUDE_DIR}) -ENDIF(USE_HDF5) +endif(USE_HDF5) ################################ # Curl ################################ # See if we have libcurl -FIND_PACKAGE(CURL) +find_package(CURL) ADD_DEFINITIONS(-DCURL_STATICLIB=1) -INCLUDE_DIRECTORIES(${CURL_INCLUDE_DIRS}) +include_directories(${CURL_INCLUDE_DIRS}) # Define a test flag for have curl library -IF(CURL_LIBRARIES OR CURL_LIBRARY) - SET(FOUND_CURL TRUE) -ELSE() - SET(FOUND_CURL FALSE) -ENDIF() -SET(FOUND_CURL ${FOUND_CURL} TRUE ) +if(CURL_LIBRARIES OR CURL_LIBRARY) + set(FOUND_CURL TRUE) +else() + set(FOUND_CURL FALSE) +endif() +set(FOUND_CURL ${FOUND_CURL} TRUE ) # Start disabling if curl not found -IF(NOT FOUND_CURL) - MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling") - SET(ENABLE_REMOTE_FUNCTIONALITY OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling" FORCE ) -ENDIF() +if(NOT FOUND_CURL) + message(WARNING "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling") + set(ENABLE_REMOTE_FUNCTIONALITY OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY requires libcurl; disabling" FORCE ) +endif() set (CMAKE_REQUIRED_INCLUDES ${CURL_INCLUDE_DIRS}) # Check to see if we have libcurl 7.66 or later @@ -457,7 +457,7 @@ IF (HAVE_LIBCURL_766) set(HAVE_CURLINFO_HTTP_CONNECTCODE TRUE) set(HAVE_CURLOPT_BUFFERSIZE TRUE) set(HAVE_CURLOPT_KEEPALIVE TRUE) -ELSE() +else() # Check to see if CURLOPT_USERNAME is defined. # It is present starting version 7.19.1. CHECK_C_SOURCE_COMPILES(" @@ -499,82 +499,82 @@ ELSE() CHECK_C_SOURCE_COMPILES(" #include int main() {int x = CURLOPT_TCP_KEEPALIVE;}" HAVE_CURLOPT_KEEPALIVE) -ENDIF() +endif() ################################ # Math ################################ # Check for the math library so it can be explicitly linked. 
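Each of those curl capability tests is the same compile-probe idiom with a different symbol; spelled out once for clarity, with CURLOPT_VERBOSE standing in as an example option and the result variable purely illustrative:

    include(CheckCSourceCompiles)
    set(CMAKE_REQUIRED_INCLUDES ${CURL_INCLUDE_DIRS})
    # The check passes only if curl/curl.h is usable and defines the symbol.
    check_c_source_compiles("
      #include <curl/curl.h>
      int main(void) { int x = CURLOPT_VERBOSE; (void)x; return 0; }"
      HAVE_CURLOPT_VERBOSE_EXAMPLE)

The math-library probe below relies on the companion CHECK_FUNCTION_EXISTS helper in the same spirit.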
-IF(NOT WIN32) - FIND_LIBRARY(HAVE_LIBM NAMES math m libm) - IF(NOT HAVE_LIBM) +if(NOT WIN32) + find_library(HAVE_LIBM NAMES math m libm) + if(NOT HAVE_LIBM) CHECK_FUNCTION_EXISTS(exp HAVE_LIBM_FUNC) - IF(NOT HAVE_LIBM_FUNC) - MESSAGE(FATAL_ERROR "Unable to find the math library.") - ELSE(NOT HAVE_LIBM_FUNC) - SET(HAVE_LIBM "") - ENDIF() - ELSE(NOT HAVE_LIBM) - MESSAGE(STATUS "Found Math library: ${HAVE_LIBM}") - ENDIF() -ENDIF() + if(NOT HAVE_LIBM_FUNC) + message(FATAL_ERROR "Unable to find the math library.") + else(NOT HAVE_LIBM_FUNC) + set(HAVE_LIBM "") + endif() + else(NOT HAVE_LIBM) + message(STATUS "Found Math library: ${HAVE_LIBM}") + endif() +endif() ################################ # zlib ################################ # See if we have zlib -FIND_PACKAGE(ZLIB) +find_package(ZLIB) # Define a test flag for have zlib library -IF(ZLIB_FOUND) - INCLUDE_DIRECTORIES(${ZLIB_INCLUDE_DIRS}) - SET(ENABLE_ZLIB TRUE) -ELSE() - SET(ENABLE_ZLIB FALSE) -ENDIF() +if(ZLIB_FOUND) + include_directories(${ZLIB_INCLUDE_DIRS}) + set(ENABLE_ZLIB TRUE) +else() + set(ENABLE_ZLIB FALSE) +endif() ################################ # Zips ################################ IF (ENABLE_FILTER_SZIP) - FIND_PACKAGE(Szip) -ELSEIF(ENABLE_NCZARR) - FIND_PACKAGE(Szip) -ENDIF() + find_package(Szip) +elseif(ENABLE_NCZARR) + find_package(Szip) +endif() IF (ENABLE_FILTER_BZ2) - FIND_PACKAGE(Bz2) -ENDIF() + find_package(Bz2) +endif() IF (ENABLE_FILTER_BLOSC) - FIND_PACKAGE(Blosc) -ENDIF() + find_package(Blosc) +endif() IF (ENABLE_FILTER_ZSTD) - FIND_PACKAGE(Zstd) -ENDIF() + find_package(Zstd) +endif() # Accumulate standard filters set(STD_FILTERS "deflate") # Always have deflate*/ set_std_filter(Szip) -SET(HAVE_SZ ${Szip_FOUND}) -SET(USE_SZIP ${HAVE_SZ}) +set(HAVE_SZ ${Szip_FOUND}) +set(USE_SZIP ${HAVE_SZ}) set_std_filter(Blosc) -IF(Zstd_FOUND) +if(Zstd_FOUND) set_std_filter(Zstd) - SET(HAVE_ZSTD ON) -ENDIF() -IF(Bz2_FOUND) + set(HAVE_ZSTD ON) +endif() +if(Bz2_FOUND) set_std_filter(Bz2) -ELSE() +else() # The reason we use a local version is to support a more comples test case - MESSAGE("libbz2 not found using built-in version") - SET(HAVE_LOCAL_BZ2 ON) - SET(HAVE_BZ2 ON CACHE BOOL "") + message("libbz2 not found using built-in version") + set(HAVE_LOCAL_BZ2 ON) + set(HAVE_BZ2 ON CACHE BOOL "") set(STD_FILTERS "${STD_FILTERS} bz2") -ENDIF() +endif() IF (ENABLE_NCZARR_ZIP) - FIND_PACKAGE(Zip REQUIRED) - INCLUDE_DIRECTORIES(${Zip_INCLUDE_DIRS}) -ENDIF () + find_package(Zip REQUIRED) + include_directories(${Zip_INCLUDE_DIRS}) +endif () ################################ # S3 @@ -582,71 +582,71 @@ ENDIF () # Note we check for the library after checking for enable_s3 # because for some reason this screws up if we unconditionally test for sdk # and it is not available. Fix someday -IF(ENABLE_S3) - IF(NOT ENABLE_S3_INTERNAL) +if(ENABLE_S3) + if(NOT ENABLE_S3_INTERNAL) # See if aws-s3-sdk is available find_package(AWSSDK REQUIRED COMPONENTS s3;transfer) - IF(AWSSDK_FOUND) - SET(ENABLE_S3_AWS ON CACHE BOOL "S3 AWS" FORCE) - INCLUDE_DIRECTORIES(${AWSSDK_INCLUDE_DIR}) - ELSE(AWSSDK_FOUND) - SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) - ENDIF(AWSSDK_FOUND) - ELSE(NOT ENABLE_S3_INTERNAL) + if(AWSSDK_FOUND) + set(ENABLE_S3_AWS ON CACHE BOOL "S3 AWS" FORCE) + include_directories(${AWSSDK_INCLUDE_DIR}) + else(AWSSDK_FOUND) + set(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) + endif(AWSSDK_FOUND) + else(NOT ENABLE_S3_INTERNAL) # Find crypto libraries required with testing with the internal s3 api. 
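To make set_std_filter's contract concrete: it reads ENABLE_FILTER_<NAME> and <Name>_FOUND, then defines ENABLE_<NAME> and HAVE_<NAME> and extends STD_FILTERS. Wiring a hypothetical extra filter through it would look like the sketch below; lz4 is an invented example here, and the sketch assumes a find module for it exists.

    option(ENABLE_FILTER_LZ4 "Enable the lz4 filter if liblz4 can be found." ON)
    if(ENABLE_FILTER_LZ4)
      find_package(Lz4)
    endif()
    # Derives ENABLE_LZ4 / HAVE_LZ4 and appends "lz4" to STD_FILTERS
    # when Lz4_FOUND is true.
    set_std_filter(Lz4)

The S3 crypto lookup that follows returns to plain conditional find_package probing.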
- #FIND_LIBRARY(SSL_LIB NAMES ssl openssl) + #find_library(SSL_LIB NAMES ssl openssl) find_package(OpenSSL REQUIRED) - IF(NOT OpenSSL_FOUND) - MESSAGE(FATAL_ERROR "Can't find an ssl library, required by S3_INTERNAL") - ENDIF(NOT OpenSSL_FOUND) + if(NOT OpenSSL_FOUND) + message(FATAL_ERROR "Can't find an ssl library, required by S3_INTERNAL") + endif(NOT OpenSSL_FOUND) #find_package(Crypto REQUIRED) - #IF(NOT CRYPTO_LIB) - # MESSAGE(FATAL_ERROR "Can't find a crypto library, required by S3_INTERNAL") - #ENDIF(NOT CRYPTO_LIB) + #if(NOT CRYPTO_LIB) + # message(FATAL_ERROR "Can't find a crypto library, required by S3_INTERNAL") + #endif(NOT CRYPTO_LIB) - ENDIF(NOT ENABLE_S3_INTERNAL) -ELSE() - SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) -ENDIF() + endif(NOT ENABLE_S3_INTERNAL) +else() + set(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) +endif() ################################ # LibXML ################################ # see if we have libxml2 -IF(ENABLE_LIBXML2) +if(ENABLE_LIBXML2) find_package(LibXml2) - IF(LibXml2_FOUND) - SET(HAVE_LIBXML2 TRUE) - INCLUDE_DIRECTORIES(${LIBXML2_INCLUDE_DIRS}) - SET(XMLPARSER "libxml2") - ELSE() - SET(HAVE_LIBXML2 FALSE) - ENDIF() -ENDIF(ENABLE_LIBXML2) + if(LibXml2_FOUND) + set(HAVE_LIBXML2 TRUE) + include_directories(${LIBXML2_INCLUDE_DIRS}) + set(XMLPARSER "libxml2") + else() + set(HAVE_LIBXML2 FALSE) + endif() +endif(ENABLE_LIBXML2) ################################ # MPI ################################ -IF(ENABLE_PARALLEL4 OR HDF5_PARALLEL) - FIND_PACKAGE(MPI REQUIRED) -ENDIF() +if(ENABLE_PARALLEL4 OR HDF5_PARALLEL) + find_package(MPI REQUIRED) +endif() ################################ # parallel IO ################################ -IF(ENABLE_PNETCDF) - FIND_LIBRARY(PNETCDF NAMES pnetcdf) - FIND_PATH(PNETCDF_INCLUDE_DIR pnetcdf.h) - IF(NOT PNETCDF) - MESSAGE(STATUS "Cannot find PnetCDF library. Disabling PnetCDF support.") - SET(USE_PNETCDF OFF CACHE BOOL "") - ENDIF() -ENDIF() +if(ENABLE_PNETCDF) + find_library(PNETCDF NAMES pnetcdf) + find_path(PNETCDF_INCLUDE_DIR pnetcdf.h) + if(NOT PNETCDF) + message(STATUS "Cannot find PnetCDF library. 
Disabling PnetCDF support.") + set(USE_PNETCDF OFF CACHE BOOL "") + endif() +endif() ################################ # Doxygen ################################ -IF(ENABLE_DOXYGEN) - FIND_PACKAGE(Doxygen REQUIRED) -ENDIF() \ No newline at end of file +if(ENABLE_DOXYGEN) + find_package(Doxygen REQUIRED) +endif() \ No newline at end of file diff --git a/cmake/netcdf_functions_macros.cmake b/cmake/netcdf_functions_macros.cmake index 2f7f65bf56..c60d380e64 100644 --- a/cmake/netcdf_functions_macros.cmake +++ b/cmake/netcdf_functions_macros.cmake @@ -3,37 +3,37 @@ ################################ macro(set_std_filter filter) -# Upper case the filter name -string(TOUPPER "${filter}" upfilter) -string(TOLOWER "${filter}" downfilter) -if(ENABLE_FILTER_${upfilter}) -# Define a test flag for filter - IF(${filter}_FOUND) - INCLUDE_DIRECTORIES(${${filter}_INCLUDE_DIRS}) - SET(ENABLE_${upfilter} TRUE) - SET(HAVE_${upfilter} ON) - SET(STD_FILTERS "${STD_FILTERS} ${downfilter}") - MESSAGE(">>> Standard Filter: ${downfilter}") - ELSE() - SET(ENABLE_${upfilter} FALSE) - SET(HAVE_${upfilter} OFF) - ENDIF() -ELSE() - SET(HAVE_${upfilter} OFF) -ENDIF() + # Upper case the filter name + string(TOUPPER "${filter}" upfilter) + string(TOLOWER "${filter}" downfilter) + if(ENABLE_FILTER_${upfilter}) + # Define a test flag for filter + if(${filter}_FOUND) + include_directories(${${filter}_INCLUDE_DIRS}) + set(ENABLE_${upfilter} TRUE) + set(HAVE_${upfilter} ON) + set(STD_FILTERS "${STD_FILTERS} ${downfilter}") + message(">>> Standard Filter: ${downfilter}") + else() + set(ENABLE_${upfilter} FALSE) + set(HAVE_${upfilter} OFF) + endif() + else() + set(HAVE_${upfilter} OFF) + endif() endmacro(set_std_filter) macro(getuname name flag) -execute_process(COMMAND "${UNAME}" "${flag}" OUTPUT_VARIABLE "${name}" OUTPUT_STRIP_TRAILING_WHITESPACE) + execute_process(COMMAND "${UNAME}" "${flag}" OUTPUT_VARIABLE "${name}" OUTPUT_STRIP_TRAILING_WHITESPACE) endmacro(getuname) # A macro to check if a C linker supports a particular flag. -MACRO(CHECK_C_LINKER_FLAG M_FLAG M_RESULT) - SET(T_REQ_FLAG "${CMAKE_REQUIRED_FLAGS}") - SET(CMAKE_REQUIRED_FLAGS "${M_FLAG}") +macro(CHECK_C_LINKER_FLAG M_FLAG M_RESULT) + set(T_REQ_FLAG "${CMAKE_REQUIRED_FLAGS}") + set(CMAKE_REQUIRED_FLAGS "${M_FLAG}") CHECK_C_SOURCE_COMPILES("int main() {return 0;}" ${M_RESULT}) - SET(CMAKE_REQUIRED_FLAGS "${T_REQ_FLAG}") -ENDMACRO() + set(CMAKE_REQUIRED_FLAGS "${T_REQ_FLAG}") +endmacro() # Macro for replacing '/MD' with '/MT'. 
# Used only on Windows, /MD tells VS to use the shared @@ -42,8 +42,8 @@ ENDMACRO() # Taken From: # http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F # -MACRO(specify_static_crt_flag) - SET(vars +macro(specify_static_crt_flag) + set(vars CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE @@ -53,234 +53,227 @@ MACRO(specify_static_crt_flag) CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO) - FOREACH(flag_var ${vars}) - IF(${flag_var} MATCHES "/MD") - STRING(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") - ENDIF() - ENDFOREACH() - - FOREACH(flag_var ${vars}) - MESSAGE(STATUS " '${flag_var}': ${${flag_var}}") - ENDFOREACH() - MESSAGE(STATUS "") -ENDMACRO() + foreach(flag_var ${vars}) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif() + endforeach() - -################################ -# Define Utility Macros -################################ + foreach(flag_var ${vars}) + message(STATUS " '${flag_var}': ${${flag_var}}") + endforeach() + message(STATUS "") +endmacro() # Macro to append files to the EXTRA_DIST files. # Note: can only be used in subdirectories because of the use of PARENT_SCOPE -SET(EXTRA_DIST "") -MACRO(ADD_EXTRA_DIST files) - FOREACH(F ${files}) - SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/${F}) - SET(EXTRA_DIST ${EXTRA_DIST} PARENT_SCOPE) - ENDFOREACH() -ENDMACRO() +set(EXTRA_DIST "") +macro(ADD_EXTRA_DIST files) + foreach(F ${files}) + set(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/${F}) + set(EXTRA_DIST ${EXTRA_DIST} PARENT_SCOPE) + endforeach() +endmacro() - -MACRO(GEN_m4 filename) +macro(GEN_m4 filename) set(fallbackdest "${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c") set(dest "${CMAKE_CURRENT_BINARY_DIR}/${filename}.c") # If m4 isn't present, and the generated file doesn't exist, # it cannot be generated and an error should be thrown. - IF(NOT HAVE_M4) - IF(NOT EXISTS ${fallbackdest}) - MESSAGE(FATAL_ERROR "m4 is required to generate ${filename}.c. Please install m4 so that it is on the PATH and try again.") - ELSE() - SET(dest ${fallbackdest}) - ENDIF() - ELSE() - ADD_CUSTOM_COMMAND( + if(not HAVE_M4) + if(NOT EXISTS ${fallbackdest}) + message(FATAL_ERROR "m4 is required to generate ${filename}.c. Please install m4 so that it is on the PATH and try again.") + else() + set(dest ${fallbackdest}) + endif() + else() + add_custom_command( OUTPUT ${dest} COMMAND ${NC_M4} ARGS ${M4FLAGS} ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.m4 > ${dest} VERBATIM ) - ENDIF() -ENDMACRO(GEN_m4) + endif() +endmacro(GEN_m4) # Binary tests, but ones which depend on value of 'TEMP_LARGE' being defined. 
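For reference, gen_m4 is driven from the source directories roughly as below; attr is just an example basename, and the real call sites name whichever .m4 templates the directory actually owns.

    # Generates ${CMAKE_CURRENT_BINARY_DIR}/attr.c from attr.m4 when m4 is
    # available, otherwise falls back to a pre-generated attr.c shipped in
    # the source tree (or fails if neither exists).
    gen_m4(attr)

The generated .c file is then listed among the target's sources; the test-helper macros below are invoked from the test directories in much the same one-line fashion.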
-MACRO(add_bin_env_temp_large_test prefix F) - ADD_EXECUTABLE(${prefix}_${F} ${F}.c) - TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf) +macro(add_bin_env_temp_large_test prefix F) + add_executable(${prefix}_${F} ${F}.c) + target_link_libraries(${prefix}_${F} netcdf) IF(MSVC) - SET_TARGET_PROPERTIES(${prefix}_${F} + set_target_properties(${prefix}_${F} PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" ) - ENDIF() + endif() - ADD_TEST(${prefix}_${F} bash "-c" "TEMP_LARGE=${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") - IF(MSVC) - SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + add_test(${prefix}_${F} bash "-c" "TEMP_LARGE=${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") + if(MSVC) + set_property(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() - + endif() +endmacro() # Tests which are binary, but depend on a particular environmental variable. -MACRO(add_bin_env_test prefix F) - ADD_EXECUTABLE(${prefix}_${F} ${F}.c) - TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf) - IF(MSVC) - SET_TARGET_PROPERTIES(${prefix}_${F} +macro(add_bin_env_test prefix F) + add_executable(${prefix}_${F} ${F}.c) + target_link_libraries(${prefix}_${F} netcdf) + if(MSVC) + set_target_properties(${prefix}_${F} PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" ) - ENDIF() + endif() - ADD_TEST(${prefix}_${F} bash "-c" "TOPSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") - IF(MSVC) - SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") - ENDIF() -ENDMACRO() + add_test(${prefix}_${F} bash "-c" "TOPSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}") + if(MSVC) + set_property(TARGET ${prefix}_${F} PROPERTY FOLDER "tests") + endif() +endmacro() # Build a binary used by a script, but don't make a test out of it. 
-MACRO(build_bin_test F) +macro(build_bin_test F) if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c") - ADD_EXECUTABLE(${F} "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c" ${ARGN}) + add_executable(${F} "${CMAKE_CURRENT_SOURCE_DIR}/${F}.c" ${ARGN}) else() # File should have been copied to the binary directory - ADD_EXECUTABLE(${F} "${CMAKE_CURRENT_BINARY_DIR}/${F}.c" ${ARGN}) + add_executable(${F} "${CMAKE_CURRENT_BINARY_DIR}/${F}.c" ${ARGN}) endif() - TARGET_LINK_LIBRARIES(${F} netcdf ${ALL_TLL_LIBS}) - IF(MSVC) - SET_TARGET_PROPERTIES(${F} + target_link_libraries(${F} netcdf ${ALL_TLL_LIBS}) + if(MSVC) + set_target_properties(${F} PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" ) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() + endif() +endmacro() # Binary tests which are used by a script looking for a specific name. -MACRO(add_bin_test_no_prefix F) +macro(add_bin_test_no_prefix F) build_bin_test(${F} ${ARGN}) - ADD_TEST(${F} ${EXECUTABLE_OUTPUT_PATH}/${F}) - IF(MSVC) - SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/") - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + add_test(${F} ${EXECUTABLE_OUTPUT_PATH}/${F}) + if(MSVC) + set_property(TEST ${F} PROPERTY FOLDER "tests/") + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() + endif() +endmacro() # Binary tests which are used by a script looking for a specific name. -MACRO(build_bin_test_no_prefix F) +macro(build_bin_test_no_prefix F) build_bin_test(${F}) - IF(MSVC) + if(MSVC) #SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/") - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + set_target_properties(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() + endif() +endmacro() # Build a test and add it to the test list. 
-MACRO(add_bin_test prefix F) - ADD_EXECUTABLE(${prefix}_${F} ${F}.c ${ARGN}) - TARGET_LINK_LIBRARIES(${prefix}_${F} +macro(add_bin_test prefix F) + add_executable(${prefix}_${F} ${F}.c ${ARGN}) + target_link_libraries(${prefix}_${F} ${ALL_TLL_LIBS} netcdf ) - IF(MSVC) - SET_TARGET_PROPERTIES(${prefix}_${F} + if(MSVC) + set_target_properties(${prefix}_${F} PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT" ) - ENDIF() - ADD_TEST(${prefix}_${F} + endif() + add_test(${prefix}_${F} ${EXECUTABLE_OUTPUT_PATH}/${prefix}_${F} ) - IF(MSVC) - SET_PROPERTY(TEST ${prefix}_${F} PROPERTY FOLDER "tests/") - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY + if(MSVC) + set_property(TEST ${prefix}_${F} PROPERTY FOLDER "tests/") + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_CURRENT_BINARY_DIR}) - SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE + set_target_properties(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_CURRENT_BINARY_DIR}) - ENDIF() -ENDMACRO() + endif() +endmacro() # A cmake script to print out information at the end of the configuration step. -MACRO(print_conf_summary) - MESSAGE("") - MESSAGE("") - MESSAGE("Configuration Summary:") - MESSAGE("") - MESSAGE(STATUS "Building Shared Libraries: ${BUILD_SHARED_LIBS}") - MESSAGE(STATUS "Building netCDF-4: ${ENABLE_NETCDF_4}") - MESSAGE(STATUS "Building DAP2 Support: ${ENABLE_DAP2}") - MESSAGE(STATUS "Building DAP4 Support: ${ENABLE_DAP4}") - MESSAGE(STATUS "Building Byte-range Support: ${ENABLE_BYTERANGE}") - MESSAGE(STATUS "Building Utilities: ${BUILD_UTILITIES}") - IF(CMAKE_PREFIX_PATH) - MESSAGE(STATUS "CMake Prefix Path: ${CMAKE_PREFIX_PATH}") - ENDIF() - MESSAGE("") - - IF(${STATUS_PNETCDF} OR ${STATUS_PARALLEL}) - MESSAGE("Building Parallel NetCDF") - MESSAGE(STATUS "Using PnetCDF: ${STATUS_PNETCDF}") - MESSAGE(STATUS "Using Parallel IO: ${STATUS_PARALLEL}") - MESSAGE("") - ENDIF() - - MESSAGE("Tests Enabled: ${ENABLE_TESTS}") - IF(ENABLE_TESTS) - MESSAGE(STATUS "DAP Remote Tests: ${ENABLE_DAP_REMOTE_TESTS}") - MESSAGE(STATUS "Extra Tests: ${ENABLE_EXTRA_TESTS}") - MESSAGE(STATUS "Coverage Tests: ${ENABLE_COVERAGE_TESTS}") - MESSAGE(STATUS "Parallel Tests: ${ENABLE_PARALLEL_TESTS}") - MESSAGE(STATUS "Large File Tests: ${ENABLE_LARGE_FILE_TESTS}") - MESSAGE(STATUS "Extreme Numbers: ${ENABLE_EXTREME_NUMBERS}") - MESSAGE(STATUS "Unit Tests: ${ENABLE_UNIT_TESTS}") - ENDIF() - - MESSAGE("") - MESSAGE("Compiler:") - MESSAGE("") - MESSAGE(STATUS "Build Type: ${CMAKE_BUILD_TYPE}") - MESSAGE(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") - MESSAGE(STATUS "CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}") - IF("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") - MESSAGE(STATUS "CMAKE_C_FLAGS_DEBUG: ${CMAKE_C_FLAGS_DEBUG}") - ENDIF() - IF("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") - MESSAGE(STATUS "CMAKE_C_FLAGS_RELEASE: ${CMAKE_C_FLAGS_RELEASE}") - ENDIF() - - MESSAGE(STATUS "Linking against: ${ALL_TLL_LIBS}") - - MESSAGE("") -ENDMACRO() - -MACRO(add_sh_test prefix F) - IF(HAVE_BASH) - ADD_TEST(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};${CMAKE_CURRENT_BINARY_DIR}/${F}.sh ${ARGN}") - ENDIF() -ENDMACRO() +macro(print_conf_summary) + message("") + message("") + message("Configuration Summary:") + message("") + 
message(STATUS "Building Shared Libraries: ${BUILD_SHARED_LIBS}") + message(STATUS "Building netCDF-4: ${ENABLE_NETCDF_4}") + message(STATUS "Building DAP2 Support: ${ENABLE_DAP2}") + message(STATUS "Building DAP4 Support: ${ENABLE_DAP4}") + message(STATUS "Building Byte-range Support: ${ENABLE_BYTERANGE}") + message(STATUS "Building Utilities: ${BUILD_UTILITIES}") + if(CMAKE_PREFIX_PATH) + message(STATUS "CMake Prefix Path: ${CMAKE_PREFIX_PATH}") + endif() + message("") + + if(${STATUS_PNETCDF} OR ${STATUS_PARALLEL}) + message("Building Parallel NetCDF") + message(STATUS "Using PnetCDF: ${STATUS_PNETCDF}") + message(STATUS "Using Parallel IO: ${STATUS_PARALLEL}") + message("") + endif() + + message("Tests Enabled: ${ENABLE_TESTS}") + if(ENABLE_TESTS) + message(STATUS "DAP Remote Tests: ${ENABLE_DAP_REMOTE_TESTS}") + message(STATUS "Extra Tests: ${ENABLE_EXTRA_TESTS}") + message(STATUS "Coverage Tests: ${ENABLE_COVERAGE_TESTS}") + message(STATUS "Parallel Tests: ${ENABLE_PARALLEL_TESTS}") + message(STATUS "Large File Tests: ${ENABLE_LARGE_FILE_TESTS}") + message(STATUS "Extreme Numbers: ${ENABLE_EXTREME_NUMBERS}") + message(STATUS "Unit Tests: ${ENABLE_UNIT_TESTS}") + endif() + + message("") + message("Compiler:") + message("") + message(STATUS "Build Type: ${CMAKE_BUILD_TYPE}") + message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") + message(STATUS "CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}") + if("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") + message(STATUS "CMAKE_C_FLAGS_DEBUG: ${CMAKE_C_FLAGS_DEBUG}") + endif() + if("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") + message(STATUS "CMAKE_C_FLAGS_RELEASE: ${CMAKE_C_FLAGS_RELEASE}") + endif() + + message(STATUS "Linking against: ${ALL_TLL_LIBS}") + + message("") +endmacro() + +macro(add_sh_test prefix F) + if(HAVE_BASH) + add_test(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};${CMAKE_CURRENT_BINARY_DIR}/${F}.sh ${ARGN}") + endif() +endmacro() ################################ @@ -289,14 +282,14 @@ ENDMACRO() function(booleanize VALUE RETVAR) # force case - STRING(TOLOWER "${VALUE}" LCVALUE) + string(TOLOWER "${VALUE}" LCVALUE) # Now do all the comparisons if(LCVALUE IN_LIST TRUELIST OR LCVALUE GREATER 0) - SET(${RETVAR} TRUE PARENT_SCOPE) + set(${RETVAR} TRUE PARENT_SCOPE) elseif(LCVALUE IN_LIST FALSELIST OR LCVALUE MATCHES ".*-notfound" OR LCVALUE STREQUAL "") - SET(${RETVAR} FALSE PARENT_SCOPE) + set(${RETVAR} FALSE PARENT_SCOPE) else() - SET(${RETVAR} NOTFOUND PARENT_SCOPE) + set(${RETVAR} NOTFOUND PARENT_SCOPE) endif() endfunction() @@ -307,15 +300,15 @@ endfunction() # # Also creates a version of the ret_val prepended with 'NC', # when feature is true, which is used to generate netcdf_meta.h. -FUNCTION(is_enabled feature ret_val) - IF(${feature}) - SET(${ret_val} "yes" PARENT_SCOPE) - SET("NC_${ret_val}" 1 PARENT_SCOPE) - ELSE() - SET(${ret_val} "no" PARENT_SCOPE) - SET("NC_${ret_val}" 0 PARENT_SCOPE) - ENDIF(${feature}) -ENDFUNCTION() +function(is_enabled feature ret_val) + if(${feature}) + set(${ret_val} "yes" PARENT_SCOPE) + set("NC_${ret_val}" 1 PARENT_SCOPE) + else() + set(${ret_val} "no" PARENT_SCOPE) + set("NC_${ret_val}" 0 PARENT_SCOPE) + endif(${feature}) +endfunction() # A function used to create autotools-style 'yes/no' definitions. # If a variable is set, it 'yes' is returned. Otherwise, 'no' is @@ -323,11 +316,11 @@ ENDFUNCTION() # # Also creates a version of the ret_val prepended with 'NC', # when feature is true, which is used to generate netcdf_meta.h. 
-FUNCTION(is_disabled feature ret_val) - IF(${feature}) - SET(${ret_val} "no" PARENT_SCOPE) - ELSE() - SET(${ret_val} "yes" PARENT_SCOPE) - SET("NC_${ret_val}" 1 PARENT_SCOPE) - ENDIF(${feature}) -ENDFUNCTION() \ No newline at end of file +function(is_disabled feature ret_val) + if(${feature}) + set(${ret_val} "no" PARENT_SCOPE) + else() + set(${ret_val} "yes" PARENT_SCOPE) + set("NC_${ret_val}" 1 PARENT_SCOPE) + endif(${feature}) +endfunction() \ No newline at end of file From 4a07adf261280553f5085bfb01b32e047e003614 Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 17 Jan 2024 15:59:02 -0600 Subject: [PATCH 24/33] lower case --- CMakeInstallation.cmake | 72 ++++++++++++++++++++--------------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 5967889c95..4cc5215b23 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -10,13 +10,13 @@ # build binary installers. ##### -SET(CPACK_PACKAGE_VENDOR "Unidata") +set(CPACK_PACKAGE_VENDOR "Unidata") ## # Declare exclusions list used when building a source file. # NOTE!! This list uses regular expressions, NOT wildcards!! ## -SET(CPACK_SOURCE_IGNORE_FILES "${CPACK_SOURCE_IGNORE_FILES}" +set(CPACK_SOURCE_IGNORE_FILES "${CPACK_SOURCE_IGNORE_FILES}" "/expecttds3/" "/nocacheremote3/" "/nocacheremote4/" @@ -46,21 +46,21 @@ SET(CPACK_SOURCE_IGNORE_FILES "${CPACK_SOURCE_IGNORE_FILES}" # Nullsoft Installation System (NSIS) ### -SET(CPACK_PACKAGE_CONTACT "NetCDF Support ") - -IF(WIN32) - SET(CPACK_NSIS_MODIFY_PATH ON) - SET(CPACK_NSIS_DISPLAY_NAME "NetCDF ${netCDF_VERSION}") - SET(CPACK_NSIS_PACKAGE_NAME "NetCDF ${netCDF_VERSION}") - SET(CPACK_NSIS_HELP_LINK "https://www.unidata.ucar.edu/netcdf") - SET(CPACK_NSIS_URL_INFO_ABOUT "https://www.unidata.ucar.edu/netcdf") - SET(CPACK_NSIS_CONTACT "support-netcdf@unidata.ucar.edu") - SET(CPACK_NSIS_ENABLE_UNINSTALL_BEFORE_INSTALL ON) - SET(CPACK_NSIS_MENU_LINKS +set(CPACK_PACKAGE_CONTACT "NetCDF Support ") + +if(WIN32) + set(CPACK_NSIS_MODIFY_PATH ON) + set(CPACK_NSIS_DISPLAY_NAME "NetCDF ${netCDF_VERSION}") + set(CPACK_NSIS_PACKAGE_NAME "NetCDF ${netCDF_VERSION}") + set(CPACK_NSIS_HELP_LINK "https://www.unidata.ucar.edu/netcdf") + set(CPACK_NSIS_URL_INFO_ABOUT "https://www.unidata.ucar.edu/netcdf") + set(CPACK_NSIS_CONTACT "support-netcdf@unidata.ucar.edu") + set(CPACK_NSIS_ENABLE_UNINSTALL_BEFORE_INSTALL ON) + set(CPACK_NSIS_MENU_LINKS "https://www.unidata.ucar.edu/software/netcdf" "Unidata Website" "https://docs.unidata.ucar.edu/netcdf-c" "NetCDF Stable Documentation") -ENDIF() +endif() ### # Set debian-specific options used when @@ -71,51 +71,51 @@ ENDIF() # This should be set using the output of dpkg --print-architecture. FIND_PROGRAM(NC_DPKG NAMES dpkg) -IF(NC_DPKG) +if(NC_DPKG) # Define a macro for getting the dpkg architecture. 
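Because that ignore list is matched as regular expressions rather than shell globs (as the NOTE above stresses), an exclusion one might naively write as *.nc has to be spelled with an escaped dot and an anchor. The entries below are purely illustrative additions, not part of the patch.

    # Exclude generated .nc files anywhere in the tree, and any .git
    # directory, from source tarballs.
    list(APPEND CPACK_SOURCE_IGNORE_FILES "\\.nc$" "/\\.git/")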
- MACRO(getdpkg_arch arch) + macro(getdpkg_arch arch) execute_process(COMMAND "${NC_DPKG}" "--print-architecture" OUTPUT_VARIABLE "${arch}" OUTPUT_STRIP_TRAILING_WHITESPACE) - ENDMACRO(getdpkg_arch) + endmacro(getdpkg_arch) getdpkg_arch(dpkg_arch) - SET(CPACK_DEBIAN_PACKAGE_NAME "netcdf4-dev") - SET(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${dpkg_arch}") - SET(CPACK_DEBIAN_PACKAGE_DEPENDS "zlib1g (>= 1:1.2.3.4), libhdf5-7 (>= 1.8.11), libcurl4-openssl-dev (>= 7.22.0)") -ENDIF() + set(CPACK_DEBIAN_PACKAGE_NAME "netcdf4-dev") + set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${dpkg_arch}") + set(CPACK_DEBIAN_PACKAGE_DEPENDS "zlib1g (>= 1:1.2.3.4), libhdf5-7 (>= 1.8.11), libcurl4-openssl-dev (>= 7.22.0)") +endif() ## # Set Copyright, License info for CPack. ## -CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/COPYRIGHT +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/COPYRIGHT ${CMAKE_CURRENT_BINARY_DIR}/COPYRIGHT.txt @ONLY ) -SET(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_BINARY_DIR}/COPYRIGHT.txt") -IF(NOT CPACK_PACK_VERSION) - SET(CPACK_PACKAGE_VERSION ${VERSION}) -ENDIF() +set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_BINARY_DIR}/COPYRIGHT.txt") +if(NOT CPACK_PACK_VERSION) + set(CPACK_PACKAGE_VERSION ${VERSION}) +endif() -IF(UNIX) - SET(CPACK_GENERATOR "STGZ" "TBZ2" "DEB" "ZIP") -ENDIF() +if(UNIX) + set(CPACK_GENERATOR "STGZ" "TBZ2" "DEB" "ZIP") +endif() -IF(APPLE) - SET(CPACK_SOURCE_GENERATOR "TGZ") - SET(CPACK_GENERATOR "productbuild" "STGZ" "TBZ2" "TGZ" "ZIP") -ENDIF() +if(APPLE) + set(CPACK_SOURCE_GENERATOR "TGZ") + set(CPACK_GENERATOR "productbuild" "STGZ" "TBZ2" "TGZ" "ZIP") +endif() ## # Create an 'uninstall' target. ## -CONFIGURE_FILE( +configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" IMMEDIATE @ONLY) -ADD_CUSTOM_TARGET(uninstall +add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) ## @@ -133,4 +133,4 @@ set(CPACK_COMPONENT_DEPENDENCIES_DESCRIPTION set(CPACK_COMPONENT_DOCUMENTATION_DESCRIPTION "The NetCDF-C user documentation.") -INCLUDE(CPack) +include(CPack) \ No newline at end of file From 5e487fd381a3897c699cd3d7de61871315ee180c Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Wed, 17 Jan 2024 16:07:22 -0600 Subject: [PATCH 25/33] lowercase --- CMakeLists.txt | 1874 ++++++++++++++++++++++----------------------- PostInstall.cmake | 2 +- 2 files changed, 938 insertions(+), 938 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 37d1b3e5ef..c51e34fd4c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -44,43 +44,43 @@ endif () # http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning ##### -SET(NC_VERSION_NOTE "-development") -SET(netCDF_VERSION ${PROJECT_VERSION}${NC_VERSION_NOTE}) -SET(VERSION ${netCDF_VERSION}) -SET(NC_VERSION ${netCDF_VERSION}) -SET(PACKAGE_VERSION ${VERSION}) +set(NC_VERSION_NOTE "-development") +set(netCDF_VERSION ${PROJECT_VERSION}${NC_VERSION_NOTE}) +set(VERSION ${netCDF_VERSION}) +set(NC_VERSION ${netCDF_VERSION}) +set(PACKAGE_VERSION ${VERSION}) # These values should match those in configure.ac -SET(netCDF_LIB_VERSION 19) -SET(netCDF_SO_VERSION 19) +set(netCDF_LIB_VERSION 19) +set(netCDF_SO_VERSION 19) # Version of the dispatch table. This must match the value in # configure.ac. -SET(NC_DISPATCH_VERSION 5) +set(NC_DISPATCH_VERSION 5) # Get system configuration, Use it to determine osname, os release, cpu. These # will be used when committing to CDash. 
find_program(UNAME NAMES uname) -IF(UNAME) +if(UNAME) getuname(osname -s) getuname(osrel -r) getuname(cpu -m) set(TMP_BUILDNAME "${osname}-${osrel}-${cpu}") -ENDIF() +endif() # Define some Platforms if(osname MATCHES "CYGWIN.*") - SET(ISCYGWIN yes) + set(ISCYGWIN yes) endif() if(osname MATCHES "Darwin.*") - SET(ISOSX yes) + set(ISOSX yes) endif() if(MSVC) - SET(ISMSVC yes) + set(ISMSVC yes) endif() if(osname MATCHES "MINGW.*" OR osname MATCHES "MSYS.*") - SET(ISMINGW yes) - SET(MINGW yes) + set(ISMINGW yes) + set(MINGW yes) endif() ### @@ -91,50 +91,50 @@ endif() # For ctest scripts, we can use CTEST_BUILD_NAME. ### -SET(BUILDNAME_PREFIX "" CACHE STRING "") -SET(BUILDNAME_SUFFIX "" CACHE STRING "") +set(BUILDNAME_PREFIX "" CACHE STRING "") +set(BUILDNAME_SUFFIX "" CACHE STRING "") -IF(BUILDNAME_PREFIX) - SET(TMP_BUILDNAME "${BUILDNAME_PREFIX}-${TMP_BUILDNAME}") -ENDIF() +if(BUILDNAME_PREFIX) + set(TMP_BUILDNAME "${BUILDNAME_PREFIX}-${TMP_BUILDNAME}") +endif() -IF(BUILDNAME_SUFFIX) - SET(TMP_BUILDNAME "${TMP_BUILDNAME}-${BUILDNAME_SUFFIX}") -ENDIF() +if(BUILDNAME_SUFFIX) + set(TMP_BUILDNAME "${TMP_BUILDNAME}-${BUILDNAME_SUFFIX}") +endif() -IF(NOT BUILDNAME) - SET(BUILDNAME "${TMP_BUILDNAME}" CACHE STRING "Build name variable for CDash") -ENDIF() +if(NOT BUILDNAME) + set(BUILDNAME "${TMP_BUILDNAME}" CACHE STRING "Build name variable for CDash") +endif() ### # End BUILDNAME customization. ### # For CMAKE_INSTALL_LIBDIR -INCLUDE(GNUInstallDirs) +include(GNUInstallDirs) -IF(MSVC) - SET(GLOBAL PROPERTY USE_FOLDERS ON) - ADD_COMPILE_OPTIONS("/utf-8") -ENDIF() +if(MSVC) + set(GLOBAL PROPERTY USE_FOLDERS ON) + add_compile_options("/utf-8") +endif() # auto-configure style checks, other CMake modules. -INCLUDE(CheckLibraryExists) -INCLUDE(CheckIncludeFile) -INCLUDE(CheckIncludeFiles) -INCLUDE(CheckTypeSize) -INCLUDE(CheckFunctionExists) -INCLUDE(CheckCXXSourceCompiles) -INCLUDE(CheckCSourceCompiles) -INCLUDE(TestBigEndian) -INCLUDE(CheckSymbolExists) -INCLUDE(GetPrerequisites) -INCLUDE(CheckCCompilerFlag) +include(CheckLibraryExists) +include(CheckIncludeFile) +include(CheckIncludeFiles) +include(CheckTypeSize) +include(CheckFunctionExists) +include(CheckCXXSourceCompiles) +include(CheckCSourceCompiles) +include(TestBigEndian) +include(CheckSymbolExists) +include(GetPrerequisites) +include(CheckCCompilerFlag) # A check to see if the system is big endian TEST_BIG_ENDIAN(BIGENDIAN) -IF(${BIGENDIAN}) - SET(WORDS_BIGENDIAN "1") -ENDIF(${BIGENDIAN}) +if(${BIGENDIAN}) + set(WORDS_BIGENDIAN "1") +endif(${BIGENDIAN}) # Define a function to convert various true or false values # to either TRUE|FALSE (uppercase). @@ -142,39 +142,39 @@ ENDIF(${BIGENDIAN}) #1, ON, YES, TRUE, Y, #0, OFF, NO, FALSE, N, IGNORE, NOTFOUND -NOTFOUND "" -SET(TRUELIST "on;yes;y;true") -SET(FALSELIST "off;no;n;false;0;ignore;notfound") +set(TRUELIST "on;yes;y;true") +set(FALSELIST "off;no;n;false;0;ignore;notfound") # Set the build type. -IF(NOT CMAKE_BUILD_TYPE) - SET(CMAKE_BUILD_TYPE DEBUG CACHE STRING "Choose the type of build, options are: None, Debug, Release." +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE DEBUG CACHE STRING "Choose the type of build, options are: None, Debug, Release." FORCE) -ENDIF() +endif() # Set build type uppercase -STRING(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE) +string(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE) # Determine the configure date. 
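The TRUELIST and FALSELIST values defined above back the booleanize() helper that is used later on (for instance when interpreting PLUGIN_INSTALL_DIR). That helper is not part of this hunk, so the following is only a sketch of the idea, not the project's actual implementation:

    # Sketch only: normalize an arbitrary value to TRUE/FALSE/NOTFOUND.
    function(booleanize val retvar)
      string(TOLOWER "${val}" _lc)
      if(_lc IN_LIST TRUELIST OR _lc STREQUAL "1")
        set(${retvar} TRUE PARENT_SCOPE)       # recognized "true" spelling
      elseif(_lc IN_LIST FALSELIST OR _lc STREQUAL "")
        set(${retvar} FALSE PARENT_SCOPE)      # recognized "false" spelling
      else()
        set(${retvar} NOTFOUND PARENT_SCOPE)   # anything else, e.g. a path
      endif()
    endfunction()
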
-IF(DEFINED ENV{SOURCE_DATE_EPOCH}) - EXECUTE_PROCESS( +if(DEFINED ENV{SOURCE_DATE_EPOCH}) + execute_process( COMMAND "date" "-u" "-d" "@$ENV{SOURCE_DATE_EPOCH}" OUTPUT_VARIABLE CONFIG_DATE ) -ELSE() - EXECUTE_PROCESS( +else() + execute_process( COMMAND date OUTPUT_VARIABLE CONFIG_DATE ) -ENDIF() -IF(CONFIG_DATE) +endif() +if(CONFIG_DATE) string(STRIP ${CONFIG_DATE} CONFIG_DATE) -ENDIF() +endif() ## # Allow for extra dependencies. ## -SET(EXTRA_DEPS "") +set(EXTRA_DEPS "") ################################ # End Project Properties @@ -184,17 +184,17 @@ SET(EXTRA_DEPS "") # Set CTest Properties ################################ -ENABLE_TESTING() -INCLUDE(CTest) +enable_testing() +include(CTest) # Set Memory test program for non-MSVC based builds. # Assume valgrind for now. -IF((NOT MSVC) AND (NOT MINGW) AND (NOT ISCYGWIN)) - SET(CTEST_MEMORYCHECK_COMMAND valgrind CACHE STRING "") -ENDIF() +if((NOT MSVC) AND (NOT MINGW) AND (NOT ISCYGWIN)) + set(CTEST_MEMORYCHECK_COMMAND valgrind CACHE STRING "") +endif() # Set variable to define the build type. -INCLUDE(GenerateExportHeader) +include(GenerateExportHeader) ################################ # End CTest Properties @@ -210,93 +210,93 @@ INCLUDE(GenerateExportHeader) # BUILD_SHARED_LIBS is provided by/used by # CMake directly. ## -OPTION(BUILD_SHARED_LIBS "Configure netCDF as a shared library." ON) -IF(BUILD_SHARED_LIBS) - SET(CMAKE_POSITION_INDEPENDENT_CODE ON) -ENDIF() +option(BUILD_SHARED_LIBS "Configure netCDF as a shared library." ON) +if(BUILD_SHARED_LIBS) + set(CMAKE_POSITION_INDEPENDENT_CODE ON) +endif() -OPTION(NC_FIND_SHARED_LIBS "Find dynamically-built versions of dependent libraries" ${BUILD_SHARED_LIBS}) +option(NC_FIND_SHARED_LIBS "Find dynamically-built versions of dependent libraries" ${BUILD_SHARED_LIBS}) ## # We've had a request to allow for non-versioned shared libraries. # This seems reasonable enough to accommodate. See # https://github.com/Unidata/netcdf-c/issues/228 for more info. ## -OPTION(ENABLE_SHARED_LIBRARY_VERSION "Encode the library SO version in the file name of the generated library file." ON) +option(ENABLE_SHARED_LIBRARY_VERSION "Encode the library SO version in the file name of the generated library file." ON) # Set some default linux gcc & apple compiler options for # debug builds. -IF(CMAKE_COMPILER_IS_GNUCC OR APPLE) - OPTION(ENABLE_COVERAGE_TESTS "Enable compiler flags needed to perform coverage tests." OFF) - OPTION(ENABLE_CONVERSION_WARNINGS "Enable warnings for implicit conversion from 64 to 32-bit datatypes." ON) - OPTION(ENABLE_LARGE_FILE_TESTS "Enable large file tests." OFF) +if(CMAKE_COMPILER_IS_GNUCC OR APPLE) + option(ENABLE_COVERAGE_TESTS "Enable compiler flags needed to perform coverage tests." OFF) + option(ENABLE_CONVERSION_WARNINGS "Enable warnings for implicit conversion from 64 to 32-bit datatypes." ON) + option(ENABLE_LARGE_FILE_TESTS "Enable large file tests." OFF) # Debugging flags - SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wall") + set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wall") # Check to see if -Wl,--no-undefined is supported. 
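The configure-date logic at the start of this hunk shells out to date(1) and honors SOURCE_DATE_EPOCH so that reproducible builds can pin the embedded timestamp. A shell-free way to get a similar effect (an alternative sketch, not what this patch does) is CMake's own timestamp function, which also respects SOURCE_DATE_EPOCH in reasonably recent CMake releases:

    # Alternative sketch: no external "date" command needed.
    string(TIMESTAMP CONFIG_DATE "%Y-%m-%d %H:%M:%S" UTC)
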
CHECK_C_LINKER_FLAG("-Wl,--no-undefined" LIBTOOL_HAS_NO_UNDEFINED) - IF(LIBTOOL_HAS_NO_UNDEFINED) - SET(CMAKE_SHARED_LINKER_FLAGS_DEBUG "${CMAKE_SHARED_LINKER_FLAGS_DEBUG} -Wl,--no-undefined") - ENDIF() - SET(CMAKE_REQUIRED_FLAGS "${TMP_CMAKE_REQUIRED_FLAGS}") + if(LIBTOOL_HAS_NO_UNDEFINED) + set(CMAKE_SHARED_LINKER_FLAGS_DEBUG "${CMAKE_SHARED_LINKER_FLAGS_DEBUG} -Wl,--no-undefined") + endif() + set(CMAKE_REQUIRED_FLAGS "${TMP_CMAKE_REQUIRED_FLAGS}") # Coverage tests need to have optimization turned off. - IF(ENABLE_COVERAGE_TESTS) - SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -coverage -fprofile-arcs -ftest-coverage") - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -coverage -fprofile-arcs -ftest-coverage") - SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage") - MESSAGE(STATUS "Coverage Tests: On.") - ENDIF() + if(ENABLE_COVERAGE_TESTS) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -coverage -fprofile-arcs -ftest-coverage") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -coverage -fprofile-arcs -ftest-coverage") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage") + message(STATUS "Coverage Tests: On.") + endif() # Warnings for 64-to-32 bit conversions. - IF(ENABLE_CONVERSION_WARNINGS) + if(ENABLE_CONVERSION_WARNINGS) CHECK_C_COMPILER_FLAG(-Wconversion CC_HAS_WCONVERSION) CHECK_C_COMPILER_FLAG(-Wshorten-64-to-32 CC_HAS_SHORTEN_64_32) - IF(CC_HAS_SHORTEN_64_32) - SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wshorten-64-to-32") - ENDIF() - IF(CC_HAS_WCONVERSION) - SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wconversion") - ENDIF() + if(CC_HAS_SHORTEN_64_32) + set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wshorten-64-to-32") + endif() + if(CC_HAS_WCONVERSION) + set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wconversion") + endif() - ENDIF(ENABLE_CONVERSION_WARNINGS) + endif(ENABLE_CONVERSION_WARNINGS) -ENDIF(CMAKE_COMPILER_IS_GNUCC OR APPLE) +endif(CMAKE_COMPILER_IS_GNUCC OR APPLE) # End default linux gcc & apple compiler options. # Use relative pathnames in __FILE__ macros on MINGW: -IF(MINGW) +if(MINGW) CHECK_C_COMPILER_FLAG("-fmacro-prefix-map='${CMAKE_SOURCE_DIR}'=." CC_HAS_MACRO_PREFIX_MAP) - IF(CC_HAS_MACRO_PREFIX_MAP) - SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fmacro-prefix-map='${CMAKE_SOURCE_DIR}'=.") - ENDIF() -ENDIF() + if(CC_HAS_MACRO_PREFIX_MAP) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fmacro-prefix-map='${CMAKE_SOURCE_DIR}'=.") + endif() +endif() -ADD_DEFINITIONS() +add_definitions() # Suppress CRT Warnings. 
# Only necessary for Windows -IF(MSVC) - ADD_DEFINITIONS(-D_CRT_SECURE_NO_WARNINGS) -ENDIF() +if(MSVC) + add_definitions(-D_CRT_SECURE_NO_WARNINGS) +endif() # Support ANSI format specifiers for *printf on MINGW: -IF(MINGW) - ADD_DEFINITIONS(-D__USE_MINGW_ANSI_STDIO=1) -ENDIF() +if(MINGW) + add_definitions(-D__USE_MINGW_ANSI_STDIO=1) +endif() ##### # System inspection checks ##### -INCLUDE_DIRECTORIES(${CMAKE_CURRENT_BINARY_DIR}/include) -INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/include) -INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/oc2) -INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/libsrc) -SET(CMAKE_REQUIRED_INCLUDES ${CMAKE_CURRENT_SOURCE_DIR}/libsrc) +include_directories(${CMAKE_CURRENT_BINARY_DIR}/include) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/oc2) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libsrc) +set(CMAKE_REQUIRED_INCLUDES ${CMAKE_CURRENT_SOURCE_DIR}/libsrc) ################################ # End Compiler Configuration @@ -306,13 +306,13 @@ SET(CMAKE_REQUIRED_INCLUDES ${CMAKE_CURRENT_SOURCE_DIR}/libsrc) # Configuration for post-install RPath # Adapted from http://www.cmake.org/Wiki/CMake_RPATH_handling ## -IF(NOT WIN32 AND BUILD_SHARED_LIBS) +if(NOT WIN32 AND BUILD_SHARED_LIBS) # use, i.e. don't skip the full RPATH for the build tree - SET(CMAKE_SKIP_BUILD_RPATH FALSE) + set(CMAKE_SKIP_BUILD_RPATH FALSE) # when building, don't use the install RPATH already # (but later on when installing) - SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) + set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) if(APPLE) set(CMAKE_MACOSX_RPATH ON) @@ -320,16 +320,16 @@ IF(NOT WIN32 AND BUILD_SHARED_LIBS) # add the automatically determined parts of the RPATH # which point to directories outside the build tree to the install RPATH - SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) + set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) # the RPATH to be used when installing, # but only if it's not a system directory - LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}" isSystemDir) - IF("${isSystemDir}" STREQUAL "-1") - SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") - ENDIF("${isSystemDir}" STREQUAL "-1") + list(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}" isSystemDir) + if("${isSystemDir}" STREQUAL "-1") + set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") + endif("${isSystemDir}" STREQUAL "-1") -ENDIF() +endif() ## # End configuration for post-install RPath @@ -340,79 +340,79 @@ ENDIF() ################################ # Default Cache variables. 
-SET(DEFAULT_CHUNK_SIZE 16777216 CACHE STRING "Default Chunk Cache Size.") -SET(DEFAULT_CHUNK_CACHE_SIZE 16777216U CACHE STRING "Default Chunk Cache Size.") -SET(DEFAULT_CHUNKS_IN_CACHE 1000 CACHE STRING "Default number of chunks in cache.") -SET(DEFAULT_CHUNK_CACHE_PREEMPTION 0.75 CACHE STRING "Default file chunk cache preemption policy (a number between 0 and 1, inclusive.") +set(DEFAULT_CHUNK_SIZE 16777216 CACHE STRING "Default Chunk Cache Size.") +set(DEFAULT_CHUNK_CACHE_SIZE 16777216U CACHE STRING "Default Chunk Cache Size.") +set(DEFAULT_CHUNKS_IN_CACHE 1000 CACHE STRING "Default number of chunks in cache.") +set(DEFAULT_CHUNK_CACHE_PREEMPTION 0.75 CACHE STRING "Default file chunk cache preemption policy (a number between 0 and 1, inclusive.") # HDF5 default cache size values -SET(CHUNK_CACHE_SIZE ${DEFAULT_CHUNK_CACHE_SIZE} CACHE STRING "Default HDF5 Chunk Cache Size.") -SET(CHUNK_CACHE_NELEMS ${DEFAULT_CHUNKS_IN_CACHE} CACHE STRING "Default maximum number of elements in cache.") -SET(CHUNK_CACHE_PREEMPTION ${DEFAULT_CHUNK_CACHE_PREEMPTION} CACHE STRING "Default file chunk cache preemption policy for HDf5 files(a number between 0 and 1, inclusive.") +set(CHUNK_CACHE_SIZE ${DEFAULT_CHUNK_CACHE_SIZE} CACHE STRING "Default HDF5 Chunk Cache Size.") +set(CHUNK_CACHE_NELEMS ${DEFAULT_CHUNKS_IN_CACHE} CACHE STRING "Default maximum number of elements in cache.") +set(CHUNK_CACHE_PREEMPTION ${DEFAULT_CHUNK_CACHE_PREEMPTION} CACHE STRING "Default file chunk cache preemption policy for HDf5 files(a number between 0 and 1, inclusive.") -SET(NETCDF_LIB_NAME "" CACHE STRING "Default name of the netcdf library.") -SET(TEMP_LARGE "." CACHE STRING "Where to put large temp files if large file tests are run.") -SET(NCPROPERTIES_EXTRA "" CACHE STRING "Specify extra pairs for _NCProperties.") +set(NETCDF_LIB_NAME "" CACHE STRING "Default name of the netcdf library.") +set(TEMP_LARGE "." CACHE STRING "Where to put large temp files if large file tests are run.") +set(NCPROPERTIES_EXTRA "" CACHE STRING "Specify extra pairs for _NCProperties.") -IF(NOT NETCDF_LIB_NAME STREQUAL "") - SET(MOD_NETCDF_NAME ON) -ENDIF() +if(NOT NETCDF_LIB_NAME STREQUAL "") + set(MOD_NETCDF_NAME ON) +endif() # Set the appropriate compiler/architecture for universal OSX binaries. -IF(${CMAKE_SYSTEM_NAME} EQUAL "Darwin") - SET(CMAKE_OSX_ARCHITECTURES i386;x86_64) -ENDIF(${CMAKE_SYSTEM_NAME} EQUAL "Darwin") +if(${CMAKE_SYSTEM_NAME} EQUAL "Darwin") + set(CMAKE_OSX_ARCHITECTURES i386;x86_64) +endif(${CMAKE_SYSTEM_NAME} EQUAL "Darwin") # Option to use Static Runtimes in MSVC -IF(MSVC) - OPTION(NC_USE_STATIC_CRT "Use static CRT Libraries ('\\MT')." OFF) - IF(NC_USE_STATIC_CRT) - SET(USE_STATIC_CRT ON) +if(MSVC) + option(NC_USE_STATIC_CRT "Use static CRT Libraries ('\\MT')." OFF) + if(NC_USE_STATIC_CRT) + set(USE_STATIC_CRT ON) specify_static_crt_flag() - ENDIF() -ENDIF() + endif() +endif() # Option to build netCDF Version 2 OPTION (ENABLE_V2_API "Build netCDF Version 2." ON) -SET(BUILD_V2 ${ENABLE_V2_API}) -IF(NOT ENABLE_V2_API) - SET(NO_NETCDF_2 ON) -ELSE(NOT ENABLE_V2_API) - SET(USE_NETCDF_2 TRUE) -ENDIF(NOT ENABLE_V2_API) +set(BUILD_V2 ${ENABLE_V2_API}) +if(NOT ENABLE_V2_API) + set(NO_NETCDF_2 ON) +else(NOT ENABLE_V2_API) + set(USE_NETCDF_2 TRUE) +endif(NOT ENABLE_V2_API) # Option to build utilities -OPTION(BUILD_UTILITIES "Build ncgen, ncgen3, ncdump." ON) +option(BUILD_UTILITIES "Build ncgen, ncgen3, ncdump." ON) # Option to use MMAP -OPTION(ENABLE_MMAP "Use MMAP." ON) +option(ENABLE_MMAP "Use MMAP." ON) # Option to use examples. 
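The chunk-cache defaults above are ordinary CACHE variables, so packagers can pin different values without patching this file, for example from a pre-load script passed with cmake -C. The file name and numbers below are made up purely for illustration:

    # Hypothetical pre-load script, e.g. "cmake -C netcdf-cache-init.cmake ..."
    set(DEFAULT_CHUNK_SIZE     4194304 CACHE STRING "Default chunk size.")
    set(CHUNK_CACHE_NELEMS     4133    CACHE STRING "Chunk cache elements.")
    set(CHUNK_CACHE_PREEMPTION 0.5     CACHE STRING "Chunk cache preemption.")
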
-OPTION(ENABLE_EXAMPLES "Build Examples" ON)
+option(ENABLE_EXAMPLES "Build Examples" ON)

 ###
 # Allow the user to specify libraries
 # to link against, similar to automakes 'LIBS' variable.
 ###
-SET(NC_EXTRA_DEPS "" CACHE STRING "Additional libraries to link against.")
-IF(NC_EXTRA_DEPS)
-  STRING(REPLACE " " ";" DEPS_LIST ${NC_EXTRA_DEPS})
-  FOREACH(_DEP ${DEPS_LIST})
-    STRING(REGEX REPLACE "^-l" "" _LIB ${_DEP})
+set(NC_EXTRA_DEPS "" CACHE STRING "Additional libraries to link against.")
+if(NC_EXTRA_DEPS)
+  string(REPLACE " " ";" DEPS_LIST ${NC_EXTRA_DEPS})
+  foreach(_DEP ${DEPS_LIST})
+    string(REGEX REPLACE "^-l" "" _LIB ${_DEP})
     FIND_LIBRARY("${_LIB}_DEP" NAMES "${_LIB}" "lib${_LIB}")
-    MESSAGE(${${_LIB}_DEP})
-    IF("${${_LIB}_DEP}" STREQUAL "${_LIB}_DEP-NOTFOUND")
-      MESSAGE(FATAL_ERROR "Error finding ${_LIB}.")
-    ELSE()
-      MESSAGE(STATUS "Found ${_LIB}: ${${_LIB}_DEP}")
-    ENDIF()
-    SET(EXTRA_DEPS ${EXTRA_DEPS} "${${_LIB}_DEP}")
-  ENDFOREACH()
-  MESSAGE("Extra deps: ${EXTRA_DEPS}")
-  LIST(REMOVE_DUPLICATES EXTRA_DEPS)
-  SET(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${EXTRA_DEPS})
-ENDIF()
+    message(${${_LIB}_DEP})
+    if("${${_LIB}_DEP}" STREQUAL "${_LIB}_DEP-NOTFOUND")
+      message(FATAL_ERROR "Error finding ${_LIB}.")
+    else()
+      message(STATUS "Found ${_LIB}: ${${_LIB}_DEP}")
+    endif()
+    set(EXTRA_DEPS ${EXTRA_DEPS} "${${_LIB}_DEP}")
+  endforeach()
+  message("Extra deps: ${EXTRA_DEPS}")
+  list(REMOVE_DUPLICATES EXTRA_DEPS)
+  set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${EXTRA_DEPS})
+endif()
 ###
 # End user-specified dependent libraries.
 ###
@@ -428,82 +428,82 @@ ENDIF()
 # 1. if ENABLE_HDF5 is off then disable hdf5
 # 2. if ENABLE_NETCDF4 is off then disable hdf5
 # 3. else enable hdf5
-OPTION(ENABLE_NETCDF_4 "Use HDF5." ON)
-OPTION(ENABLE_NETCDF4 "Use HDF5." ON)
-OPTION(ENABLE_HDF5 "Use HDF5." ON)
-IF(NOT ENABLE_HDF5 OR NOT ENABLE_NETCDF4 OR NOT ENABLE_NETCDF_4)
-  SET(ENABLE_HDF5 OFF CACHE BOOL "Use HDF5" FORCE)
-ENDIF()
-OPTION(ENABLE_HDF4 "Build netCDF-4 with HDF4 read capability(HDF4, HDF5 and Zlib required)." OFF)
-OPTION(ENABLE_DAP "Enable DAP2 and DAP4 Client." ON)
-OPTION(ENABLE_NCZARR "Enable NCZarr Client." ON)
-OPTION(ENABLE_PNETCDF "Build with parallel I/O for CDF-1, 2, and 5 files using PnetCDF." OFF)
-SET(ENABLE_CDF5 AUTO CACHE STRING "AUTO")
-OPTION(ENABLE_CDF5 "Enable CDF5 support" ON)
+option(ENABLE_NETCDF_4 "Use HDF5." ON)
+option(ENABLE_NETCDF4 "Use HDF5." ON)
+option(ENABLE_HDF5 "Use HDF5." ON)
+if(NOT ENABLE_HDF5 OR NOT ENABLE_NETCDF4 OR NOT ENABLE_NETCDF_4)
+  set(ENABLE_HDF5 OFF CACHE BOOL "Use HDF5" FORCE)
+endif()
+option(ENABLE_HDF4 "Build netCDF-4 with HDF4 read capability(HDF4, HDF5 and Zlib required)." OFF)
+option(ENABLE_DAP "Enable DAP2 and DAP4 Client." ON)
+option(ENABLE_NCZARR "Enable NCZarr Client." ON)
+option(ENABLE_PNETCDF "Build with parallel I/O for CDF-1, 2, and 5 files using PnetCDF." OFF)
+set(ENABLE_CDF5 AUTO CACHE STRING "AUTO")
+option(ENABLE_CDF5 "Enable CDF5 support" ON)

 # Netcdf-4 support (i.e. libsrc4) is required by more than just HDF5 (e.g.
NCZarr) # So depending on what above formats are enabled, enable netcdf-4 if(ENABLE_HDF5 OR ENABLE_HDF4 OR ENABLE_NCZARR) - SET(ENABLE_NETCDF_4 ON CACHE BOOL "Enable netCDF-4 API" FORCE) - SET(ENABLE_NETCDF4 ON CACHE BOOL "Enable netCDF4 Alias" FORCE) + set(ENABLE_NETCDF_4 ON CACHE BOOL "Enable netCDF-4 API" FORCE) + set(ENABLE_NETCDF4 ON CACHE BOOL "Enable netCDF4 Alias" FORCE) endif() # enable|disable all forms of network access -OPTION(ENABLE_REMOTE_FUNCTIONALITY "Enable|disable all forms remote data access (DAP, S3, etc)" ON) -MESSAGE(">>> ENABLE_REMOTE_FUNCTIONALITY=${ENABLE_REMOTE_FUNCTIONALITY}") +option(ENABLE_REMOTE_FUNCTIONALITY "Enable|disable all forms remote data access (DAP, S3, etc)" ON) +message(">>> ENABLE_REMOTE_FUNCTIONALITY=${ENABLE_REMOTE_FUNCTIONALITY}") if(NOT ENABLE_REMOTE_FUNCTIONALITY) -MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP[4]=NO") -SET(ENABLE_DAP OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP=NO" FORCE) -SET(ENABLE_DAP4 OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP4=NO" FORCE) -ENDIF() +message(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP[4]=NO") +set(ENABLE_DAP OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP=NO" FORCE) +set(ENABLE_DAP4 OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_DAP4=NO" FORCE) +endif() # Option to Build DLL -IF(WIN32) - OPTION(ENABLE_DLL "Build a Windows DLL." ${BUILD_SHARED_LIBS}) - IF(ENABLE_DLL) - SET(BUILD_DLL ON CACHE BOOL "") - ADD_DEFINITIONS(-DDLL_NETCDF) - ADD_DEFINITIONS(-DDLL_EXPORT) - ENDIF() -ENDIF() +if(WIN32) + option(ENABLE_DLL "Build a Windows DLL." ${BUILD_SHARED_LIBS}) + if(ENABLE_DLL) + set(BUILD_DLL ON CACHE BOOL "") + add_definitions(-DDLL_NETCDF) + add_definitions(-DDLL_EXPORT) + endif() +endif() # Did the user specify a default minimum blocksize for posixio? -SET(NCIO_MINBLOCKSIZE 256 CACHE STRING "Minimum I/O Blocksize for netCDF classic and 64-bit offset format files.") - -IF(ENABLE_NETCDF_4) - SET(USE_NETCDF4 ON CACHE BOOL "") - SET(ENABLE_NETCDF_4 ON CACHE BOOL "") - SET(ENABLE_NETCDF4 ON CACHE BOOL "") -ELSE() - SET(USE_HDF4_FILE_TESTS OFF) - SET(USE_HDF4 OFF) - SET(ENABLE_HDF4_FILE_TESTS OFF) - SET(ENABLE_HDF4 OFF) -ENDIF() +set(NCIO_MINBLOCKSIZE 256 CACHE STRING "Minimum I/O Blocksize for netCDF classic and 64-bit offset format files.") + +if(ENABLE_NETCDF_4) + set(USE_NETCDF4 ON CACHE BOOL "") + set(ENABLE_NETCDF_4 ON CACHE BOOL "") + set(ENABLE_NETCDF4 ON CACHE BOOL "") +else() + set(USE_HDF4_FILE_TESTS OFF) + set(USE_HDF4 OFF) + set(ENABLE_HDF4_FILE_TESTS OFF) + set(ENABLE_HDF4 OFF) +endif() # Option Logging, only valid for netcdf4. -OPTION(ENABLE_LOGGING "Enable Logging." OFF) -IF(NOT ENABLE_NETCDF_4) -SET(ENABLE_LOGGING OFF) -ENDIF() -IF(ENABLE_LOGGING) - ADD_DEFINITIONS(-DLOGGING) - ADD_DEFINITIONS(-DENABLE_SET_LOG_LEVEL) - SET(LOGGING ON) - SET(ENABLE_SET_LOG_LEVEL ON) -ENDIF() -OPTION(ENABLE_SET_LOG_LEVEL_FUNC "Enable definition of nc_set_log_level()." ON) -IF(ENABLE_NETCDF_4 AND NOT ENABLE_LOGGING AND ENABLE_SET_LOG_LEVEL_FUNC) - ADD_DEFINITIONS(-DENABLE_SET_LOG_LEVEL) - SET(ENABLE_SET_LOG_LEVEL ON) -ENDIF() +option(ENABLE_LOGGING "Enable Logging." OFF) +if(NOT ENABLE_NETCDF_4) +set(ENABLE_LOGGING OFF) +endif() +if(ENABLE_LOGGING) + add_definitions(-DLOGGING) + add_definitions(-DENABLE_SET_LOG_LEVEL) + set(LOGGING ON) + set(ENABLE_SET_LOG_LEVEL ON) +endif() +option(ENABLE_SET_LOG_LEVEL_FUNC "Enable definition of nc_set_log_level()." 
ON) +if(ENABLE_NETCDF_4 AND NOT ENABLE_LOGGING AND ENABLE_SET_LOG_LEVEL_FUNC) + add_definitions(-DENABLE_SET_LOG_LEVEL) + set(ENABLE_SET_LOG_LEVEL ON) +endif() # Option to allow for strict null file padding. # See https://github.com/Unidata/netcdf-c/issues/657 for more information -OPTION(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING "Enable strict null byte header padding." OFF) +option(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING "Enable strict null byte header padding." OFF) -IF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING) - SET(USE_STRICT_NULL_BYTE_HEADER_PADDING ON CACHE BOOL "") -ENDIF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING) +if(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING) + set(USE_STRICT_NULL_BYTE_HEADER_PADDING ON CACHE BOOL "") +endif(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING) # Note that szip management is tricky. # This is because we have three things to consider: @@ -512,251 +512,251 @@ ENDIF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING) # 3. is nczarr enabled? # We need separate flags for cases 1 and 2 -SET(USE_HDF5 ${ENABLE_HDF5}) +set(USE_HDF5 ${ENABLE_HDF5}) -IF(ENABLE_DAP) - SET(USE_DAP ON CACHE BOOL "") - SET(ENABLE_DAP2 ON CACHE BOOL "") +if(ENABLE_DAP) + set(USE_DAP ON CACHE BOOL "") + set(ENABLE_DAP2 ON CACHE BOOL "") - IF(ENABLE_HDF5) - MESSAGE(STATUS "Enabling DAP4") - SET(ENABLE_DAP4 ON CACHE BOOL "") - ELSE() - MESSAGE(STATUS "Disabling DAP4") - SET(ENABLE_DAP4 OFF CACHE BOOL "") - ENDIF(ENABLE_HDF5) + if(ENABLE_HDF5) + message(STATUS "Enabling DAP4") + set(ENABLE_DAP4 ON CACHE BOOL "") + else() + message(STATUS "Disabling DAP4") + set(ENABLE_DAP4 OFF CACHE BOOL "") + endif(ENABLE_HDF5) -ELSE() - SET(ENABLE_DAP2 OFF CACHE BOOL "") - SET(ENABLE_DAP4 OFF CACHE BOOL "") -ENDIF() +else() + set(ENABLE_DAP2 OFF CACHE BOOL "") + set(ENABLE_DAP4 OFF CACHE BOOL "") +endif() # Option to support byte-range reading of remote datasets -OPTION(ENABLE_BYTERANGE "Enable byte-range access to remote datasets.." ON) +option(ENABLE_BYTERANGE "Enable byte-range access to remote datasets.." ON) if(NOT ENABLE_REMOTE_FUNCTIONALITY) -MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_BYTERANGE=NO") -SET(ENABLE_BYTERANGE OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_BYTERANGE=NO" FORCE) -ENDIF() +message(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_BYTERANGE=NO") +set(ENABLE_BYTERANGE OFF CACHE BOOL "ENABLE_REMOTE_FUNCTIONALITY=NO => ENABLE_BYTERANGE=NO" FORCE) +endif() # Option to Enable DAP long tests, remote tests. -OPTION(ENABLE_DAP_REMOTE_TESTS "Enable DAP remote tests." ON) -OPTION(ENABLE_EXTERNAL_SERVER_TESTS "Enable external Server remote tests." OFF) -OPTION(ENABLE_DAP_LONG_TESTS "Enable DAP long tests." OFF) +option(ENABLE_DAP_REMOTE_TESTS "Enable DAP remote tests." ON) +option(ENABLE_EXTERNAL_SERVER_TESTS "Enable external Server remote tests." OFF) +option(ENABLE_DAP_LONG_TESTS "Enable DAP long tests." 
OFF) if(NOT ENABLE_DAP) -SET(ENABLE_DAP_REMOTE_TESTS OFF CACHE BOOL "" FORCE) -SET(ENABLE_EXTERNAL_SERVER_TESTS OFF CACHE BOOL "" FORCE) -SET(ENABLE_DAP_LONG_TESTS OFF CACHE BOOL "" FORCE) -ENDIF() +set(ENABLE_DAP_REMOTE_TESTS OFF CACHE BOOL "" FORCE) +set(ENABLE_EXTERNAL_SERVER_TESTS OFF CACHE BOOL "" FORCE) +set(ENABLE_DAP_LONG_TESTS OFF CACHE BOOL "" FORCE) +endif() -SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") -SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") +set(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") +set(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test") # Locate some compressors -OPTION(ENABLE_FILTER_SZIP "Enable use of Szip compression library if it is available. Required if ENABLE_NCZARR is true." ON) -OPTION(ENABLE_FILTER_BZ2 "Enable use of Bz2 compression library if it is available." ON) -OPTION(ENABLE_FILTER_BLOSC "Enable use of blosc compression library if it is available." ON) -OPTION(ENABLE_FILTER_ZSTD "Enable use of Zstd compression library if it is available." ON) +option(ENABLE_FILTER_SZIP "Enable use of Szip compression library if it is available. Required if ENABLE_NCZARR is true." ON) +option(ENABLE_FILTER_BZ2 "Enable use of Bz2 compression library if it is available." ON) +option(ENABLE_FILTER_BLOSC "Enable use of blosc compression library if it is available." ON) +option(ENABLE_FILTER_ZSTD "Enable use of Zstd compression library if it is available." ON) # If user wants, then install selected plugins (default on) -SET(PLUGIN_INSTALL_DIR "NO" CACHE STRING "Whether and where we should install plugins; defaults to yes") +set(PLUGIN_INSTALL_DIR "NO" CACHE STRING "Whether and where we should install plugins; defaults to yes") # This is ugly, but seems necessary because of CMake's boolean structure -SET(boolval FALSE) -IF(DEFINED PLUGIN_INSTALL_DIR) +set(boolval FALSE) +if(DEFINED PLUGIN_INSTALL_DIR) booleanize(${PLUGIN_INSTALL_DIR} boolval) - IF(boolval) - SET(ENABLE_PLUGIN_INSTALL YES) + if(boolval) + set(ENABLE_PLUGIN_INSTALL YES) # No actual value was specified - UNSET(PLUGIN_INSTALL_DIR CACHE) - ELSE() - IF(boolval STREQUAL "NOTFOUND") + UNset(PLUGIN_INSTALL_DIR CACHE) + else() + if(boolval STREQUAL "NOTFOUND") # Must be an actual value - SET(ENABLE_PLUGIN_INSTALL YES) - ELSE() - SET(ENABLE_PLUGIN_INSTALL NO) - ENDIF() - ENDIF() -ELSE() - SET(ENABLE_PLUGIN_INSTALL NO) -ENDIF() + set(ENABLE_PLUGIN_INSTALL YES) + else() + set(ENABLE_PLUGIN_INSTALL NO) + endif() + endif() +else() + set(ENABLE_PLUGIN_INSTALL NO) +endif() # Ensure no defined plugin dir if not enabled -IF(NOT ENABLE_PLUGIN_INSTALL) - UNSET(PLUGIN_INSTALL_DIR CACHE) -ENDIF() +if(NOT ENABLE_PLUGIN_INSTALL) + UNset(PLUGIN_INSTALL_DIR CACHE) +endif() -IF(ENABLE_PLUGIN_INSTALL) - IF(NOT DEFINED PLUGIN_INSTALL_DIR) +if(ENABLE_PLUGIN_INSTALL) + if(NOT DEFINED PLUGIN_INSTALL_DIR) # Default to HDF5_PLUGIN_PATH or its default directories - IF(DEFINED ENV{HDF5_PLUGIN_PATH}) - SET(PLUGIN_INSTALL_DIR "$ENV{HDF5_PLUGIN_PATH}") - ELSE() - IF(ISMSVC OR ISMINGW) - SET(PLUGIN_INSTALL_DIR "$ENV{ALLUSERSPROFILE}\\hdf5\\lib\\plugin") - ELSE() - SET(PLUGIN_INSTALL_DIR "/usr/local/hdf5/lib/plugin") - ENDIF() - ENDIF() - MESSAGE("Defaulting to -DPLUGIN_INSTALL_DIR=${PLUGIN_INSTALL_DIR}") - ENDIF() -ENDIF() - -IF(ENABLE_PLUGIN_INSTALL) + if(DEFINED ENV{HDF5_PLUGIN_PATH}) + set(PLUGIN_INSTALL_DIR 
"$ENV{HDF5_PLUGIN_PATH}") + else() + if(ISMSVC OR ISMINGW) + set(PLUGIN_INSTALL_DIR "$ENV{ALLUSERSPROFILE}\\hdf5\\lib\\plugin") + else() + set(PLUGIN_INSTALL_DIR "/usr/local/hdf5/lib/plugin") + endif() + endif() + message("Defaulting to -DPLUGIN_INSTALL_DIR=${PLUGIN_INSTALL_DIR}") + endif() +endif() + +if(ENABLE_PLUGIN_INSTALL) # Use the lowest priority dir in the path - IF(NOT ISMSVC AND NOT ISMINGW) - STRING(REPLACE ":" ";" PATH_LIST ${PLUGIN_INSTALL_DIR}) - ELSE() - SET(PATH_LIST ${PLUGIN_INSTALL_DIR}) - ENDIF() + if(NOT ISMSVC AND NOT ISMINGW) + string(REPLACE ":" ";" PATH_LIST ${PLUGIN_INSTALL_DIR}) + else() + set(PATH_LIST ${PLUGIN_INSTALL_DIR}) + endif() # Get last element - LIST(GET PATH_LIST -1 PLUGIN_INSTALL_DIR) - SET(PLUGIN_INSTALL_DIR_SETTING "${PLUGIN_INSTALL_DIR}") - MESSAGE("Final value of-DPLUGIN_INSTALL_DIR=${PLUGIN_INSTALL_DIR}") -ELSE() # No option specified - UNSET(PLUGIN_INSTALL_DIR) - UNSET(PLUGIN_INSTALL_DIR CACHE) - SET(PLUGIN_INSTALL_DIR_SETTING "N.A.") -ENDIF() + list(GET PATH_LIST -1 PLUGIN_INSTALL_DIR) + set(PLUGIN_INSTALL_DIR_SETTING "${PLUGIN_INSTALL_DIR}") + message("Final value of-DPLUGIN_INSTALL_DIR=${PLUGIN_INSTALL_DIR}") +else() # No option specified + UNset(PLUGIN_INSTALL_DIR) + UNset(PLUGIN_INSTALL_DIR CACHE) + set(PLUGIN_INSTALL_DIR_SETTING "N.A.") +endif() # Try to enable NCZarr zip support -OPTION(ENABLE_NCZARR_ZIP "Enable NCZarr ZIP support." OFF) +option(ENABLE_NCZARR_ZIP "Enable NCZarr ZIP support." OFF) # libdl is always available; built-in in Windows and OSX -OPTION(ENABLE_PLUGINS "Enable dynamically loaded plugins (default on)." ON) -IF(MINGW) - SET(ENABLE_PLUGINS OFF CACHE BOOL "Disable plugins" FORCE) -ELSE() - IF(NOT WIN32) - IF(HAVE_DLFCN_H) - INCLUDE_DIRECTORIES("dlfcn.h") - ENDIF() - ENDIF() -ENDIF() -IF(ENABLE_PLUGINS) - SET(USEPLUGINS yes) -ENDIF() +option(ENABLE_PLUGINS "Enable dynamically loaded plugins (default on)." ON) +if(MINGW) + set(ENABLE_PLUGINS OFF CACHE BOOL "Disable plugins" FORCE) +else() + if(NOT WIN32) + if(HAVE_DLFCN_H) + include_directories("dlfcn.h") + endif() + endif() +endif() +if(ENABLE_PLUGINS) + set(USEPLUGINS yes) +endif() # Enable some developer-only tests -OPTION(ENABLE_EXTRA_TESTS "Enable Extra tests. Some may not work because of known issues. Developers only." OFF) -IF(ENABLE_EXTRA_TESTS) - SET(EXTRA_TESTS ON) -ENDIF() +option(ENABLE_EXTRA_TESTS "Enable Extra tests. Some may not work because of known issues. Developers only." OFF) +if(ENABLE_EXTRA_TESTS) + set(EXTRA_TESTS ON) +endif() # Option to use bundled XGetopt in place of getopt(). This is mostly useful # for MSVC builds. If not building utilities or some tests, # getopt() isn't required at all. -IF(MSVC) - OPTION(ENABLE_XGETOPT "Enable bundled XGetOpt instead of external getopt()." ON) - IF(ENABLE_XGETOPT) - SET(USE_X_GETOPT ON CACHE BOOL "") - ENDIF() -ENDIF() +if(MSVC) + option(ENABLE_XGETOPT "Enable bundled XGetOpt instead of external getopt()." ON) + if(ENABLE_XGETOPT) + set(USE_X_GETOPT ON CACHE BOOL "") + endif() +endif() -SET(MATH "") -IF(NOT WIN32) +set(MATH "") +if(NOT WIN32) # STDIO instead of posixio. - OPTION(ENABLE_STDIO "If true, use stdio instead of posixio (ex. on the Cray)" OFF) - IF(ENABLE_STDIO) - SET(USE_STDIO ON CACHE BOOL "") - ENDIF() + option(ENABLE_STDIO "If true, use stdio instead of posixio (ex. 
on the Cray)" OFF) + if(ENABLE_STDIO) + set(USE_STDIO ON CACHE BOOL "") + endif() # FFIO insteaad of PosixIO - OPTION(ENABLE_FFIO "If true, use ffio instead of posixio" OFF) - IF(ENABLE_FFIO) - SET(USE_FFIO ON CACHE BOOL "") - ENDIF() -ENDIF() + option(ENABLE_FFIO "If true, use ffio instead of posixio" OFF) + if(ENABLE_FFIO) + set(USE_FFIO ON CACHE BOOL "") + endif() +endif() # Options for S3 Support -OPTION(ENABLE_S3 "Enable S3 support." OFF) -OPTION(ENABLE_S3_INTERNAL "Enable S3 Internal support." OFF) -OPTION(ENABLE_NCZARR_S3 "Enable NCZarr S3 support; Deprecated in favor of ENABLE_S3" OFF) +option(ENABLE_S3 "Enable S3 support." OFF) +option(ENABLE_S3_INTERNAL "Enable S3 Internal support." OFF) +option(ENABLE_NCZARR_S3 "Enable NCZarr S3 support; Deprecated in favor of ENABLE_S3" OFF) -IF(NOT ENABLE_REMOTE_FUNCTIONALITY) - SET(ENABLE_S3 OFF CACHE BOOL "" FORCE) - SET(ENABLE_S3_INTERNAL OFF CACHE BOOL "" FORCE) - SET(ENABLE_NCZARR_S3 OFF CACHE BOOL "" FORCE) -ENDIF() +if(NOT ENABLE_REMOTE_FUNCTIONALITY) + set(ENABLE_S3 OFF CACHE BOOL "" FORCE) + set(ENABLE_S3_INTERNAL OFF CACHE BOOL "" FORCE) + set(ENABLE_NCZARR_S3 OFF CACHE BOOL "" FORCE) +endif() # Control S3 Testing: Multi-valued option -SET(WITH_S3_TESTING OFF CACHE STRING "Control S3 Testing: ON (i.e. all) OFF (i.e. none) PUBLIC") +set(WITH_S3_TESTING OFF CACHE STRING "Control S3 Testing: ON (i.e. all) OFF (i.e. none) PUBLIC") SET_PROPERTY(CACHE WITH_S3_TESTING PROPERTY STRINGS ON OFF PUBLIC) # -IF(WITH_S3_TESTING STREQUAL "") - SET(WITH_S3_TESTING OFF CACHE STRING "") # Default -ENDIF() +if(WITH_S3_TESTING STREQUAL "") + set(WITH_S3_TESTING OFF CACHE STRING "") # Default +endif() -IF(WITH_S3_TESTING) +if(WITH_S3_TESTING) message(WARNING "**** DO NOT USE WITH_S3_TESTING=ON UNLESS YOU HAVE ACCESS TO THE UNIDATA S3 BUCKET! ***") -ENDIF() +endif() # ENABLE_NCZARR_S3 is now an alias for ENABLE_S3 (but...) 
if (NOT ENABLE_S3 AND ENABLE_NCZARR_S3) - SET(ENABLE_S3 ON CACHE BOOL "NCARR S3" FORCE) # For back compatibility -ENDIF() -UNSET(ENABLE_NCZARR_S3) - -IF(NOT ENABLE_REMOTE_FUNCTIONALITY) - MESSAGE(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => disable all s3 functionality") - SET(ENABLE_S3 OFF CACHE BOOL "" FORCE) - SET(ENABLE_S3_INTERNAL OFF CACHE BOOL "" FORCE) - SET(ENABLE_NCZARR_S3 OFF CACHE BOOL "" FORCE) - SET(ENABLE_HDF5_ROS3 OFF CACHE BOOL "Use ROS3" FORCE) - SET(WITH_S3_TESTING OFF CACHE STRING "" FORCE) -ENDIF() - - -IF(ENABLE_S3) - IF(NOT ENABLE_S3_AWS AND NOT ENABLE_S3_INTERNAL) + set(ENABLE_S3 ON CACHE BOOL "NCARR S3" FORCE) # For back compatibility +endif() +UNset(ENABLE_NCZARR_S3) + +if(NOT ENABLE_REMOTE_FUNCTIONALITY) + message(WARNING "ENABLE_REMOTE_FUNCTIONALITY=NO => disable all s3 functionality") + set(ENABLE_S3 OFF CACHE BOOL "" FORCE) + set(ENABLE_S3_INTERNAL OFF CACHE BOOL "" FORCE) + set(ENABLE_NCZARR_S3 OFF CACHE BOOL "" FORCE) + set(ENABLE_HDF5_ROS3 OFF CACHE BOOL "Use ROS3" FORCE) + set(WITH_S3_TESTING OFF CACHE STRING "" FORCE) +endif() + + +if(ENABLE_S3) + if(NOT ENABLE_S3_AWS AND NOT ENABLE_S3_INTERNAL) message(FATAL_ERROR "S3 support library not found; please specify option -DENABLE_S3=NO") - SET(ENABLE_S3 OFF CACHE BOOL "S3 support" FORCE) - ENDIF() - IF(ENABLE_S3_AWS AND ENABLE_S3_INTERNAL) + set(ENABLE_S3 OFF CACHE BOOL "S3 support" FORCE) + endif() + if(ENABLE_S3_AWS AND ENABLE_S3_INTERNAL) message(WARNING "Both aws-sdk-cpp and s3-internal enabled => use s3-internal") - SET(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) - ENDIF() -ENDIF() + set(ENABLE_S3_AWS OFF CACHE BOOL "S3 AWS" FORCE) + endif() +endif() -IF(NOT ENABLE_S3) - IF(WITH_S3_TESTING STREQUAL "PUBLIC" OR WITH_S3_TESTING) +if(NOT ENABLE_S3) + if(WITH_S3_TESTING STREQUAL "PUBLIC" OR WITH_S3_TESTING) message(WARNING "S3 support is disabled => WITH_S3_TESTING=OFF") - SET(WITH_S3_TESTING OFF CACHE STRING "" FORCE) - ENDIF() -ENDIF() + set(WITH_S3_TESTING OFF CACHE STRING "" FORCE) + endif() +endif() -OPTION(ENABLE_LIBXML2 "Link against libxml2 if it is available, use the packaged tinyxml2 parser otherwise." ON) -SET(XMLPARSER "tinyxml2 (bundled)") +option(ENABLE_LIBXML2 "Link against libxml2 if it is available, use the packaged tinyxml2 parser otherwise." ON) +set(XMLPARSER "tinyxml2 (bundled)") -IF(NOT ENABLE_BYTERANGE AND ENABLE_HDF5_ROS3) - MESSAGE(WARNING "ROS3 support requires ENABLE_BYTERANGE=TRUE; disabling ROS3 support") - SET(ENABLE_HDF5_ROS3 OFF CACHE BOOL "ROS3 support" FORCE) -ENDIF() +if(NOT ENABLE_BYTERANGE AND ENABLE_HDF5_ROS3) + message(WARNING "ROS3 support requires ENABLE_BYTERANGE=TRUE; disabling ROS3 support") + set(ENABLE_HDF5_ROS3 OFF CACHE BOOL "ROS3 support" FORCE) +endif() ## # Enable Tests ## -OPTION(ENABLE_TESTS "Enable basic tests, run with 'make test'." ON) -IF(ENABLE_TESTS) - SET(BUILD_TESTSETS ON CACHE BOOL "") +option(ENABLE_TESTS "Enable basic tests, run with 'make test'." ON) +if(ENABLE_TESTS) + set(BUILD_TESTSETS ON CACHE BOOL "") # Options for CTest-based tests, dashboards. 
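Note that ENABLE_S3_AWS and ENABLE_S3_INTERNAL are consulted above but are not set in this file; they are presumably detected in cmake/dependencies.cmake, which is included further down. A hypothetical sketch of the kind of probe involved, assuming the aws-sdk-cpp package config is what gets searched for:

    # Hypothetical probe -- the real detection lives in the dependencies module.
    find_package(AWSSDK QUIET COMPONENTS s3)
    if(AWSSDK_FOUND)
      set(ENABLE_S3_AWS ON)
    else()
      set(ENABLE_S3_AWS OFF)
    endif()
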
- SET(NC_CTEST_PROJECT_NAME "netcdf-c" CACHE STRING "Project Name for CTest-based testing purposes.") - SET(NC_CTEST_DROP_SITE "cdash.unidata.ucar.edu:443" CACHE STRING "Dashboard location for CTest-based testing purposes.") - SET(NC_CTEST_DROP_LOC_PREFIX "" CACHE STRING "Prefix for Dashboard location on remote server when using CTest-based testing.") - SET(SUBMIT_URL "https://cdash.unidata.ucar.edu:443") - FIND_PROGRAM(HOSTNAME_CMD NAMES hostname) - IF(NOT WIN32) - SET(HOSTNAME_ARG "-s") - ENDIF() - IF(HOSTNAME_CMD) - EXECUTE_PROCESS(COMMAND ${HOSTNAME_CMD} "${HOSTNAME_ARG}" OUTPUT_VARIABLE HOSTNAME OUTPUT_STRIP_TRAILING_WHITESPACE) - SET(NC_CTEST_SITE "${HOSTNAME}" CACHE STRING "Hostname of test machine.") - ENDIF() - - IF(NC_CTEST_SITE) - SET(SITE "${NC_CTEST_SITE}" CACHE STRING "") - ENDIF() + set(NC_CTEST_PROJECT_NAME "netcdf-c" CACHE STRING "Project Name for CTest-based testing purposes.") + set(NC_CTEST_DROP_SITE "cdash.unidata.ucar.edu:443" CACHE STRING "Dashboard location for CTest-based testing purposes.") + set(NC_CTEST_DROP_LOC_PREFIX "" CACHE STRING "Prefix for Dashboard location on remote server when using CTest-based testing.") + set(SUBMIT_URL "https://cdash.unidata.ucar.edu:443") + find_program(HOSTNAME_CMD NAMES hostname) + if(NOT WIN32) + set(HOSTNAME_ARG "-s") + endif() + if(HOSTNAME_CMD) + execute_process(COMMAND ${HOSTNAME_CMD} "${HOSTNAME_ARG}" OUTPUT_VARIABLE HOSTNAME OUTPUT_STRIP_TRAILING_WHITESPACE) + set(NC_CTEST_SITE "${HOSTNAME}" CACHE STRING "Hostname of test machine.") + endif() + + if(NC_CTEST_SITE) + set(SITE "${NC_CTEST_SITE}" CACHE STRING "") + endif() ### # This option dictates whether or not to turn on @@ -773,76 +773,76 @@ IF(ENABLE_TESTS) # which are known to fail. ### - OPTION(ENABLE_FAILING_TESTS "Run tests which are known to fail, check to see if any have been fixed." OFF) + option(ENABLE_FAILING_TESTS "Run tests which are known to fail, check to see if any have been fixed." OFF) ### # Option to turn on unit testing. See # https://github.com/Unidata/netcdf-c/pull/1472 for more information. ### - OPTION(ENABLE_UNIT_TESTS "Run Unit Tests." ON) + option(ENABLE_UNIT_TESTS "Run Unit Tests." ON) ### # Option to turn on performance testing. # See https://github.com/Unidata/netcdf-c/issues/2627 for more information. ### - OPTION(ENABLE_BENCHMARKS "Run benchmark Tests." OFF) + option(ENABLE_BENCHMARKS "Run benchmark Tests." OFF) ### # End known-failures. ### MARK_AS_ADVANCED(ENABLE_FAILING_TESTS) -ENDIF() +endif() ### # Option to enable extreme numbers during testing. ### -OPTION(ENABLE_EXTREME_NUMBERS "Enable extreme numbers during testing, such as MAX_INT-1" ON) -IF(ENABLE_EXTREME_NUMBERS) - SET(USE_EXTREME_NUMBERS ON) -ENDIF() +option(ENABLE_EXTREME_NUMBERS "Enable extreme numbers during testing, such as MAX_INT-1" ON) +if(ENABLE_EXTREME_NUMBERS) + set(USE_EXTREME_NUMBERS ON) +endif() # Enable Large file tests -IF(ENABLE_LARGE_FILE_TESTS) - SET(LARGE_FILE_TESTS ON) -ENDIF() +if(ENABLE_LARGE_FILE_TESTS) + set(LARGE_FILE_TESTS ON) +endif() -OPTION(ENABLE_METADATA_PERF_TESTS "Enable test of metadata performance." OFF) -IF(ENABLE_METADATA_PERF_TESTS) - SET(ENABLE_METADATA_PERF ON) -ENDIF() +option(ENABLE_METADATA_PERF_TESTS "Enable test of metadata performance." OFF) +if(ENABLE_METADATA_PERF_TESTS) + set(ENABLE_METADATA_PERF ON) +endif() # Location for large file tests. -SET(TEMP_LARGE "." CACHE STRING "Location to store large file tests.") +set(TEMP_LARGE "." 
CACHE STRING "Location to store large file tests.") -OPTION(ENABLE_FSYNC "Enable experimental fsync code." OFF) -IF(ENABLE_FSYNC) - SET(USE_FSYNC ON) -ENDIF() +option(ENABLE_FSYNC "Enable experimental fsync code." OFF) +if(ENABLE_FSYNC) + set(USE_FSYNC ON) +endif() # Temporary OPTION (ENABLE_JNA "Enable jna bug fix code." OFF) -IF(ENABLE_JNA) - SET(JNA ON) -ENDIF() +if(ENABLE_JNA) + set(JNA ON) +endif() # Linux specific large file support flags. # Modelled after check in CMakeLists.txt for hdf5. -OPTION(ENABLE_LARGE_FILE_SUPPORT "Enable large file support." ON) -IF(ENABLE_LARGE_FILE_SUPPORT) - IF(MSVC) - SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LARGEADDRESSAWARE") - SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /LARGEADDRESSAWARE") - SET(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /LARGEADDRESSAWARE") - ELSE() - SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64") - ENDIF() -ENDIF() - -OPTION(ENABLE_EXAMPLE_TESTS "Run extra example tests. Requires GNU Sed. Ignored if HDF5 is not Enabled" OFF) -IF(NOT ENABLE_HDF5 AND ENABLE_EXAMPLE_TESTS) - SET(ENABLE_EXAMPLE_TESTS OFF) -ENDIF() +option(ENABLE_LARGE_FILE_SUPPORT "Enable large file support." ON) +if(ENABLE_LARGE_FILE_SUPPORT) + if(MSVC) + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LARGEADDRESSAWARE") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /LARGEADDRESSAWARE") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /LARGEADDRESSAWARE") + else() + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64") + endif() +endif() + +option(ENABLE_EXAMPLE_TESTS "Run extra example tests. Requires GNU Sed. Ignored if HDF5 is not Enabled" OFF) +if(NOT ENABLE_HDF5 AND ENABLE_EXAMPLE_TESTS) + set(ENABLE_EXAMPLE_TESTS OFF) +endif() ################################## # Dependencies @@ -855,43 +855,43 @@ include(cmake/dependencies.cmake) ################################ # Enable Parallel IO with netCDF-4/HDF5 files using HDF5 parallel I/O. -SET(STATUS_PARALLEL "OFF") +set(STATUS_PARALLEL "OFF") set(IMPORT_MPI "") -OPTION(ENABLE_PARALLEL4 "Build netCDF-4 with parallel IO" "${HDF5_PARALLEL}") -IF(ENABLE_PARALLEL4 AND ENABLE_HDF5) - IF(NOT HDF5_PARALLEL) - SET(USE_PARALLEL OFF CACHE BOOL "") - MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. Disabling parallel build.") - ELSE() - SET(HDF5_PARALLEL ON CACHE BOOL "") - SET(USE_PARALLEL ON CACHE BOOL "") - SET(USE_PARALLEL4 ON CACHE BOOL "") - SET(STATUS_PARALLEL "ON") +option(ENABLE_PARALLEL4 "Build netCDF-4 with parallel IO" "${HDF5_PARALLEL}") +if(ENABLE_PARALLEL4 AND ENABLE_HDF5) + if(NOT HDF5_PARALLEL) + set(USE_PARALLEL OFF CACHE BOOL "") + message(STATUS "Cannot find HDF5 library built with parallel support. 
Disabling parallel build.") + else() + set(HDF5_PARALLEL ON CACHE BOOL "") + set(USE_PARALLEL ON CACHE BOOL "") + set(USE_PARALLEL4 ON CACHE BOOL "") + set(STATUS_PARALLEL "ON") configure_file("${netCDF_SOURCE_DIR}/nc_test4/run_par_test.sh.in" "${netCDF_BINARY_DIR}/tmp/run_par_test.sh" @ONLY NEWLINE_STYLE LF) - FILE(COPY "${netCDF_BINARY_DIR}/tmp/run_par_test.sh" + file(COPY "${netCDF_BINARY_DIR}/tmp/run_par_test.sh" DESTINATION ${netCDF_BINARY_DIR}/nc_test4 FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) configure_file("${netCDF_SOURCE_DIR}/h5_test/run_par_tests.sh.in" "${netCDF_BINARY_DIR}/tmp/run_par_tests.sh" @ONLY NEWLINE_STYLE LF) - FILE(COPY "${netCDF_BINARY_DIR}/tmp/run_par_tests.sh" + file(COPY "${netCDF_BINARY_DIR}/tmp/run_par_tests.sh" DESTINATION ${netCDF_BINARY_DIR}/h5_test FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) set(IMPORT_MPI "include(CMakeFindDependencyMacro)\nfind_dependency(MPI COMPONENTS C)") - ENDIF() -ENDIF() + endif() +endif() # Options to enable parallel IO for classic formats with PnetCDF library. -SET(STATUS_PNETCDF "OFF") -IF(ENABLE_PNETCDF) +set(STATUS_PNETCDF "OFF") +if(ENABLE_PNETCDF) # Check for ncmpi_create in libpnetcdf, define USE_PNETCDF # Does the user want to turn on PnetCDF read ability? - SET(USE_PNETCDF ON CACHE BOOL "") - IF(NOT PNETCDF) - MESSAGE(STATUS "Cannot find PnetCDF library. Disabling PnetCDF support.") - SET(USE_PNETCDF OFF CACHE BOOL "") - ELSE(NOT PNETCDF) - SET(USE_PARALLEL ON CACHE BOOL "") + set(USE_PNETCDF ON CACHE BOOL "") + if(NOT PNETCDF) + message(STATUS "Cannot find PnetCDF library. Disabling PnetCDF support.") + set(USE_PNETCDF OFF CACHE BOOL "") + else(NOT PNETCDF) + set(USE_PARALLEL ON CACHE BOOL "") # Check PnetCDF version. Must be >= 1.6.0 set(pnetcdf_h "${PNETCDF_INCLUDE_DIR}/pnetcdf.h" ) @@ -906,218 +906,218 @@ IF(ENABLE_PNETCDF) message(STATUS "Found PnetCDF version ${pnetcdf_version}") if(${pnetcdf_version} VERSION_GREATER "1.6.0") - SET(STATUS_PNETCDF "ON") - INCLUDE_DIRECTORIES(${PNETCDF_INCLUDE_DIR}) - SET(HAVE_LIBPNETCDF ON) + set(STATUS_PNETCDF "ON") + include_directories(${PNETCDF_INCLUDE_DIR}) + set(HAVE_LIBPNETCDF ON) # PnetCDF => parallel - SET(STATUS_PARALLEL ON) - SET(USE_PARALLEL ON) - MESSAGE(STATUS "Using PnetCDF Library: ${PNETCDF}") - ELSE() - MESSAGE(WARNING "ENABLE_PNETCDF requires version 1.6.1 or later; found version ${pnetcdf_version}. PnetCDF is disabled") - ENDIF() + set(STATUS_PARALLEL ON) + set(USE_PARALLEL ON) + message(STATUS "Using PnetCDF Library: ${PNETCDF}") + else() + message(WARNING "ENABLE_PNETCDF requires version 1.6.1 or later; found version ${pnetcdf_version}. PnetCDF is disabled") + endif() ### # Generate pnetcdf test. ### - CONFIGURE_FILE("${netCDF_SOURCE_DIR}/nc_test/run_pnetcdf_tests.sh.in" + configure_file("${netCDF_SOURCE_DIR}/nc_test/run_pnetcdf_tests.sh.in" "${netCDF_BINARY_DIR}/nc_test/run_pnetcdf_tests.sh") - ENDIF(NOT PNETCDF) -ENDIF() + endif(NOT PNETCDF) +endif() # Options to enable use of fill values for elements causing NC_ERANGE -SET(ENABLE_ERANGE_FILL AUTO CACHE STRING "AUTO") -OPTION(ENABLE_ERANGE_FILL "Enable use of fill value when out-of-range type conversion causes NC_ERANGE error." 
OFF) -IF(ENABLE_ERANGE_FILL) # enable or auto - STRING(TOUPPER ${ENABLE_ERANGE_FILL} ENABLE_ERANGE_FILL) - IF(ENABLE_ERANGE_FILL AND NOT ENABLE_ERANGE_FILL STREQUAL "AUTO") +set(ENABLE_ERANGE_FILL AUTO CACHE STRING "AUTO") +option(ENABLE_ERANGE_FILL "Enable use of fill value when out-of-range type conversion causes NC_ERANGE error." OFF) +if(ENABLE_ERANGE_FILL) # enable or auto + string(TOUPPER ${ENABLE_ERANGE_FILL} ENABLE_ERANGE_FILL) + if(ENABLE_ERANGE_FILL AND NOT ENABLE_ERANGE_FILL STREQUAL "AUTO") # explicitly enabled - SET(ENABLE_ERANGE_FILL ON) - ELSE() - IF(NOT ENABLE_ERANGE_FILL STREQUAL "AUTO") - SET(ENABLE_ERANGE_FILL OFF) - ENDIF() - ENDIF() -ENDIF(ENABLE_ERANGE_FILL) + set(ENABLE_ERANGE_FILL ON) + else() + if(NOT ENABLE_ERANGE_FILL STREQUAL "AUTO") + set(ENABLE_ERANGE_FILL OFF) + endif() + endif() +endif(ENABLE_ERANGE_FILL) # Now ENABLE_ERANGE_FILL is either AUTO, ON, or OFF # More relaxed coordinate check is now mandatory for all builds. -SET(ENABLE_ZERO_LENGTH_COORD_BOUND ON) +set(ENABLE_ZERO_LENGTH_COORD_BOUND ON) # check and conform with PnetCDF settings on ERANGE_FILL and RELAX_COORD_BOUND -IF(STATUS_PNETCDF) +if(STATUS_PNETCDF) file(STRINGS "${pnetcdf_h}" enable_erange_fill_pnetcdf REGEX "^#define PNETCDF_ERANGE_FILL") string(REGEX REPLACE "[^0-9]" "" erange_fill_pnetcdf "${enable_erange_fill_pnetcdf}") - IF("x${erange_fill_pnetcdf}" STREQUAL "x1") - SET(erange_fill_pnetcdf "ON") - ELSE() - SET(erange_fill_pnetcdf "OFF") - ENDIF() - IF(ENABLE_ERANGE_FILL STREQUAL "AUTO") # not set on command line - SET(ENABLE_ERANGE_FILL "${erange_fill_pnetcdf}") - ELSE() + if("x${erange_fill_pnetcdf}" STREQUAL "x1") + set(erange_fill_pnetcdf "ON") + else() + set(erange_fill_pnetcdf "OFF") + endif() + if(ENABLE_ERANGE_FILL STREQUAL "AUTO") # not set on command line + set(ENABLE_ERANGE_FILL "${erange_fill_pnetcdf}") + else() # user explicitly set this option on command line - IF(NOT ENABLE_ERANGE_FILL STREQUAL "${erange_fill_pnetcdf}") - IF(ENABLE_ERANGE_FILL) - MESSAGE(FATAL_ERROR "Enabling erange-fill conflicts with PnetCDF setting") - ELSE() - MESSAGE(FATAL_ERROR "Disabling erange-fill conflicts with PnetCDF setting") - ENDIF() - ENDIF() - ENDIF() + if(NOT ENABLE_ERANGE_FILL STREQUAL "${erange_fill_pnetcdf}") + if(ENABLE_ERANGE_FILL) + message(FATAL_ERROR "Enabling erange-fill conflicts with PnetCDF setting") + else() + message(FATAL_ERROR "Disabling erange-fill conflicts with PnetCDF setting") + endif() + endif() + endif() file(STRINGS "${pnetcdf_h}" relax_coord_bound_pnetcdf REGEX "^#define PNETCDF_RELAX_COORD_BOUND") string(REGEX REPLACE "[^0-9]" "" relax_coord_bound "${relax_coord_bound_pnetcdf}") - IF("x${relax_coord_bound}" STREQUAL "x1") - SET(relax_coord_bound_pnetcdf "ON") - ELSE() - SET(relax_coord_bound_pnetcdf "OFF") - ENDIF() + if("x${relax_coord_bound}" STREQUAL "x1") + set(relax_coord_bound_pnetcdf "ON") + else() + set(relax_coord_bound_pnetcdf "OFF") + endif() # pnetcdf must have relaxed coord bounds to build with netCDF-4 - IF(NOT ENABLE_ZERO_LENGTH_COORD_BOUND STREQUAL "${relax_coord_bound_pnetcdf}") - MESSAGE(FATAL_ERROR "Pnetcdf must be built with relax-coord-bound enabled") - ENDIF() -ENDIF() + if(NOT ENABLE_ZERO_LENGTH_COORD_BOUND STREQUAL "${relax_coord_bound_pnetcdf}") + message(FATAL_ERROR "Pnetcdf must be built with relax-coord-bound enabled") + endif() +endif() -IF(ENABLE_ERANGE_FILL) - MESSAGE(STATUS "Enabling use of fill value when NC_ERANGE") - SET(M4FLAGS "-DERANGE_FILL" CACHE STRING "") -ENDIF() +if(ENABLE_ERANGE_FILL) + message(STATUS "Enabling use of 
fill value when NC_ERANGE") + set(M4FLAGS "-DERANGE_FILL" CACHE STRING "") +endif() -IF(ENABLE_ZERO_LENGTH_COORD_BOUND) - MESSAGE(STATUS "Enabling a more relaxed check for NC_EINVALCOORDS") - ADD_DEFINITIONS(-DRELAX_COORD_BOUND) -ENDIF() +if(ENABLE_ZERO_LENGTH_COORD_BOUND) + message(STATUS "Enabling a more relaxed check for NC_EINVALCOORDS") + add_definitions(-DRELAX_COORD_BOUND) +endif() # Enable Parallel Tests. -OPTION(ENABLE_PARALLEL_TESTS "Enable Parallel IO Tests. Requires HDF5/NetCDF4 with parallel I/O Support." "${HDF5_PARALLEL}") -IF(ENABLE_PARALLEL_TESTS AND USE_PARALLEL) - SET(TEST_PARALLEL ON CACHE BOOL "") - IF(USE_NETCDF4) - SET(TEST_PARALLEL4 ON CACHE BOOL "") - ENDIF() -ENDIF() +option(ENABLE_PARALLEL_TESTS "Enable Parallel IO Tests. Requires HDF5/NetCDF4 with parallel I/O Support." "${HDF5_PARALLEL}") +if(ENABLE_PARALLEL_TESTS AND USE_PARALLEL) + set(TEST_PARALLEL ON CACHE BOOL "") + if(USE_NETCDF4) + set(TEST_PARALLEL4 ON CACHE BOOL "") + endif() +endif() IF (ENABLE_PARALLEL_TESTS AND NOT USE_PARALLEL) - MESSAGE(FATAL_ERROR "Parallel tests requested, but no parallel HDF5 installation detected.") -ENDIF() + message(FATAL_ERROR "Parallel tests requested, but no parallel HDF5 installation detected.") +endif() # Enable special filter test; experimental when using cmake. -OPTION(ENABLE_FILTER_TESTING "Enable filter testing. Ignored if shared libraries or netCDF4 are not enabled" ON) +option(ENABLE_FILTER_TESTING "Enable filter testing. Ignored if shared libraries or netCDF4 are not enabled" ON) -IF(ENABLE_FILTER_TESTING) +if(ENABLE_FILTER_TESTING) if(NOT ENABLE_HDF5 AND NOT ENABLE_NCZARR) - MESSAGE(WARNING "ENABLE_FILTER_TESTING requires HDF5 and/or NCZarr. Disabling.") - SET(ENABLE_FILTER_TESTING OFF CACHE BOOL "Enable Filter Testing" FORCE) - ENDIF() -ENDIF() + message(WARNING "ENABLE_FILTER_TESTING requires HDF5 and/or NCZarr. Disabling.") + set(ENABLE_FILTER_TESTING OFF CACHE BOOL "Enable Filter Testing" FORCE) + endif() +endif() -IF(NOT BUILD_SHARED_LIBS) - MESSAGE(WARNING "ENABLE_FILTER_TESTING requires shared libraries. Disabling.") - SET(ENABLE_FILTER_TESTING OFF) -ENDIF() +if(NOT BUILD_SHARED_LIBS) + message(WARNING "ENABLE_FILTER_TESTING requires shared libraries. Disabling.") + set(ENABLE_FILTER_TESTING OFF) +endif() -OPTION(ENABLE_NCZARR_FILTERS "Enable NCZarr filters" ON) -OPTION(ENABLE_NCZARR_FILTERS_TESTING "Enable NCZarr filter testing." ON) +option(ENABLE_NCZARR_FILTERS "Enable NCZarr filters" ON) +option(ENABLE_NCZARR_FILTERS_TESTING "Enable NCZarr filter testing." ON) # Constraints IF (NOT ENABLE_PLUGINS) - MESSAGE(WARNING "ENABLE_FILTER_TESTING requires shared libraries. Disabling.") - SET(ENABLE_NCZARR_FILTERS OFF CACHE BOOL "Enable NCZarr Filters." FORCE) -ENDIF() + message(WARNING "ENABLE_FILTER_TESTING requires shared libraries. Disabling.") + set(ENABLE_NCZARR_FILTERS OFF CACHE BOOL "Enable NCZarr Filters." 
FORCE) +endif() IF (NOT ENABLE_NCZARR) - MESSAGE(WARNING "ENABLE_NCZARR==NO => ENABLE_NCZARR_FILTERS==NO AND ENABLE_NCZARR_FILTER_TESTING==NO") - SET(ENABLE_NCZARR_FILTERS OFF CACHE BOOL "Disable NCZARR_FILTERS" FORCE) -ENDIF() + message(WARNING "ENABLE_NCZARR==NO => ENABLE_NCZARR_FILTERS==NO AND ENABLE_NCZARR_FILTER_TESTING==NO") + set(ENABLE_NCZARR_FILTERS OFF CACHE BOOL "Disable NCZARR_FILTERS" FORCE) +endif() IF (NOT ENABLE_NCZARR_FILTERS) - SET(ENABLE_NCZARR_FILTER_TESTING OFF CACHE BOOL "Enable NCZarr Filter Testing" FORCE) -ENDIF() + set(ENABLE_NCZARR_FILTER_TESTING OFF CACHE BOOL "Enable NCZarr Filter Testing" FORCE) +endif() -SET(ENABLE_CLIENTSIDE_FILTERS OFF) +set(ENABLE_CLIENTSIDE_FILTERS OFF) # Determine whether or not to generate documentation. -OPTION(ENABLE_DOXYGEN "Enable generation of doxygen-based documentation." OFF) -IF(ENABLE_DOXYGEN) +option(ENABLE_DOXYGEN "Enable generation of doxygen-based documentation." OFF) +if(ENABLE_DOXYGEN) # Offer the option to build internal documentation. - OPTION(ENABLE_INTERNAL_DOCS "Build internal documentation. This is of interest to developers only." OFF) - IF(ENABLE_INTERNAL_DOCS) - SET(BUILD_INTERNAL_DOCS yes CACHE STRING "") - ELSE() - SET(BUILD_INTERNAL_DOCS no CACHE STRING "") - ENDIF() + option(ENABLE_INTERNAL_DOCS "Build internal documentation. This is of interest to developers only." OFF) + if(ENABLE_INTERNAL_DOCS) + set(BUILD_INTERNAL_DOCS yes CACHE STRING "") + else() + set(BUILD_INTERNAL_DOCS no CACHE STRING "") + endif() ### # # If we are building release documentation, we need to set some # variables that will be used in the Doxygen.in template. ### - OPTION(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS "Build release documentation. This is of interest only to the netCDF developers." OFF) - IF(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS) - SET(DOXYGEN_CSS_FILE "${CMAKE_SOURCE_DIR}/docs/release.css" CACHE STRING "") - SET(DOXYGEN_HEADER_FILE "${CMAKE_SOURCE_DIR}/docs/release_header.html" CACHE STRING "") - SET(DOXYGEN_SEARCHENGINE "NO" CACHE STRING "") - SET(ENABLE_DOXYGEN_SERVER_BASED_SEARCH NO CACHE STRING "") - ELSE() - SET(DOXYGEN_CSS_FILE "" CACHE STRING "") - SET(DOXYGEN_HEADER_FILE "" CACHE STRING "") - SET(DOXYGEN_SEARCHENGINE "YES" CACHE STRING "") + option(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS "Build release documentation. This is of interest only to the netCDF developers." OFF) + if(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS) + set(DOXYGEN_CSS_FILE "${CMAKE_SOURCE_DIR}/docs/release.css" CACHE STRING "") + set(DOXYGEN_HEADER_FILE "${CMAKE_SOURCE_DIR}/docs/release_header.html" CACHE STRING "") + set(DOXYGEN_SEARCHENGINE "NO" CACHE STRING "") + set(ENABLE_DOXYGEN_SERVER_BASED_SEARCH NO CACHE STRING "") + else() + set(DOXYGEN_CSS_FILE "" CACHE STRING "") + set(DOXYGEN_HEADER_FILE "" CACHE STRING "") + set(DOXYGEN_SEARCHENGINE "YES" CACHE STRING "") # If not using release document configuration, # provide an option for server-based search. - OPTION(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH "Configure Doxygen with server-based search." OFF) - IF(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH) - SET(DOXYGEN_SERVER_BASED_SEARCH "YES" CACHE STRING "") - ELSE() - SET(DOXYGEN_SERVER_BASED_SEARCH "NO" CACHE STRING "") - ENDIF(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH) + option(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH "Configure Doxygen with server-based search." 
OFF) + if(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH) + set(DOXYGEN_SERVER_BASED_SEARCH "YES" CACHE STRING "") + else() + set(DOXYGEN_SERVER_BASED_SEARCH "NO" CACHE STRING "") + endif(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH) - ENDIF(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS) + endif(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS) # Option to turn on the TODO list in the doxygen-generated documentation. - OPTION(DOXYGEN_ENABLE_TASKS "Turn on test, todo, bug lists in documentation. This is of interest to developers only." OFF) - IF(DOXYGEN_ENABLE_TASKS) - SET(SHOW_DOXYGEN_TAG_LIST YES CACHE STRING "") - ELSE(DOXYGEN_ENABLE_TASKS) - SET(SHOW_DOXYGEN_TODO_LIST NO CACHE STRING "") - ENDIF(DOXYGEN_ENABLE_TASKS) - - OPTION(ENABLE_DOXYGEN_PDF_OUTPUT "[EXPERIMENTAL] Turn on PDF output for Doxygen-generated documentation." OFF) - - IF(ENABLE_DOXYGEN_PDF_OUTPUT) - SET(NC_ENABLE_DOXYGEN_PDF_OUTPUT "YES" CACHE STRING "") - ELSE() - SET(NC_ENABLE_DOXYGEN_PDF_OUTPUT "NO" CACHE STRING "") - ENDIF() - - FIND_PROGRAM(NC_DOT NAMES dot) + option(DOXYGEN_ENABLE_TASKS "Turn on test, todo, bug lists in documentation. This is of interest to developers only." OFF) + if(DOXYGEN_ENABLE_TASKS) + set(SHOW_DOXYGEN_TAG_LIST YES CACHE STRING "") + else(DOXYGEN_ENABLE_TASKS) + set(SHOW_DOXYGEN_TODO_LIST NO CACHE STRING "") + endif(DOXYGEN_ENABLE_TASKS) + + option(ENABLE_DOXYGEN_PDF_OUTPUT "[EXPERIMENTAL] Turn on PDF output for Doxygen-generated documentation." OFF) + + if(ENABLE_DOXYGEN_PDF_OUTPUT) + set(NC_ENABLE_DOXYGEN_PDF_OUTPUT "YES" CACHE STRING "") + else() + set(NC_ENABLE_DOXYGEN_PDF_OUTPUT "NO" CACHE STRING "") + endif() + + find_program(NC_DOT NAMES dot) # Specify whether or not 'dot' was found on the system path. - IF(NC_DOT) - SET(HAVE_DOT YES CACHE STRING "") - ELSE(NC_DOT) - SET(HAVE_DOT NO CACHE STRING "") - ENDIF(NC_DOT) -ENDIF() + if(NC_DOT) + set(HAVE_DOT YES CACHE STRING "") + else(NC_DOT) + set(HAVE_DOT NO CACHE STRING "") + endif(NC_DOT) +endif() # Always enable DISKLESS -OPTION(ENABLE_DISKLESS "Enable in-memory files" ON) +option(ENABLE_DISKLESS "Enable in-memory files" ON) # Always enable quantization. -OPTION(ENABLE_QUANTIZE "Enable variable quantization" ON) +option(ENABLE_QUANTIZE "Enable variable quantization" ON) # By default, MSVC has a stack size of 1000000. # Allow a user to override this. -IF(MSVC) - SET(NC_MSVC_STACK_SIZE 40000000 CACHE STRING "Default stack size for MSVC-based projects.") +if(MSVC) + set(NC_MSVC_STACK_SIZE 40000000 CACHE STRING "Default stack size for MSVC-based projects.") # By default, CMake sets the stack to 1000000. # Remove this limitation. # See here for more details: # http://www.cmake.org/pipermail/cmake/2009-April/028710.html - SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}") - SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}") - SET(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}") -ENDIF() + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}") +endif() # Set some of the options as advanced. 
MARK_AS_ADVANCED(ENABLE_INTERNAL_DOCS VALGRIND_TESTS ENABLE_COVERAGE_TESTS ) @@ -1130,49 +1130,49 @@ MARK_AS_ADVANCED(ENABLE_SHARED_LIBRARY_VERSION) ################################ # Library include checks -CHECK_INCLUDE_FILE("math.h" HAVE_MATH_H) -CHECK_INCLUDE_FILE("unistd.h" HAVE_UNISTD_H) +CHECK_INCLUDE_file("math.h" HAVE_MATH_H) +CHECK_INCLUDE_file("unistd.h" HAVE_UNISTD_H) # Solve a compatibility issue in ncgen/, which checks # for NO_UNISTD_H -IF(NOT HAVE_UNISTD_H) - SET(YY_NO_UNISTD_H TRUE) -ENDIF() - -CHECK_INCLUDE_FILE("alloca.h" HAVE_ALLOCA_H) -CHECK_INCLUDE_FILE("malloc.h" HAVE_MALLOC_H) -CHECK_INCLUDE_FILE("fcntl.h" HAVE_FCNTL_H) -CHECK_INCLUDE_FILE("getopt.h" HAVE_GETOPT_H) -CHECK_INCLUDE_FILE("locale.h" HAVE_LOCALE_H) -CHECK_INCLUDE_FILE("stdint.h" HAVE_STDINT_H) -CHECK_INCLUDE_FILE("stdio.h" HAVE_STDIO_H) -IF(MSVC) -CHECK_INCLUDE_FILE("io.h" HAVE_IO_H) -ENDIF(MSVC) -CHECK_INCLUDE_FILE("stdlib.h" HAVE_STDLIB_H) -CHECK_INCLUDE_FILE("ctype.h" HAVE_CTYPE_H) -CHECK_INCLUDE_FILE("stdarg.h" HAVE_STDARG_H) -CHECK_INCLUDE_FILE("strings.h" HAVE_STRINGS_H) -CHECK_INCLUDE_FILE("signal.h" HAVE_SIGNAL_H) -CHECK_INCLUDE_FILE("sys/param.h" HAVE_SYS_PARAM_H) -CHECK_INCLUDE_FILE("sys/stat.h" HAVE_SYS_STAT_H) -CHECK_INCLUDE_FILE("sys/time.h" HAVE_SYS_TIME_H) -CHECK_INCLUDE_FILE("sys/types.h" HAVE_SYS_TYPES_H) -CHECK_INCLUDE_FILE("sys/mman.h" HAVE_SYS_MMAN_H) -CHECK_INCLUDE_FILE("sys/resource.h" HAVE_SYS_RESOURCE_H) -CHECK_INCLUDE_FILE("fcntl.h" HAVE_FCNTL_H) -CHECK_INCLUDE_FILE("inttypes.h" HAVE_INTTYPES_H) -CHECK_INCLUDE_FILE("pstdint.h" HAVE_PSTDINT_H) -CHECK_INCLUDE_FILE("endian.h" HAVE_ENDIAN_H) -CHECK_INCLUDE_FILE("BaseTsd.h" HAVE_BASETSD_H) -CHECK_INCLUDE_FILE("stddef.h" HAVE_STDDEF_H) -CHECK_INCLUDE_FILE("string.h" HAVE_STRING_H) -CHECK_INCLUDE_FILE("winsock2.h" HAVE_WINSOCK2_H) -CHECK_INCLUDE_FILE("ftw.h" HAVE_FTW_H) -CHECK_INCLUDE_FILE("libgen.h" HAVE_LIBGEN_H) -CHECK_INCLUDE_FILE("execinfo.h" HAVE_EXECINFO_H) -CHECK_INCLUDE_FILE("dirent.h" HAVE_DIRENT_H) -CHECK_INCLUDE_FILE("time.h" HAVE_TIME_H) -CHECK_INCLUDE_FILE("dlfcn.h" HAVE_DLFCN_H) +if(NOT HAVE_UNISTD_H) + set(YY_NO_UNISTD_H TRUE) +endif() + +CHECK_INCLUDE_file("alloca.h" HAVE_ALLOCA_H) +CHECK_INCLUDE_file("malloc.h" HAVE_MALLOC_H) +CHECK_INCLUDE_file("fcntl.h" HAVE_FCNTL_H) +CHECK_INCLUDE_file("getopt.h" HAVE_GETOPT_H) +CHECK_INCLUDE_file("locale.h" HAVE_LOCALE_H) +CHECK_INCLUDE_file("stdint.h" HAVE_STDINT_H) +CHECK_INCLUDE_file("stdio.h" HAVE_STDIO_H) +if(MSVC) +CHECK_INCLUDE_file("io.h" HAVE_IO_H) +endif(MSVC) +CHECK_INCLUDE_file("stdlib.h" HAVE_STDLIB_H) +CHECK_INCLUDE_file("ctype.h" HAVE_CTYPE_H) +CHECK_INCLUDE_file("stdarg.h" HAVE_STDARG_H) +CHECK_INCLUDE_file("strings.h" HAVE_STRINGS_H) +CHECK_INCLUDE_file("signal.h" HAVE_SIGNAL_H) +CHECK_INCLUDE_file("sys/param.h" HAVE_SYS_PARAM_H) +CHECK_INCLUDE_file("sys/stat.h" HAVE_SYS_STAT_H) +CHECK_INCLUDE_file("sys/time.h" HAVE_SYS_TIME_H) +CHECK_INCLUDE_file("sys/types.h" HAVE_SYS_TYPES_H) +CHECK_INCLUDE_file("sys/mman.h" HAVE_SYS_MMAN_H) +CHECK_INCLUDE_file("sys/resource.h" HAVE_SYS_RESOURCE_H) +CHECK_INCLUDE_file("fcntl.h" HAVE_FCNTL_H) +CHECK_INCLUDE_file("inttypes.h" HAVE_INTTYPES_H) +CHECK_INCLUDE_file("pstdint.h" HAVE_PSTDINT_H) +CHECK_INCLUDE_file("endian.h" HAVE_ENDIAN_H) +CHECK_INCLUDE_file("BaseTsd.h" HAVE_BASETSD_H) +CHECK_INCLUDE_file("stddef.h" HAVE_STDDEF_H) +CHECK_INCLUDE_file("string.h" HAVE_STRING_H) +CHECK_INCLUDE_file("winsock2.h" HAVE_WINSOCK2_H) +CHECK_INCLUDE_file("ftw.h" HAVE_FTW_H) +CHECK_INCLUDE_file("libgen.h" HAVE_LIBGEN_H) 
+CHECK_INCLUDE_file("execinfo.h" HAVE_EXECINFO_H) +CHECK_INCLUDE_file("dirent.h" HAVE_DIRENT_H) +CHECK_INCLUDE_file("time.h" HAVE_TIME_H) +CHECK_INCLUDE_file("dlfcn.h" HAVE_DLFCN_H) # Symbol Exists CHECK_SYMBOL_EXISTS(isfinite "math.h" HAVE_DECL_ISFINITE) @@ -1184,27 +1184,27 @@ CHECK_SYMBOL_EXISTS(snprintf "stdio.h" HAVE_SNPRINTF) # Type checks # Aliases for automake consistency -SET(SIZEOF_VOIDSTAR ${CMAKE_SIZEOF_VOID_P}) -SET(SIZEOF_VOIDP ${SIZEOF_VOIDSTAR}) +set(SIZEOF_VOIDSTAR ${CMAKE_SIZEOF_VOID_P}) +set(SIZEOF_VOIDP ${SIZEOF_VOIDSTAR}) CHECK_TYPE_SIZE("char" SIZEOF_CHAR) CHECK_TYPE_SIZE("double" SIZEOF_DOUBLE) CHECK_TYPE_SIZE("float" SIZEOF_FLOAT) CHECK_TYPE_SIZE("int" SIZEOF_INT) CHECK_TYPE_SIZE("uint" SIZEOF_UINT) -IF(SIZEOF_UINT) - SET(HAVE_UINT TRUE) -ENDIF(SIZEOF_UINT) +if(SIZEOF_UINT) + set(HAVE_UINT TRUE) +endif(SIZEOF_UINT) CHECK_TYPE_SIZE("schar" SIZEOF_SCHAR) -IF(SIZEOF_SCHAR) - SET(HAVE_SCHAR TRUE) -ENDIF(SIZEOF_SCHAR) +if(SIZEOF_SCHAR) + set(HAVE_SCHAR TRUE) +endif(SIZEOF_SCHAR) CHECK_TYPE_SIZE("long" SIZEOF_LONG) CHECK_TYPE_SIZE("long long" SIZEOF_LONG_LONG) -IF(SIZEOF_LONG_LONG) - SET(HAVE_LONG_LONG_INT TRUE) -ENDIF(SIZEOF_LONG_LONG) +if(SIZEOF_LONG_LONG) + set(HAVE_LONG_LONG_INT TRUE) +endif(SIZEOF_LONG_LONG) CHECK_TYPE_SIZE("unsigned long long" SIZEOF_UNSIGNED_LONG_LONG) @@ -1212,48 +1212,48 @@ CHECK_TYPE_SIZE("off_t" SIZEOF_OFF_T) CHECK_TYPE_SIZE("off64_t" SIZEOF_OFF64_T) CHECK_TYPE_SIZE("short" SIZEOF_SHORT) CHECK_TYPE_SIZE("ushort" SIZEOF_USHORT) -IF(SIZEOF_USHORT) - SET(HAVE_USHORT TRUE) -ENDIF(SIZEOF_USHORT) +if(SIZEOF_USHORT) + set(HAVE_USHORT TRUE) +endif(SIZEOF_USHORT) CHECK_TYPE_SIZE("_Bool" SIZEOF__BOOL) CHECK_TYPE_SIZE("size_t" SIZEOF_SIZE_T) # Check whether to turn on or off CDF5 support. -IF(SIZEOF_SIZE_T EQUAL 4) - IF(ENABLE_CDF5) # enable or auto - STRING(TOUPPER ${ENABLE_CDF5} ENABLE_CDF5) - IF(ENABLE_CDF5 AND NOT ENABLE_CDF5 STREQUAL "AUTO") # explicitly enabled - MESSAGE(FATAL_ERROR "Unable to support CDF5 feature because size_t is less than 8 bytes") - ENDIF(ENABLE_CDF5 AND NOT ENABLE_CDF5 STREQUAL "AUTO") - SET(ENABLE_CDF5 OFF) # cannot support CDF5 - SET(USE_CDF5 OFF CACHE BOOL "") # cannot support CDF5 - ENDIF(ENABLE_CDF5) -ELSE(SIZEOF_SIZE_T EQUAL 4) - IF(ENABLE_CDF5) # explicitly set by user or not set - SET(USE_CDF5 ON CACHE BOOL "") - ELSE(ENABLE_CDF5) # explicitly disabled by user - SET(USE_CDF5 OFF CACHE BOOL "") - ENDIF(ENABLE_CDF5) -ENDIF(SIZEOF_SIZE_T EQUAL 4) +if(SIZEOF_SIZE_T EQUAL 4) + if(ENABLE_CDF5) # enable or auto + string(TOUPPER ${ENABLE_CDF5} ENABLE_CDF5) + if(ENABLE_CDF5 AND NOT ENABLE_CDF5 STREQUAL "AUTO") # explicitly enabled + message(FATAL_ERROR "Unable to support CDF5 feature because size_t is less than 8 bytes") + endif(ENABLE_CDF5 AND NOT ENABLE_CDF5 STREQUAL "AUTO") + set(ENABLE_CDF5 OFF) # cannot support CDF5 + set(USE_CDF5 OFF CACHE BOOL "") # cannot support CDF5 + endif(ENABLE_CDF5) +else(SIZEOF_SIZE_T EQUAL 4) + if(ENABLE_CDF5) # explicitly set by user or not set + set(USE_CDF5 ON CACHE BOOL "") + else(ENABLE_CDF5) # explicitly disabled by user + set(USE_CDF5 OFF CACHE BOOL "") + endif(ENABLE_CDF5) +endif(SIZEOF_SIZE_T EQUAL 4) CHECK_TYPE_SIZE("ssize_t" SIZEOF_SSIZE_T) -IF(SIZEOF_SSIZE_T) - SET(HAVE_SSIZE_T TRUE) -ENDIF(SIZEOF_SSIZE_T) +if(SIZEOF_SSIZE_T) + set(HAVE_SSIZE_T TRUE) +endif(SIZEOF_SSIZE_T) CHECK_TYPE_SIZE("ptrdiff_t" SIZEOF_PTRDIFF_T) -IF(SIZEOF_PTRDIFF_T) - SET(HAVE_PTRDIFF_T TRUE) -ENDIF(SIZEOF_PTRDIFF_T) +if(SIZEOF_PTRDIFF_T) + set(HAVE_PTRDIFF_T TRUE) +endif(SIZEOF_PTRDIFF_T) 
CHECK_TYPE_SIZE("uintptr_t" SIZEOF_UINTPTR_T) -IF(SIZEOF_UINTPTR_T) - SET(HAVE_UINTPTR_T TRUE) -ENDIF(SIZEOF_UINTPTR_T) +if(SIZEOF_UINTPTR_T) + set(HAVE_UINTPTR_T TRUE) +endif(SIZEOF_UINTPTR_T) CHECK_TYPE_SIZE("mode_t" SIZEOF_MODE_T) -IF(SIZEOF_MODE_T) - SET(HAVE_MODE_T TRUE) -ENDIF(SIZEOF_MODE_T) +if(SIZEOF_MODE_T) + set(HAVE_MODE_T TRUE) +endif(SIZEOF_MODE_T) # __int64 is used on Windows for large file support. CHECK_TYPE_SIZE("__int64" SIZEOF___INT_64) @@ -1266,16 +1266,16 @@ CHECK_TYPE_SIZE("long long" SIZEOF_LONGLONG) CHECK_TYPE_SIZE("unsigned long long" SIZEOF_ULONGLONG) CHECK_TYPE_SIZE("uint64_t" SIZEOF_UINT64_T) -IF(SIZEOF_UINT64_T) - SET(HAVE_UINT64_T TRUE) -ENDIF(SIZEOF_UINT64_T) +if(SIZEOF_UINT64_T) + set(HAVE_UINT64_T TRUE) +endif(SIZEOF_UINT64_T) # On windows systems, we redefine off_t as __int64 # to enable LFS. This is true on 32 and 64 bit system.s # We must redefine SIZEOF_OFF_T to match. -IF(MSVC AND SIZEOF___INT_64) - SET(SIZEOF_OFF_T ${SIZEOF___INT_64}) -ENDIF() +if(MSVC AND SIZEOF___INT_64) + set(SIZEOF_OFF_T ${SIZEOF___INT_64}) +endif() # Check for various functions. CHECK_FUNCTION_EXISTS(fsync HAVE_FSYNC) @@ -1306,54 +1306,54 @@ CHECK_SYMBOL_EXISTS("struct timespec" "time.h" HAVE_STRUCT_TIMESPEC) CHECK_FUNCTION_EXISTS(atexit HAVE_ATEXIT) # Control invoking nc_finalize at exit -OPTION(ENABLE_ATEXIT_FINALIZE "Invoke nc_finalize at exit." ON) -IF(NOT HAVE_ATEXIT) -IF(ENABLE_ATEXIT_FINALIZE AND NOT HAVE_ATEXIT) - SET(ENABLE_ATEXIT_FINALIZE OFF CACHE BOOL "Enable ATEXIT" FORCE) - MESSAGE(WARNING "ENABLE_ATEXIT_FINALIZE set but atexit() function not defined") -ENDIF() -ENDIF() +option(ENABLE_ATEXIT_FINALIZE "Invoke nc_finalize at exit." ON) +if(NOT HAVE_ATEXIT) +if(ENABLE_ATEXIT_FINALIZE AND NOT HAVE_ATEXIT) + set(ENABLE_ATEXIT_FINALIZE OFF CACHE BOOL "Enable ATEXIT" FORCE) + message(WARNING "ENABLE_ATEXIT_FINALIZE set but atexit() function not defined") +endif() +endif() # Check to see if MAP_ANONYMOUS is defined. 
-IF(MSVC) - MESSAGE(WARNING "mmap not supported under visual studio: disabling MMAP support.") - SET(ENABLE_MMAP OFF) -ELSE() +if(MSVC) + message(WARNING "mmap not supported under visual studio: disabling MMAP support.") + set(ENABLE_MMAP OFF) +else() CHECK_C_SOURCE_COMPILES(" #include int main() {int x = MAP_ANONYMOUS;}" HAVE_MAPANON) - IF(NOT HAVE_MMAP OR NOT HAVE_MAPANON) - MESSAGE(WARNING "mmap or MAP_ANONYMOUS not found: disabling MMAP support.") - SET(ENABLE_MMAP OFF) - ENDIF() -ENDIF() + if(NOT HAVE_MMAP OR NOT HAVE_MAPANON) + message(WARNING "mmap or MAP_ANONYMOUS not found: disabling MMAP support.") + set(ENABLE_MMAP OFF) + endif() +endif() -IF(ENABLE_MMAP) +if(ENABLE_MMAP) # Aliases - SET(BUILD_MMAP ON) - SET(USE_MMAP ON) -ENDIF(ENABLE_MMAP) + set(BUILD_MMAP ON) + set(USE_MMAP ON) +endif(ENABLE_MMAP) #CHECK_FUNCTION_EXISTS(alloca HAVE_ALLOCA) # Used in the `configure_file` calls below -SET(ISCMAKE "yes") -IF(MSVC) - SET(ISMSVC ON CACHE BOOL "" FORCE) - SET(REGEDIT ON CACHE BOOL "" FORCE) +set(ISCMAKE "yes") +if(MSVC) + set(ISMSVC ON CACHE BOOL "" FORCE) + set(REGEDIT ON CACHE BOOL "" FORCE) # Get windows major version and build number - EXECUTE_PROCESS(COMMAND "systeminfo" OUTPUT_VARIABLE WININFO) - IF(WININFO STREQUAL "") - SET(WVM 0) - SET(WVB 0) - ELSE() - STRING(REGEX MATCH "\nOS Version:[ \t]+[0-9.]+" WINVERLINE "${WININFO}") - STRING(REGEX REPLACE "[^0-9]*([0-9]+)[.]([0-9])+[.]([0-9]+)" "\\1" WVM "${WINVERLINE}") - STRING(REGEX REPLACE "[^0-9]*([0-9]+)[.]([0-9])+[.]([0-9]+)" "\\3" WVB "${WINVERLINE}") - ENDIF() - SET(WINVERMAJOR ${WVM} CACHE STRING "" FORCE) - SET(WINVERBUILD ${WVB} CACHE STRING "" FORCE) -ENDIF() + execute_process(COMMAND "systeminfo" OUTPUT_VARIABLE WININFO) + if(WININFO STREQUAL "") + set(WVM 0) + set(WVB 0) + else() + string(REGEX MATCH "\nOS Version:[ \t]+[0-9.]+" WINVERLINE "${WININFO}") + string(REGEX REPLACE "[^0-9]*([0-9]+)[.]([0-9])+[.]([0-9]+)" "\\1" WVM "${WINVERLINE}") + string(REGEX REPLACE "[^0-9]*([0-9]+)[.]([0-9])+[.]([0-9]+)" "\\3" WVB "${WINVERLINE}") + endif() + set(WINVERMAJOR ${WVM} CACHE STRING "" FORCE) + set(WINVERBUILD ${WVB} CACHE STRING "" FORCE) +endif() ##### # End system inspection checks. @@ -1361,14 +1361,14 @@ ENDIF() # A basic script used to convert m4 files -FIND_PROGRAM(NC_M4 NAMES m4 m4.exe) -IF(NC_M4) - MESSAGE(STATUS "Found m4: ${NC_M4}") - SET(HAVE_M4 TRUE) -ELSE() - MESSAGE(STATUS "m4 not found.") - SET(HAVE_M4 FALSE) -ENDIF() +find_program(NC_M4 NAMES m4 m4.exe) +if(NC_M4) + message(STATUS "Found m4: ${NC_M4}") + set(HAVE_M4 TRUE) +else() + message(STATUS "m4 not found.") + set(HAVE_M4 FALSE) +endif() ##specific # Shell script Macro @@ -1376,36 +1376,36 @@ ENDIF() # Determine if 'bash' is on the system. ## -OPTION(ENABLE_BASH_SCRIPT_TESTING "Detection is typically automatic, but this option can be used to force enable/disable bash-script based tests." ON) - -IF(ENABLE_BASH_SCRIPT_TESTING) - FIND_PROGRAM(HAVE_BASH bash) - IF(HAVE_BASH) - STRING(COMPARE EQUAL "${HAVE_BASH}" "C:/Windows/System32/bash.exe" IS_BASH_EXE) - IF(NOT IS_BASH_EXE) - MESSAGE(STATUS "Found bash: ${HAVE_BASH}") - ELSE() - MESSAGE(STATUS "Ignoring ${HAVE_BASH}") - SET(HAVE_BASH "") - ENDIF() - ELSE() - MESSAGE(STATUS "Bash shell not found; disabling shell script tests.") - ENDIF() -ELSE(ENABLE_BASH_SCRIPT_TESTING) - SET(HAVE_BASH "") -ENDIF(ENABLE_BASH_SCRIPT_TESTING) +option(ENABLE_BASH_SCRIPT_TESTING "Detection is typically automatic, but this option can be used to force enable/disable bash-script based tests." 
ON) + +if(ENABLE_BASH_SCRIPT_TESTING) + find_program(HAVE_BASH bash) + if(HAVE_BASH) + string(COMPARE EQUAL "${HAVE_BASH}" "C:/Windows/System32/bash.exe" IS_BASH_EXE) + if(NOT IS_BASH_EXE) + message(STATUS "Found bash: ${HAVE_BASH}") + else() + message(STATUS "Ignoring ${HAVE_BASH}") + set(HAVE_BASH "") + endif() + else() + message(STATUS "Bash shell not found; disabling shell script tests.") + endif() +else(ENABLE_BASH_SCRIPT_TESTING) + set(HAVE_BASH "") +endif(ENABLE_BASH_SCRIPT_TESTING) # Create config.h file. configure_file("${netCDF_SOURCE_DIR}/config.h.cmake.in" "${netCDF_BINARY_DIR}/config.h") -ADD_DEFINITIONS(-DHAVE_CONFIG_H) -INCLUDE_DIRECTORIES(${netCDF_BINARY_DIR}) +add_definitions(-DHAVE_CONFIG_H) +include_directories(${netCDF_BINARY_DIR}) # End autotools-style checks for config.h ##### # Set core names of the libraries. ##### -SET(netCDF_LIB_CORENAME "netcdf") +set(netCDF_LIB_CORENAME "netcdf") ##### # Set the true names of all the libraries, if customized by external project @@ -1415,109 +1415,109 @@ add_subdirectory("include") add_subdirectory(libdispatch) add_subdirectory(libsrc) -IF(USE_PNETCDF) +if(USE_PNETCDF) add_subdirectory(libsrcp) -ENDIF(USE_PNETCDF) +endif(USE_PNETCDF) -IF(USE_NETCDF4) +if(USE_NETCDF4) add_subdirectory(libsrc4) -ENDIF() +endif() -IF(USE_HDF5) +if(USE_HDF5) add_subdirectory(libhdf5) -ENDIF(USE_HDF5) +endif(USE_HDF5) -IF(USE_HDF4) +if(USE_HDF4) add_subdirectory(libhdf4) add_subdirectory(hdf4_test) -ENDIF(USE_HDF4) - -IF(ENABLE_DAP2) - ADD_SUBDIRECTORY(oc2) - ADD_SUBDIRECTORY(libdap2) -ENDIF() - -IF(ENABLE_DAP4) - ADD_SUBDIRECTORY(libdap4) - ADD_SUBDIRECTORY(libncxml) -ELSE() - IF(ENABLE_S3_INTERNAL) - ADD_SUBDIRECTORY(libncxml) - ENDIF() -ENDIF() - -IF(ENABLE_PLUGINS) - ADD_SUBDIRECTORY(libncpoco) -ENDIF() - -IF(ENABLE_NCZARR) - ADD_SUBDIRECTORY(libnczarr) - FILE(COPY ${netCDF_SOURCE_DIR}/unit_test/timer_utils.h +endif(USE_HDF4) + +if(ENABLE_DAP2) + add_subdirectory(oc2) + add_subdirectory(libdap2) +endif() + +if(ENABLE_DAP4) + add_subdirectory(libdap4) + add_subdirectory(libncxml) +else() + if(ENABLE_S3_INTERNAL) + add_subdirectory(libncxml) + endif() +endif() + +if(ENABLE_PLUGINS) + add_subdirectory(libncpoco) +endif() + +if(ENABLE_NCZARR) + add_subdirectory(libnczarr) + file(COPY ${netCDF_SOURCE_DIR}/unit_test/timer_utils.h DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) - FILE(COPY ${netCDF_SOURCE_DIR}/unit_test/timer_utils.c + file(COPY ${netCDF_SOURCE_DIR}/unit_test/timer_utils.c DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) - FILE(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter.c + file(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter.c DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) - FILE(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter_misc.c + file(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter_misc.c DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) - FILE(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter_repeat.c + file(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter_repeat.c DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) - FILE(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter_order.c + file(COPY ${netCDF_SOURCE_DIR}/nc_test4/test_filter_order.c DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) - FILE(COPY ${netCDF_SOURCE_DIR}/nc_test4/tst_multifilter.c + file(COPY ${netCDF_SOURCE_DIR}/nc_test4/tst_multifilter.c DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/) -ENDIF() +endif() add_subdirectory(liblib) -IF(ENABLE_PLUGINS) +if(ENABLE_PLUGINS) add_subdirectory(plugins) -ENDIF() +endif() # For tests and utilities, we are no longer # exporting symbols but 
rather importing them. -IF(BUILD_DLL) +if(BUILD_DLL) REMOVE_DEFINITIONS(-DDLL_EXPORT) -ENDIF() +endif() # Enable Utilities. -IF(BUILD_UTILITIES) - INCLUDE_DIRECTORIES(ncdump) - ADD_SUBDIRECTORY(ncgen) - ADD_SUBDIRECTORY(ncgen3) - ADD_SUBDIRECTORY(ncdump) -ENDIF() +if(BUILD_UTILITIES) + include_directories(ncdump) + add_subdirectory(ncgen) + add_subdirectory(ncgen3) + add_subdirectory(ncdump) +endif() # Enable tests -IF(ENABLE_TESTS) - IF(ENABLE_V2_API) - ADD_SUBDIRECTORY(nctest) - ENDIF() - ADD_SUBDIRECTORY(nc_test) - IF(USE_HDF5) - INCLUDE_DIRECTORIES(h5_test) - ADD_SUBDIRECTORY(nc_test4) - ADD_SUBDIRECTORY(h5_test) - ENDIF() - IF(ENABLE_DAP2) - ADD_SUBDIRECTORY(ncdap_test) - ENDIF() - IF(ENABLE_DAP4) - ADD_SUBDIRECTORY(dap4_test) - ENDIF() - IF(ENABLE_EXAMPLES) - ADD_SUBDIRECTORY(examples) - ENDIF() - IF(ENABLE_BENCHMARKS) - ADD_SUBDIRECTORY(nc_perf) - ENDIF(ENABLE_BENCHMARKS) - IF(ENABLE_UNIT_TESTS) - ADD_SUBDIRECTORY(unit_test) - ENDIF(ENABLE_UNIT_TESTS) - IF(ENABLE_NCZARR) - ADD_SUBDIRECTORY(nczarr_test) - ENDIF() -ENDIF() +if(ENABLE_TESTS) + if(ENABLE_V2_API) + add_subdirectory(nctest) + endif() + add_subdirectory(nc_test) + if(USE_HDF5) + include_directories(h5_test) + add_subdirectory(nc_test4) + add_subdirectory(h5_test) + endif() + if(ENABLE_DAP2) + add_subdirectory(ncdap_test) + endif() + if(ENABLE_DAP4) + add_subdirectory(dap4_test) + endif() + if(ENABLE_EXAMPLES) + add_subdirectory(examples) + endif() + if(ENABLE_BENCHMARKS) + add_subdirectory(nc_perf) + endif(ENABLE_BENCHMARKS) + if(ENABLE_UNIT_TESTS) + add_subdirectory(unit_test) + endif(ENABLE_UNIT_TESTS) + if(ENABLE_NCZARR) + add_subdirectory(nczarr_test) + endif() +endif() # Code to generate an export header #GENERATE_EXPORT_HEADER(netcdf @@ -1530,32 +1530,32 @@ ENDIF() ##### # Build doxygen documentation, if need be. ##### -ADD_SUBDIRECTORY(docs) +add_subdirectory(docs) ## # Brute force, grab all of the dlls from the dependency directory, # install them in the binary dir. Grab all of the .libs, put them # in the libdir. ## -IF(MSVC) - FILE(GLOB COPY_FILES ${CMAKE_PREFIX_PATH}/lib/*.lib) - INSTALL(FILES ${COPY_FILES} +if(MSVC) + file(GLOB COPY_FILES ${CMAKE_PREFIX_PATH}/lib/*.lib) + install(FILES ${COPY_FILES} DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT dependencies) - FILE(GLOB COPY_FILES ${CMAKE_PREFIX_PATH}/bin/*.dll) - STRING(REGEX REPLACE "msv[.*].dll" "" COPY_FILES "${COPY_FILES}") - INSTALL(FILES ${COPY_FILES} + file(GLOB COPY_FILES ${CMAKE_PREFIX_PATH}/bin/*.dll) + string(REGEX REPLACE "msv[.*].dll" "" COPY_FILES "${COPY_FILES}") + install(FILES ${COPY_FILES} DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT dependencies) -ENDIF() +endif() # Subdirectory CMakeLists.txt files should specify their own # 'install' files. # Including 'CPack' kicks everything off. -INCLUDE(InstallRequiredSystemLibraries) -CONFIGURE_FILE( +include(InstallRequiredSystemLibraries) +configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/FixBundle.cmake.in ${CMAKE_CURRENT_BINARY_DIR}/FixBundle.cmake @ONLY @@ -1565,68 +1565,68 @@ CONFIGURE_FILE( # Create pkgconfig files. 
### -IF(NOT DEFINED CMAKE_INSTALL_LIBDIR) - SET(CMAKE_INSTALL_LIBDIR lib) -ENDIF(NOT DEFINED CMAKE_INSTALL_LIBDIR) +if(NOT DEFINED CMAKE_INSTALL_LIBDIR) + set(CMAKE_INSTALL_LIBDIR lib) +endif(NOT DEFINED CMAKE_INSTALL_LIBDIR) # Set -SET(prefix ${CMAKE_INSTALL_PREFIX}) -SET(exec_prefix ${CMAKE_INSTALL_PREFIX}) -SET(libdir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}) -SET(includedir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}) -SET(CC ${CMAKE_C_COMPILER}) +set(prefix ${CMAKE_INSTALL_PREFIX}) +set(exec_prefix ${CMAKE_INSTALL_PREFIX}) +set(libdir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}) +set(includedir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}) +set(CC ${CMAKE_C_COMPILER}) # Process all dependency libraries and create a string # used when parsing netcdf.pc.in -SET(NC_LIBS "") - -FOREACH(_LIB ${ALL_TLL_LIBS}) - GET_FILENAME_COMPONENT(_LIB_NAME ${_LIB} NAME_WE) - STRING(REGEX REPLACE "^lib" "" _NAME ${_LIB_NAME}) - LIST(APPEND NC_LIBS "-l${_NAME}") - GET_FILENAME_COMPONENT(_LIB_DIR ${_LIB} PATH) - LIST(APPEND LINKFLAGS "-L${_LIB_DIR}") -ENDFOREACH() - -#SET(NC_LIBS "-lnetcdf ${NC_LIBS}") -IF(NC_LIBS) - STRING(REPLACE ";" " " NC_LIBS "${NC_LIBS}") - STRING(REPLACE "-lhdf5::hdf5-shared" "-lhdf5" NC_LIBS ${NC_LIBS}) - STRING(REPLACE "-lhdf5::hdf5_hl-shared" "-lhdf5_hl" NC_LIBS ${NC_LIBS}) - STRING(REPLACE "-lhdf5::hdf5-static" "-lhdf5" NC_LIBS ${NC_LIBS}) - STRING(REPLACE "-lhdf5::hdf5_hl-static" "-lhdf5_hl" NC_LIBS ${NC_LIBS}) -ENDIF() +set(NC_LIBS "") + +foreach(_LIB ${ALL_TLL_LIBS}) + get_filename_component(_LIB_NAME ${_LIB} NAME_WE) + string(REGEX REPLACE "^lib" "" _NAME ${_LIB_NAME}) + list(APPEND NC_LIBS "-l${_NAME}") + get_filename_component(_LIB_DIR ${_LIB} PATH) + list(APPEND LINKFLAGS "-L${_LIB_DIR}") +endforach() + +#set(NC_LIBS "-lnetcdf ${NC_LIBS}") +if(NC_LIBS) + string(REPLACE ";" " " NC_LIBS "${NC_LIBS}") + string(REPLACE "-lhdf5::hdf5-shared" "-lhdf5" NC_LIBS ${NC_LIBS}) + string(REPLACE "-lhdf5::hdf5_hl-shared" "-lhdf5_hl" NC_LIBS ${NC_LIBS}) + string(REPLACE "-lhdf5::hdf5-static" "-lhdf5" NC_LIBS ${NC_LIBS}) + string(REPLACE "-lhdf5::hdf5_hl-static" "-lhdf5_hl" NC_LIBS ${NC_LIBS}) +endif() -STRING(REPLACE ";" " " LINKFLAGS "${LINKFLAGS}") +string(REPLACE ";" " " LINKFLAGS "${LINKFLAGS}") -LIST(REMOVE_DUPLICATES NC_LIBS) -LIST(REMOVE_DUPLICATES LINKFLAGS) +list(REMOVE_DUPLICATES NC_LIBS) +list(REMOVE_DUPLICATES LINKFLAGS) -SET(LIBS ${NC_LIBS}) -SET(NC_LIBS "-lnetcdf") +set(LIBS ${NC_LIBS}) +set(NC_LIBS "-lnetcdf") configure_file( ${netCDF_SOURCE_DIR}/netcdf.pc.in ${netCDF_BINARY_DIR}/netcdf.pc @ONLY) -IF(NOT IS_DIRECTORY ${netCDF_BINARY_DIR}/tmp) - FILE(MAKE_DIRECTORY ${netCDF_BINARY_DIR}/tmp) -ENDIF() +if(NOT IS_DIRECTORY ${netCDF_BINARY_DIR}/tmp) + file(MAKE_DIRECTORY ${netCDF_BINARY_DIR}/tmp) +endif() configure_file("${netCDF_SOURCE_DIR}/nc-config.cmake.in" "${netCDF_BINARY_DIR}/tmp/nc-config" @ONLY NEWLINE_STYLE LF) -FILE(COPY "${netCDF_BINARY_DIR}/tmp/nc-config" +file(COPY "${netCDF_BINARY_DIR}/tmp/nc-config" DESTINATION ${netCDF_BINARY_DIR}/ FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE) -INSTALL(FILES ${netCDF_BINARY_DIR}/netcdf.pc +install(FILES ${netCDF_BINARY_DIR}/netcdf.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig COMPONENT utilities) -INSTALL(PROGRAMS ${netCDF_BINARY_DIR}/nc-config +install(PROGRAMS ${netCDF_BINARY_DIR}/nc-config DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT utilities) @@ -1650,20 +1650,20 @@ ENABLE_MAKEDIST(README.md COPYRIGHT RELEASE_NOTES.md INSTALL 
INSTALL.cmake test_ # Set variables to mirror those used by autoconf. # This way we don't need to maintain two separate template # files. -SET(host_cpu "${cpu}") -SET(host_vendor "${osname}") -SET(host_os "${osrel}") -SET(abs_top_builddir "${CMAKE_CURRENT_BINARY_DIR}") -SET(abs_top_srcdir "${CMAKE_CURRENT_SOURCE_DIR}") -STRING(RANDOM LENGTH 3 ALPHABET "0123456789" PLATFORMUID) -MATH(EXPR PLATFORMUID "${PLATFORMUID} + 1" OUTPUT_FORMAT DECIMAL) +set(host_cpu "${cpu}") +set(host_vendor "${osname}") +set(host_os "${osrel}") +set(abs_top_builddir "${CMAKE_CURRENT_BINARY_DIR}") +set(abs_top_srcdir "${CMAKE_CURRENT_SOURCE_DIR}") +string(RANDOM LENGTH 3 ALPHABET "0123456789" PLATFORMUID) +math(EXPR PLATFORMUID "${PLATFORMUID} + 1" OUTPUT_FORMAT DECIMAL) -SET(CC_VERSION "${CMAKE_C_COMPILER}") +set(CC_VERSION "${CMAKE_C_COMPILER}") # Build *FLAGS for libnetcdf.settings. -SET(CFLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}}") -SET(CPPFLAGS "${CMAKE_CPP_FLAGS} ${CMAKE_CPP_FLAGS_${CMAKE_BUILD_TYPE}}") -SET(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE}}") +set(CFLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}}") +set(CPPFLAGS "${CMAKE_CPP_FLAGS} ${CMAKE_CPP_FLAGS_${CMAKE_BUILD_TYPE}}") +set(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE}}") is_disabled(BUILD_SHARED_LIBS enable_static) is_enabled(BUILD_SHARED_LIBS enable_shared) @@ -1705,58 +1705,58 @@ is_enabled(HAVE_BZ2 HAS_BZ2) is_enabled(ENABLE_REMOTE_FUNCTIONALITY DO_REMOTE_FUNCTIONALITY) if(ENABLE_S3_INTERNAL) - SET(WHICH_S3_SDK "internal") - SET(NC_WHICH_S3_SDK "internal") + set(WHICH_S3_SDK "internal") + set(NC_WHICH_S3_SDK "internal") elseif(ENABLE_S3_AWS) - SET(WHICH_S3_SDK "aws-sdk-cpp") - SET(NC_WHICH_S3_SDK "aws-sdk-cpp") + set(WHICH_S3_SDK "aws-sdk-cpp") + set(NC_WHICH_S3_SDK "aws-sdk-cpp") else() - SET(WHICH_S3_SDK "none") - SET(NC_WHICH_S3_SDK "none") + set(WHICH_S3_SDK "none") + set(NC_WHICH_S3_SDK "none") endif() if(WITH_S3_TESTING STREQUAL PUBLIC) -SET(ENABLE_S3_TESTING "public") +set(ENABLE_S3_TESTING "public") elseif(WITH_S3_TESTING) -SET(ENABLE_S3_TESTING "yes") -SET(ENABLE_S3_TESTALL "yes") +set(ENABLE_S3_TESTING "yes") +set(ENABLE_S3_TESTALL "yes") elseif(NOT WITH_S3_TESTING) -SET(ENABLE_S3_TESTING "no") +set(ENABLE_S3_TESTING "no") else() -SET(ENABLE_S3_TESTING "no") +set(ENABLE_S3_TESTING "no") endif() # The Unidata testing S3 bucket # WARNING: this must match the value in configure.ac -SET(S3TESTBUCKET "unidata-zarr-test-data" CACHE STRING "S3 test bucket") +set(S3TESTBUCKET "unidata-zarr-test-data" CACHE STRING "S3 test bucket") # The working S3 path tree within the Unidata bucket. # WARNING: this must match the value in configure.ac -SET(S3TESTSUBTREE "netcdf-c" CACHE STRING "Working S3 path.") +set(S3TESTSUBTREE "netcdf-c" CACHE STRING "Working S3 path.") # Build a unique id based on the date string(TIMESTAMP TESTUID "%s") if(ENABLE_S3_TESTING) -FILE(APPEND "${CMAKE_CURRENT_BINARY_DIR}/s3cleanup_${PLATFORMUID}.uids" "${TESTUID}\n") +file(APPEND "${CMAKE_CURRENT_BINARY_DIR}/s3cleanup_${PLATFORMUID}.uids" "${TESTUID}\n") endif() # Copy the CTest customization file into binary directory, as required. -CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.cmake") +configure_file("${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.cmake") # Generate file from template. 
-CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/libnetcdf.settings.in" +configure_file("${CMAKE_CURRENT_SOURCE_DIR}/libnetcdf.settings.in" "${CMAKE_CURRENT_BINARY_DIR}/libnetcdf.settings" @ONLY) # Read in settings file, print out. # Avoid using system-specific calls so that this # might also work on Windows. -FILE(READ "${CMAKE_CURRENT_BINARY_DIR}/libnetcdf.settings" +file(READ "${CMAKE_CURRENT_BINARY_DIR}/libnetcdf.settings" LIBNETCDF_SETTINGS) -MESSAGE(STATUS ${LIBNETCDF_SETTINGS}) +message(STATUS ${LIBNETCDF_SETTINGS}) # Install libnetcdf.settings file into same location # as the libraries. -INSTALL(FILES "${netCDF_BINARY_DIR}/libnetcdf.settings" +install(FILES "${netCDF_BINARY_DIR}/libnetcdf.settings" DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT libraries) @@ -1781,17 +1781,17 @@ configure_file( #### # Build test_common.sh ##### -SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/test_common.in) -SET(TOPSRCDIR "${CMAKE_CURRENT_SOURCE_DIR}") -SET(TOPBUILDDIR "${CMAKE_CURRENT_BINARY_DIR}") +set(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/test_common.in) +set(TOPSRCDIR "${CMAKE_CURRENT_SOURCE_DIR}") +set(TOPBUILDDIR "${CMAKE_CURRENT_BINARY_DIR}") configure_file(${CMAKE_CURRENT_SOURCE_DIR}/test_common.in ${CMAKE_CURRENT_BINARY_DIR}/test_common.sh @ONLY NEWLINE_STYLE LF) #### # Build s3cleanup.sh and s3gc.sh ##### -SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/s3cleanup.in ${CMAKE_CURRENT_SOURCE_DIR}/s3gc.in) -SET(TOPSRCDIR "${CMAKE_CURRENT_SOURCE_DIR}") -SET(TOPBUILDDIR "${CMAKE_CURRENT_BINARY_DIR}") +set(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/s3cleanup.in ${CMAKE_CURRENT_SOURCE_DIR}/s3gc.in) +set(TOPSRCDIR "${CMAKE_CURRENT_SOURCE_DIR}") +set(TOPBUILDDIR "${CMAKE_CURRENT_BINARY_DIR}") configure_file(${CMAKE_CURRENT_SOURCE_DIR}/s3cleanup.in ${CMAKE_CURRENT_BINARY_DIR}/s3cleanup.sh @ONLY NEWLINE_STYLE LF) configure_file(${CMAKE_CURRENT_SOURCE_DIR}/s3gc.in ${CMAKE_CURRENT_BINARY_DIR}/s3gc.sh @ONLY NEWLINE_STYLE LF) @@ -1803,14 +1803,14 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURREN configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/plugins/findplugin.sh @ONLY NEWLINE_STYLE LF) configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/examples/C/findplugin.sh @ONLY NEWLINE_STYLE LF) -IF(ENABLE_BENCHMARKS) - IF(ENABLE_PARALLEL4) - CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/nc_perf/run_par_bm_test.sh.in ${CMAKE_CURRENT_BINARY_DIR}/nc_perf/run_par_bm_test.sh @ONLY NEWLINE_STYLE LF) - CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/nc_perf/run_gfs_test.sh.in ${CMAKE_CURRENT_BINARY_DIR}/nc_perf/run_gfs_test.sh @ONLY NEWLINE_STYLE LF) - ENDIF(ENABLE_PARALLEL4) -ENDIF(ENABLE_BENCHMARKS) +if(ENABLE_BENCHMARKS) + if(ENABLE_PARALLEL4) + configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_perf/run_par_bm_test.sh.in ${CMAKE_CURRENT_BINARY_DIR}/nc_perf/run_par_bm_test.sh @ONLY NEWLINE_STYLE LF) + configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_perf/run_gfs_test.sh.in ${CMAKE_CURRENT_BINARY_DIR}/nc_perf/run_gfs_test.sh @ONLY NEWLINE_STYLE LF) + endif(ENABLE_PARALLEL4) +endif(ENABLE_BENCHMARKS) -IF(ENABLE_TESTS) +if(ENABLE_TESTS) ##### # Build ncdap_test|dap4_test/findtestserver[4].c ##### @@ -1828,7 +1828,7 @@ IF(ENABLE_TESTS) configure_file(${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.cmake.in ${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.cmake NEWLINE_STYLE LF) -ENDIF() +endif() if(DEFINED ENV{LIB_FUZZING_ENGINE}) add_subdirectory(fuzz) @@ -1850,7 +1850,7 @@ install(EXPORT 
netCDFTargets ) include(CMakePackageConfigHelpers) -CONFIGURE_PACKAGE_CONFIG_FILE( +CONFIGURE_PACKAGE_CONFIG_file( "${CMAKE_CURRENT_SOURCE_DIR}/netCDFConfig.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/netCDFConfig.cmake" INSTALL_DESTINATION "${ConfigPackageLocation}" @@ -1861,7 +1861,7 @@ CONFIGURE_PACKAGE_CONFIG_FILE( CMAKE_INSTALL_LIBDIR ) -INSTALL( +install( FILES "${CMAKE_CURRENT_BINARY_DIR}/netCDFConfig.cmake" DESTINATION "${ConfigPackageLocation}" COMPONENT headers diff --git a/PostInstall.cmake b/PostInstall.cmake index c3fa9100b7..2383852520 100644 --- a/PostInstall.cmake +++ b/PostInstall.cmake @@ -1 +1 @@ -EXECUTE_PROCESS(COMMAND sh -c "${CMAKE_BINARY_DIR}/postinstall.sh -t cmake") \ No newline at end of file +execute_process(COMMAND sh -c "${CMAKE_BINARY_DIR}/postinstall.sh -t cmake") \ No newline at end of file From 206b8ce651018efb019f69c3a7b2a712c507fd37 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Wed, 17 Jan 2024 15:40:23 -0800 Subject: [PATCH 26/33] Update release notes. --- RELEASE_NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 5dd9403045..3dba4adc30 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -7,6 +7,7 @@ This file contains a high-level description of this package's evolution. Release ## 4.9.3 - TBD +* [cmake] Move dependency management out of the root-level `CMakeLists.txt` into two different files in the `cmake/` folder, `dependencies.cmake` and `netcdf_functions_macros.cmake`. See [Github #2838](https://github.com/Unidata/netcdf-c/pull/2838/) for more information. * Obviate a number of irrelevant warnings. See [Github #2781](https://github.com/Unidata/netcdf-c/pull/2781). * Improve the speed and data quantity for DAP4 queries. See [Github #2765](https://github.com/Unidata/netcdf-c/pull/2765). * Remove the use of execinfo to programmatically dump the stack; it never worked. See [Github #2789](https://github.com/Unidata/netcdf-c/pull/2789). From 9fb46ce480b3d640f2cc58afb0fe320dcdba6c93 Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Thu, 18 Jan 2024 09:15:20 -0600 Subject: [PATCH 27/33] moving functions and macros to new file, lowercase things --- CMakeInstallation.cmake | 4 ---- cmake/dependencies.cmake | 11 +++++++++-- cmake/netcdf_functions_macros.cmake | 3 +++ 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 4cc5215b23..a558827f99 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -70,12 +70,8 @@ endif() ### # This should be set using the output of dpkg --print-architecture. -FIND_PROGRAM(NC_DPKG NAMES dpkg) if(NC_DPKG) # Define a macro for getting the dpkg architecture. - macro(getdpkg_arch arch) - execute_process(COMMAND "${NC_DPKG}" "--print-architecture" OUTPUT_VARIABLE "${arch}" OUTPUT_STRIP_TRAILING_WHITESPACE) - endmacro(getdpkg_arch) getdpkg_arch(dpkg_arch) set(CPACK_DEBIAN_PACKAGE_NAME "netcdf4-dev") diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index f101c6de9c..37767ed5cc 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -61,9 +61,9 @@ if(ENABLE_HDF4) message(STATUS "Found JPEG libraries: ${JPEG_LIB}") # Option to enable HDF4 file tests. - OPTION(ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." ON) + option(ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." 
ON) if(ENABLE_HDF4_FILE_TESTS) - FIND_PROGRAM(PROG_CURL NAMES curl) + find_program(PROG_CURL NAMES curl) if(PROG_CURL) set(USE_HDF4_FILE_TESTS ON ) else() @@ -649,4 +649,11 @@ endif() ################################ if(ENABLE_DOXYGEN) find_package(Doxygen REQUIRED) +endif() + +################################ +# NC_DPKG +################################ +if (NETCDF_PACKAGE) + find_program(NC_DPKG NAMES dpkg) endif() \ No newline at end of file diff --git a/cmake/netcdf_functions_macros.cmake b/cmake/netcdf_functions_macros.cmake index c60d380e64..f615f50274 100644 --- a/cmake/netcdf_functions_macros.cmake +++ b/cmake/netcdf_functions_macros.cmake @@ -275,6 +275,9 @@ macro(add_sh_test prefix F) endif() endmacro() +macro(getdpkg_arch arch) + execute_process(COMMAND "${NC_DPKG}" "--print-architecture" OUTPUT_VARIABLE "${arch}" OUTPUT_STRIP_TRAILING_WHITESPACE) +endmacro(getdpkg_arch) ################################ # Functions From e6f68f7f538e9c0f16c3840fab8e886b8683cca8 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Fri, 6 Oct 2023 16:39:39 +0100 Subject: [PATCH 28/33] CMake: Find HDF5 header we can safely include for other checks --- cmake/check_hdf5.cmake | 55 ++++++++++++++++++++++++++++++++++++++++ cmake/dependencies.cmake | 23 ++++------------- 2 files changed, 60 insertions(+), 18 deletions(-) create mode 100644 cmake/check_hdf5.cmake diff --git a/cmake/check_hdf5.cmake b/cmake/check_hdf5.cmake new file mode 100644 index 0000000000..da097a112e --- /dev/null +++ b/cmake/check_hdf5.cmake @@ -0,0 +1,55 @@ +# Work out which HDF5 config header we can safely include +# +# We'd like to just use H5public.h, but if HDF5 was built against MPI, this +# might require us to have found MPI already. The next best file is H5pubconf.h, +# which actually has all the feature macros we want to check, but some +# distributions rename this for multiarch, so we've got to check some different +# names. 
+# +# HDF5_INCLUDE_DIR should already be set before calling this +function(check_hdf5_feature_header) + if (_H5_FEATURE_HEADER) + return() + endif() + + include(CheckIncludeFile) + + set(CMAKE_REQUIRED_INCLUDES ${HDF5_INCLUDE_DIR}) + + message(STATUS "Checking for HDF5 config header") + foreach(_h5_header "H5public.h" "H5pubconf.h" "H5pubconf-64.h" "H5pubconf-32.h") + check_include_file(${_h5_header} _can_include_h5_header) + + if (_can_include_h5_header) + message(STATUS "Using ${_h5_header} to check for feature macros") + set(_H5_FEATURE_HEADER ${_h5_header} CACHE INTERNAL "") + return() + endif() + endforeach() + + message(FATAL_ERROR "Could not include any HDF5 config headers") +endfunction() + + +# Check for an HDF5 feature macro named FEATURE and store the result in VAR +# +# This just wraps `check_c_source_compiles` but ensures we use the correct header +function(check_hdf5_feature VAR FEATURE) + if (NOT _H5_FEATURE_HEADER) + check_hdf5_feature_header() + endif() + + include(CheckCSourceCompiles) + set(CMAKE_REQUIRED_INCLUDES ${HDF5_INCLUDE_DIR}) + + message(STATUS "Checking for ${FEATURE}") + check_c_source_compiles(" +#include <${_H5_FEATURE_HEADER}> +#if !${FEATURE} +#error +#endif +int main() {}" + _has_${FEATURE}) + + set(${VAR} ${_has_${FEATURE}} PARENT_SCOPE) +endfunction() diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 6c9e9a62c5..154de78a17 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -304,18 +304,13 @@ IF(USE_HDF5) INCLUDE_DIRECTORIES(${HAVE_HDF5_H}) ENDIF(NOT HAVE_HDF5_H) - set (CMAKE_REQUIRED_INCLUDES ${HDF5_INCLUDE_DIR}) + include(cmake/check_hdf5.cmake) # Check to ensure that HDF5 was built with zlib. # This needs to be near the beginning since we # need to know whether to add "-lz" to the symbol # tests below. - CHECK_C_SOURCE_COMPILES("#include - #if !H5_HAVE_ZLIB_H - #error - #endif - int main() { - int x = 1;}" HAVE_HDF5_ZLIB) + check_hdf5_feature(HAVE_HDF5_ZLIB H5_HAVE_ZLIB_H) IF(NOT HAVE_HDF5_ZLIB) MESSAGE(FATAL_ERROR "HDF5 was built without zlib. Rebuild HDF5 with zlib.") ELSE() @@ -332,16 +327,8 @@ IF(USE_HDF5) MESSAGE(STATUS "HDF5 has zlib.") ENDIF() - #Check to see if H5Z_SZIP exists in HDF5_Libraries. If so, we must use szip library. - CHECK_C_SOURCE_COMPILES("#include - #if !H5_HAVE_FILTER_SZIP - #error - #endif - int main() { - int x = 1;}" USE_HDF5_SZIP) - IF(USE_HDF5_SZIP) - SET(HAVE_H5Z_SZIP yes ) - ENDIF() + # Check to see if H5Z_SZIP exists in HDF5_Libraries. If so, we must use szip library. + check_hdf5_feature(HAVE_H5Z_SZIP H5_HAVE_FILTER_SZIP) #### # Check to see if HDF5 library is 1.10.6 or greater. 
@@ -651,4 +638,4 @@ ENDIF() ################################ IF(ENABLE_DOXYGEN) FIND_PACKAGE(Doxygen REQUIRED) -ENDIF() \ No newline at end of file +ENDIF() From a40c7847f98a97f541761fcfef36e2ebc12119dd Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Thu, 18 Jan 2024 15:02:48 -0600 Subject: [PATCH 29/33] Update CMakeLists.txt Co-authored-by: Peter Hill --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c51e34fd4c..f559649a69 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -408,7 +408,7 @@ if(NC_EXTRA_DEPS) message(STATUS "Found ${_LIB}: ${${_LIB}_DEP}") endif() set(EXTRA_DEPS ${EXTRA_DEPS} "${${_LIB}_DEP}") - endforach() + endforeach() message("Extra deps: ${EXTRA_DEPS}") list(REMOVE_DUPLICATES EXTRA_DEPS) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${EXTRA_DEPS}) From 05ce85be78de7d78c760b1583de60ef52f70c66c Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Thu, 18 Jan 2024 15:07:13 -0600 Subject: [PATCH 30/33] more syntax fixes --- CMakeLists.txt | 2 +- cmake/netcdf_functions_macros.cmake | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f559649a69..602f3dc3a1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1587,7 +1587,7 @@ foreach(_LIB ${ALL_TLL_LIBS}) list(APPEND NC_LIBS "-l${_NAME}") get_filename_component(_LIB_DIR ${_LIB} PATH) list(APPEND LINKFLAGS "-L${_LIB_DIR}") -endforach() +endforeach() #set(NC_LIBS "-lnetcdf ${NC_LIBS}") if(NC_LIBS) diff --git a/cmake/netcdf_functions_macros.cmake b/cmake/netcdf_functions_macros.cmake index f615f50274..27b6b0d49b 100644 --- a/cmake/netcdf_functions_macros.cmake +++ b/cmake/netcdf_functions_macros.cmake @@ -82,7 +82,7 @@ macro(GEN_m4 filename) # If m4 isn't present, and the generated file doesn't exist, # it cannot be generated and an error should be thrown. - if(not HAVE_M4) + if(NOT HAVE_M4) if(NOT EXISTS ${fallbackdest}) message(FATAL_ERROR "m4 is required to generate ${filename}.c. Please install m4 so that it is on the PATH and try again.") else() From 42b3f5582e7c1032f2b6ff49442b102c9d2deb9d Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Mon, 22 Jan 2024 12:37:35 -0800 Subject: [PATCH 31/33] Added a comment block for future reference. --- libdap2/CMakeLists.txt | 14 ++++++++++++++ libdap4/CMakeLists.txt | 9 +++++++++ libdispatch/CMakeLists.txt | 9 +++++++++ libsrc/CMakeLists.txt | 9 +++++++++ nc_test/CMakeLists.txt | 10 ++++++++++ ncdump/CMakeLists.txt | 9 +++++++++ ncgen/CMakeLists.txt | 9 +++++++++ nctest/CMakeLists.txt | 10 ++++++++++ 8 files changed, 79 insertions(+) diff --git a/libdap2/CMakeLists.txt b/libdap2/CMakeLists.txt index eae80d0acd..141c030250 100644 --- a/libdap2/CMakeLists.txt +++ b/libdap2/CMakeLists.txt @@ -6,6 +6,20 @@ # See netcdf-c/COPYRIGHT file for more info. SET(dap2_SOURCES constraints.c dapcvt.c dapodom.c daputil.c ncdaperr.c cdf.c cache.c dapdump.c dapdebug.c dapattr.c ncd2dispatch.c getvara.c dceconstraints.c dcetab.c dceparse.c dcelex.c) +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. 
+# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## +set_property(SOURCE ncd2dispatch.c + PROPERTY + SKIP_UNITY_BUILD_INCLUSION ON) + + set_property(SOURCE ncd2dispatch.c PROPERTY SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/libdap4/CMakeLists.txt b/libdap4/CMakeLists.txt index 1f3d0a9ffc..4e0a726ea5 100644 --- a/libdap4/CMakeLists.txt +++ b/libdap4/CMakeLists.txt @@ -6,6 +6,15 @@ # See netcdf-c/COPYRIGHT file for more info. SET(dap4_SOURCES d4curlfunctions.c d4fix.c d4data.c d4file.c d4parser.c d4meta.c d4varx.c d4dump.c d4swap.c d4chunk.c d4printer.c d4read.c d4http.c d4util.c d4odom.c d4cvt.c d4debug.c ncd4dispatch.c) +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. +# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(SOURCE d4meta.c PROPERTY SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/libdispatch/CMakeLists.txt b/libdispatch/CMakeLists.txt index 9a5a705a10..39bd77e41b 100644 --- a/libdispatch/CMakeLists.txt +++ b/libdispatch/CMakeLists.txt @@ -8,6 +8,15 @@ SET(libdispatch_SOURCES dcopy.c dfile.c ddim.c datt.c dattinq.c dattput.c dattge daux.c dinstance.c dinstance_intern.c dcrc32.c dcrc32.h dcrc64.c ncexhash.c ncxcache.c ncjson.c ds3util.c dparallel.c dmissing.c) +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. +# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(SOURCE dinstance_intern.c dinstance.c dvarput.c PROPERTY SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/libsrc/CMakeLists.txt b/libsrc/CMakeLists.txt index 9654422225..8e7398f75c 100644 --- a/libsrc/CMakeLists.txt +++ b/libsrc/CMakeLists.txt @@ -4,6 +4,15 @@ SET(libsrc_SOURCES v1hpg.c putget.c attr.c nc3dispatch.c nc3internal.c var.c dim.c ncx.c lookup3.c ncio.c) +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. +# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(SOURCE httpio.c posixio.c mmapio.c PROPERTY SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/nc_test/CMakeLists.txt b/nc_test/CMakeLists.txt index 3ea7113ca5..ba9c9f6d30 100644 --- a/nc_test/CMakeLists.txt +++ b/nc_test/CMakeLists.txt @@ -34,6 +34,16 @@ TARGET_LINK_LIBRARIES(nc_test netcdf ${HAVE_LIBM} ) + +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. 
+# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(TARGET nc_test PROPERTY UNITY_BUILD OFF) # Some extra stand-alone tests diff --git a/ncdump/CMakeLists.txt b/ncdump/CMakeLists.txt index 5ca76c2a37..1e7e9313bb 100644 --- a/ncdump/CMakeLists.txt +++ b/ncdump/CMakeLists.txt @@ -24,6 +24,15 @@ SET(ncpathcvt_FILES ncpathcvt.c ${XGETOPTSRC}) SET(ncfilteravail_FILES ncfilteravail.c ${XGETOPTSRC}) SET(nchdf5version_FILES nchdf5version.c) +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. +# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(SOURCE dumplib.c PROPERTY SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/ncgen/CMakeLists.txt b/ncgen/CMakeLists.txt index 4d218d3e46..6ed5d87ed3 100644 --- a/ncgen/CMakeLists.txt +++ b/ncgen/CMakeLists.txt @@ -22,6 +22,15 @@ util.c bytebuffer.h data.h debug.h dump.h generate.h generr.h genlib.h includes.h list.h ncgen.h ncgeny.h util.h ${XGETOPTSRC}) +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. +# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(SOURCE data.c PROPERTY SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/nctest/CMakeLists.txt b/nctest/CMakeLists.txt index e4c5ea210f..434716d3c1 100644 --- a/nctest/CMakeLists.txt +++ b/nctest/CMakeLists.txt @@ -16,6 +16,16 @@ FILE(COPY ${COPY_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) ADD_EXECUTABLE(nctest ${nctest_SRC}) TARGET_LINK_LIBRARIES(nctest netcdf) ADD_TEST(nctest ${EXECUTABLE_OUTPUT_PATH}/nctest) + +## +# Turn off inclusion of particular files when using the cmake-native +# option to turn on Unity Builds. +# +# For more information, see: +# * https://github.com/Unidata/netcdf-c/pull/2839/ +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD.html +# * https://cmake.org/cmake/help/latest/prop_tgt/UNITY_BUILD_MODE.html#prop_tgt:UNITY_BUILD_MODE +## set_property(TARGET nctest PROPERTY UNITY_BUILD OFF) add_bin_test_no_prefix(tst_rename) From df7704343a477bbfef0c7e133fdc28b137ce0df3 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Mon, 22 Jan 2024 12:41:29 -0800 Subject: [PATCH 32/33] Updated release notes. --- RELEASE_NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 5dd9403045..09c38bf65b 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -7,6 +7,7 @@ This file contains a high-level description of this package's evolution. Release ## 4.9.3 - TBD +* Added infrastructure to allow for `CMAKE_UNITY_BUILD`, @jschueller. See [Github #2839](https://github.com/Unidata/netcdf-c/pull/2839) for more information. * Obviate a number of irrelevant warnings. See [Github #2781](https://github.com/Unidata/netcdf-c/pull/2781). * Improve the speed and data quantity for DAP4 queries. See [Github #2765](https://github.com/Unidata/netcdf-c/pull/2765). 
* Remove the use of execinfo to programmatically dump the stack; it never worked. See [Github #2789](https://github.com/Unidata/netcdf-c/pull/2789). From 9faaa05f8f6355a7849b02ea56f3b49fa56bd27f Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Mon, 22 Jan 2024 13:24:16 -0800 Subject: [PATCH 33/33] Escape a character causing a doxygen error. --- RELEASE_NOTES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index b443b12fdf..b248365579 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -8,7 +8,7 @@ This file contains a high-level description of this package's evolution. Release ## 4.9.3 - TBD -* Added infrastructure to allow for `CMAKE_UNITY_BUILD`, @jschueller. See [Github #2839](https://github.com/Unidata/netcdf-c/pull/2839) for more information. +* Added infrastructure to allow for `CMAKE_UNITY_BUILD`, (thanks \@jschueller). See [Github #2839](https://github.com/Unidata/netcdf-c/pull/2839) for more information. * [cmake] Move dependency management out of the root-level `CMakeLists.txt` into two different files in the `cmake/` folder, `dependencies.cmake` and `netcdf_functions_macros.cmake`. See [Github #2838](https://github.com/Unidata/netcdf-c/pull/2838/) for more information. * Obviate a number of irrelevant warnings. See [Github #2781](https://github.com/Unidata/netcdf-c/pull/2781). * Improve the speed and data quantity for DAP4 queries. See [Github #2765](https://github.com/Unidata/netcdf-c/pull/2765).
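
Note on the unity-build changes (PATCH 31/33 through 33/33): the comment blocks added above refer to CMake's built-in `CMAKE_UNITY_BUILD` support, available since CMake 3.16, which the patches themselves never show being switched on. A minimal sketch of how a user might exercise it when configuring netcdf-c; the `netcdf-c` source path and `build` directory names below are illustrative assumptions, not taken from the patches:

    # Configure with unity builds enabled (requires CMake >= 3.16).
    # The per-source SKIP_UNITY_BUILD_INCLUSION properties and the
    # per-target UNITY_BUILD OFF settings added in PATCH 31/33 keep the
    # known-problematic sources out of the combined translation units.
    cmake -S netcdf-c -B build -DCMAKE_UNITY_BUILD=ON
    cmake --build build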