Author:    Thomas Danckaert <thomas.danckaert@gmail.com>  2016-09-30 09:59:08 +0200
Committer: Ludovic Courtès <ludo@gnu.org>                 2016-10-24 23:05:20 +0200
Commit:    d3c4f3bb00b9e04b7bdb2847f21e33292b103c21
Tree:      3634a5946338b5a389181ff2f9eb2633508ee83e /gnu
Parent:    3b5783fcd94a7e213b37e1d0008403fec68db70f
gnu: Add hdf-eos2.
* gnu/packages/maths.scm (hdf-eos2): New variable.
* gnu/packages/patches/hdf-eos2-build-shared.patch: New file.
* gnu/packages/patches/hdf-eos2-remove-gctp.patch: New file.
* gnu/packages/patches/hdf-eos2-fortrantests.patch: New file.
* gnu/local.mk (dist_patch_DATA): Add patches.

Signed-off-by: Ludovic Courtès <ludo@gnu.org>
Diffstat (limited to 'gnu')
-rw-r--r--  gnu/local.mk                                        3
-rw-r--r--  gnu/packages/maths.scm                             35
-rw-r--r--  gnu/packages/patches/hdf-eos2-build-shared.patch   25
-rw-r--r--  gnu/packages/patches/hdf-eos2-fortrantests.patch  329
-rw-r--r--  gnu/packages/patches/hdf-eos2-remove-gctp.patch    55
5 files changed, 447 insertions, 0 deletions
diff --git a/gnu/local.mk b/gnu/local.mk
index 0d400e93e4..3e900b14fb 100644
--- a/gnu/local.mk
+++ b/gnu/local.mk
@@ -590,6 +590,9 @@ dist_patch_DATA = \
%D%/packages/patches/hdf4-reproducibility.patch \
%D%/packages/patches/hdf4-shared-fortran.patch \
%D%/packages/patches/hdf5-config-date.patch \
+ %D%/packages/patches/hdf-eos2-build-shared.patch \
+ %D%/packages/patches/hdf-eos2-remove-gctp.patch \
+ %D%/packages/patches/hdf-eos2-fortrantests.patch \
%D%/packages/patches/hdf-eos5-build-shared.patch \
%D%/packages/patches/hdf-eos5-remove-gctp.patch \
%D%/packages/patches/hdf-eos5-fix-szip.patch \
diff --git a/gnu/packages/maths.scm b/gnu/packages/maths.scm
index d41a3f11bf..0cdf4f74b5 100644
--- a/gnu/packages/maths.scm
+++ b/gnu/packages/maths.scm
@@ -544,6 +544,41 @@ extremely large and complex data collections.")
(license (license:x11-style
"http://www.hdfgroup.org/ftp/HDF5/current/src/unpacked/COPYING"))))
+(define-public hdf-eos2
+ (package
+ (name "hdf-eos2")
+ (version "19.1.0")
+ (source
+ (origin
+ (method url-fetch)
+ (uri "ftp://edhs1.gsfc.nasa.gov\
+/edhs/hdfeos/latest_release/HDF-EOS2.19v1.00.tar.Z")
+ (sha256
+ (base32 "0c9fcz25s292ldap12wxmlrvnyz99z24p63d8fwx51bf8s0s1zrz"))
+ (patches (search-patches "hdf-eos2-remove-gctp.patch"
+ "hdf-eos2-build-shared.patch"
+ "hdf-eos2-fortrantests.patch"))))
+ (build-system gnu-build-system)
+ (native-inputs
+ `(("gfortran" ,gfortran)))
+ (inputs
+ `(("hdf4" ,hdf4-alt) ; assume most HDF-EOS2 users won't use the HDF4 netCDF API
+ ("zlib" ,zlib)
+ ("libjpeg" ,libjpeg)
+ ("gctp" ,gctp)))
+ (arguments
+ `( #:configure-flags '("--enable-install-include" "--enable-shared"
+ "CC=h4cc -Df2cFortran" "LIBS=-lgctp")
+ #:parallel-tests? #f))
+ (home-page "http://hdfeos.org/software/library.php#HDF-EOS2")
+ (synopsis "HDF4-based data format for NASA's Earth Observing System")
+ (description "HDF-EOS2 is a software library built on HDF4 which supports
+the construction of data structures used in NASA's Earth Observing
+System (Grid, Point and Swath).")
+
+ ;; Source files carry a permissive license header.
+ (license (license:non-copyleft home-page))))
+
(define-public hdf-eos5
(package
(name "hdf-eos5")
diff --git a/gnu/packages/patches/hdf-eos2-build-shared.patch b/gnu/packages/patches/hdf-eos2-build-shared.patch
new file mode 100644
index 0000000000..71112e38c2
--- /dev/null
+++ b/gnu/packages/patches/hdf-eos2-build-shared.patch
@@ -0,0 +1,25 @@
+Changes necessary for shared library linking to succeed.
+
+diff --git a/src/Makefile.in b/src/Makefile.in
+index 9534473..12411bf 100644
+--- a/src/Makefile.in
++++ b/src/Makefile.in
+@@ -73,7 +73,7 @@ LTCOMPILE = $(LIBTOOL) --mode=compile --tag=CC $(CC) $(DEFS) \
+ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+ $(AM_CFLAGS) $(CFLAGS)
+ CCLD = $(CC)
+-LINK = $(LIBTOOL) --mode=link --tag=CC $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
++LINK = HDF4_USE_SHLIB=yes $(LIBTOOL) --mode=link --tag=CC $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+ $(AM_LDFLAGS) $(LDFLAGS) -o $@
+ SOURCES = $(libhdfeos_la_SOURCES)
+ DIST_SOURCES = $(libhdfeos_la_SOURCES)
+@@ -125,8 +125,6 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@
+ INSTALL_SCRIPT = @INSTALL_SCRIPT@
+ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+
+-# Set LDFLAGS to alow the HDF-EOS library to use extern variables from HDF4
+-LDFLAGS = -Wl,-single_module
+ LIBOBJS = @LIBOBJS@
+ LIBS = @LIBS@
+ LIBTOOL = @LIBTOOL@
+
diff --git a/gnu/packages/patches/hdf-eos2-fortrantests.patch b/gnu/packages/patches/hdf-eos2-fortrantests.patch
new file mode 100644
index 0000000000..adecc184cb
--- /dev/null
+++ b/gnu/packages/patches/hdf-eos2-fortrantests.patch
@@ -0,0 +1,329 @@
+Fix multi-line string formatting in fortran test programs (reported upstream).
+
+diff --git a/samples/appendfield.f b/samples/appendfield.f
+index 42c4b6b..58257f7 100644
+--- a/samples/appendfield.f
++++ b/samples/appendfield.f
+@@ -22,8 +22,8 @@ c
+ inarray(i) = i
+ enddo
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer_o
+- 1f_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+ swid = swattach(swfid, "Swath1")
+
+
+diff --git a/samples/definefields.f b/samples/definefields.f
+index 89859e4..f3b3497 100644
+--- a/samples/definefields.f
++++ b/samples/definefields.f
+@@ -24,8 +24,8 @@ c DFACC_RDWR accesscode in the open statement. The SWopen
+ c routine returns the swath fileid, swfid, which is used to
+ c identify the file in subsequent routines.
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+
+ c
+diff --git a/samples/definegdflds.f b/samples/definegdflds.f
+index 177422e..1b7fcf6 100644
+--- a/samples/definegdflds.f
++++ b/samples/definegdflds.f
+@@ -21,8 +21,8 @@
+ fillval1=-7.0
+ fillval2=-9999.0
+
+- gdfid = gdopen("GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+
+ gdid1 = gdattach(gdfid, "UTMGrid")
+diff --git a/samples/definelevels.f b/samples/definelevels.f
+index 2496d5f..64b2842 100644
+--- a/samples/definelevels.f
++++ b/samples/definelevels.f
+@@ -32,8 +32,8 @@ c DFACC_RDWR access code in the open statement. The ptopen
+ c routine returns the point fileid, ptfid, which is used to
+ c identify the file in subsequent routines.
+
+- ptfid = ptopen("PointFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ ptfid = ptopen("PointFile_created_with_hadeos_sample_file_write"//
++ 1 "r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+
+ c
+diff --git a/samples/inquiregrid.f b/samples/inquiregrid.f
+index 8110461..8ce71e4 100644
+--- a/samples/inquiregrid.f
++++ b/samples/inquiregrid.f
+@@ -18,8 +18,8 @@
+
+
+
+- gdfid = gdopen('GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf', DFACC_READ)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+
+ if (gdfid .ne. -1) then
+diff --git a/samples/inquireswath.f b/samples/inquireswath.f
+index 899ee59..78c292b 100644
+--- a/samples/inquireswath.f
++++ b/samples/inquireswath.f
+@@ -24,8 +24,8 @@ c
+ c Open the Swath File for read only access
+ c
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+
+ if (swfid .NE. -1) then
+diff --git a/samples/readdimscalegrid.f b/samples/readdimscalegrid.f
+index fed5540..a0bb48a 100644
+--- a/samples/readdimscalegrid.f
++++ b/samples/readdimscalegrid.f
+@@ -34,8 +34,8 @@
+ ! * id, gdfid, which is used to identify the file in subsequent routines.
+ ! */
+
+- gdfid = gdopen("GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+ ! /*
+ ! * If the grid file cannot be found, gdopen will return -1 for the file
+diff --git a/samples/readdimscaleswath.f b/samples/readdimscaleswath.f
+index 97b6264..1b61624 100644
+--- a/samples/readdimscaleswath.f
++++ b/samples/readdimscaleswath.f
+@@ -33,8 +33,8 @@
+ ! * id, swfid, which is used to identify the file in subsequent routines.
+ ! */
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+ ! /*
+ ! * If the swath file cannot be found, swopen will return -1 for the file
+diff --git a/samples/readfields.f b/samples/readfields.f
+index 873b30a..29d42f0 100644
+--- a/samples/readfields.f
++++ b/samples/readfields.f
+@@ -21,8 +21,8 @@ c
+ c Open the HDF swath file, "SwathFile.hdf"
+ c
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+
+ if (swfid .NE. -1) then
+diff --git a/samples/readgdflds.f b/samples/readgdflds.f
+index e5fe85f..ff2bd86 100644
+--- a/samples/readgdflds.f
++++ b/samples/readgdflds.f
+@@ -9,8 +9,8 @@
+ integer DFNT_FLOAT32
+ parameter (DFNT_FLOAT32=5)
+
+- gdfid = gdopen("GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+ if (gdfid .ne. -1) then
+
+diff --git a/samples/readlevels.f b/samples/readlevels.f
+index a7fd033..f349398 100644
+--- a/samples/readlevels.f
++++ b/samples/readlevels.f
+@@ -36,8 +36,8 @@ c
+ c Open the HDF swath file, "PointFile.hdf".
+ c
+
+- ptfid = ptopen("PointFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
++ ptfid = ptopen("PointFile_created_with_hadeos_sample_file_write"//
++ + "r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+ c
+ c Read Simple Point
+@@ -47,6 +47,8 @@ c
+ status = ptlevinfo(ptid, 0, fldlist, fldtype, fldorder)
+ n = ptnrecs(ptid, 0)
+
++ write(*,*) n
++
+ do 5 i=1,n
+ recs(i) = i - 1
+ 5 continue
+diff --git a/samples/setupgrid.f b/samples/setupgrid.f
+index be5408c..cf4bd04 100644
+--- a/samples/setupgrid.f
++++ b/samples/setupgrid.f
+@@ -34,8 +34,8 @@ c code in the open statement. The GDopen routine returns the grid
+ c file id, gdfid, which is used to identify the file in subsequent
+ c routines in the library.
+ c
+- gdfid = gdopen('GridFile_created_with_hadeos_sample_file_writer_of
+- 1_HDFEOS2_version_219_or_higher_release.hdf',DFACC_CREATE)
++ gdfid = gdopen('GridFile_created_with_hadeos_sample_file_writer_"//
++ 1"of_HDFEOS2_version_219_or_higher_release.hdf',DFACC_CREATE)
+
+ c
+ c Create UTM Grid
+diff --git a/samples/setupswath.f b/samples/setupswath.f
+index d0289d6..fbaa0bb 100644
+--- a/samples/setupswath.f
++++ b/samples/setupswath.f
+@@ -22,8 +22,8 @@ c code in the open statement. The SWopen routine returns the swath
+ c file id, swfid, which is used to identify the file in subsequent
+ c routines in the library.
+ c
+- swfid = swopen('SwathFile_created_with_hadeos_sample_file_writer_o
+- 1f_HDFEOS2_version_219_or_higher_release.hdf',DFACC_CREATE)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf",DFACC_CREATE)
+
+ c
+ c The first of these, SWcreate, creates the swath, "Swath1", within the
+diff --git a/samples/subsetgrid.f b/samples/subsetgrid.f
+index c57e541..087e5b1 100644
+--- a/samples/subsetgrid.f
++++ b/samples/subsetgrid.f
+@@ -22,8 +22,8 @@ c
+ c Open the HDF grid file, "GridFile.hdf"
+ c
+
+- gdfid = gdopen("GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+ if (gdfid .NE. -1) then
+
+diff --git a/samples/subsetpoint.f b/samples/subsetpoint.f
+index 9e72c5f..2e76d7d 100644
+--- a/samples/subsetpoint.f
++++ b/samples/subsetpoint.f
+@@ -21,8 +21,8 @@ c
+ c Open the HDF point file, "PointFile.hdf"
+ c
+
+- ptfid = ptopen("PointFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
++ ptfid = ptopen("PointFile_created_with_hadeos_sample_file_write"//
++ 1 "r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+ if (ptfid .NE. -1) then
+
+diff --git a/samples/subsetswath.f b/samples/subsetswath.f
+index dcee609..9af8a46 100644
+--- a/samples/subsetswath.f
++++ b/samples/subsetswath.f
+@@ -28,8 +28,8 @@ c
+ c Open the HDF swath file, "SwathFile.hdf"
+ c
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_READ)
+
+ if (swfid .NE. -1) then
+
+diff --git a/samples/writedimscalegrid.f b/samples/writedimscalegrid.f
+index 09688d8..42013fe 100644
+--- a/samples/writedimscalegrid.f
++++ b/samples/writedimscalegrid.f
+@@ -29,8 +29,8 @@
+ ! * id, gdfid, which is used to identify the file in subsequent routines.
+ ! */
+
+- gdfid = gdopen("GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+ ! /*
+ ! * If the grid file cannot be found, gdopen will return -1 for the file
+diff --git a/samples/writedimscaleswath.f b/samples/writedimscaleswath.f
+index 1151671..1a911a6 100644
+--- a/samples/writedimscaleswath.f
++++ b/samples/writedimscaleswath.f
+@@ -31,8 +31,8 @@
+ ! * id, swfid, which is used to identify the file in subsequent routines.
+ ! */
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_write
+- 1r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+ ! /*
+ ! * If the swath file cannot be found, swopen will return -1 for the file
+diff --git a/samples/writefields.f b/samples/writefields.f
+index a743661..862b96c 100644
+--- a/samples/writefields.f
++++ b/samples/writefields.f
+@@ -31,8 +31,8 @@ c
+ c Open the HDF swath file, "SwathFile.hdf"
+ c
+
+- swfid = swopen("SwathFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ swfid = swopen("SwathFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+
+ if (swfid .NE. -1) then
+diff --git a/samples/writegdflds.f b/samples/writegdflds.f
+index d1540b3..81aef75 100644
+--- a/samples/writegdflds.f
++++ b/samples/writegdflds.f
+@@ -23,8 +23,8 @@
+ enddo
+
+
+- gdfid = gdopen("GridFile_created_with_hadeos_sample_file_writer_
+- 1of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ gdfid = gdopen("GridFile_created_with_hadeos_sample_file_write"//
++ 1"r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+ if (gdfid .ne. -1) then
+
+diff --git a/samples/writelevels.f b/samples/writelevels.f
+index 88e7780..cb40c9e 100644
+--- a/samples/writelevels.f
++++ b/samples/writelevels.f
+@@ -32,8 +32,8 @@ c
+ c Open the HDF point file, "PointFile.hdf".
+ c
+
+- ptfid = ptopen("PointFile_created_with_hadeos_sample_file_writer
+- 1_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
++ ptfid = ptopen("PointFile_created_with_hadeos_sample_file_write"//
++ + "r_of_HDFEOS2_version_219_or_higher_release.hdf", DFACC_RDWR)
+
+
+ c
+--
+2.10.0
+
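The hdf-eos2-fortrantests.patch above comes down to one fixed-form Fortran detail: when a character literal is left open across a continuation line, the blank columns up to column 72 of the first line become part of the string under the usual fixed-form rules, so the long sample file names can come out with embedded blanks. The patched samples instead close each literal early and join the pieces with the // concatenation operator. The short program below is a minimal sketch of the difference, not part of the commit; the program and variable names are illustrative only and do not come from the samples.

c     Sketch only: contrast an open (continued) character literal with
c     the concatenation style used by the patch.  Not part of the commit.
      program contdemo
      character*120 broken, fixed

c     Continued literal: the blanks from the last quoted character up to
c     column 72 of the first line are included in the string value.
      broken = "SwathFile_created_with_hadeos_sample_file_writer
     1_of_HDFEOS2_version_219_or_higher_release.hdf"

c     Patched style: two short literals joined with //, no stray blanks.
      fixed = "SwathFile_created_with_hadeos_sample_file_write" //
     1"r_of_HDFEOS2_version_219_or_higher_release.hdf"

      write(*,*) broken
      write(*,*) fixed
      end
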
diff --git a/gnu/packages/patches/hdf-eos2-remove-gctp.patch b/gnu/packages/patches/hdf-eos2-remove-gctp.patch
new file mode 100644
index 0000000000..0776c0685e
--- /dev/null
+++ b/gnu/packages/patches/hdf-eos2-remove-gctp.patch
@@ -0,0 +1,55 @@
+Don't build the GCTP bundled with the source and link with the
+system's -lgctp instead. We also remove references to the
+"testdrivers" directory, which is not distributed together with the
+source, causing autoreconf to fail.
+
+diff --git a/Makefile.in b/Makefile.in
+index d468af2..90428a7 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -206,7 +206,7 @@ LIBGCTP = $(top_builddir)/gctp/src/libGctp.la
+ @TESTDRIVERS_CONDITIONAL_TRUE@TESTDRIVERS = testdrivers
+ @INSTALL_INCLUDE_CONDITIONAL_FALSE@INCLUDE =
+ @INSTALL_INCLUDE_CONDITIONAL_TRUE@INCLUDE = include
+-SUBDIRS = gctp src $(INCLUDE) samples $(TESTDRIVERS)
++SUBDIRS = src $(INCLUDE) samples $(TESTDRIVERS)
+ all: all-recursive
+
+ .SUFFIXES:
+diff --git a/include/Makefile.in b/include/Makefile.in
+index 9938b23..afb7f40 100644
+--- a/include/Makefile.in
++++ b/include/Makefile.in
+@@ -190,7 +190,7 @@ LIBGCTP = $(top_builddir)/gctp/src/libGctp.la
+ # Boilerplate include
+
+ # Headers to install
+-include_HEADERS = HE2_config.h HdfEosDef.h HDFEOSVersion.h cfortHdf.h ease.h
++include_HEADERS = HdfEosDef.h HDFEOSVersion.h cfortHdf.h ease.h
+ all: HE2_config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-am
+
+diff --git a/samples/Makefile.in b/samples/Makefile.in
+index 9da6e28..6a6186c 100644
+--- a/samples/Makefile.in
++++ b/samples/Makefile.in
+@@ -108,7 +108,6 @@ AppendField_SOURCES = AppendField.c
+ AppendField_OBJECTS = AppendField.$(OBJEXT)
+ AppendField_LDADD = $(LDADD)
+ am__DEPENDENCIES_1 = $(top_builddir)/src/libhdfeos.la
+-am__DEPENDENCIES_2 = $(top_builddir)/gctp/src/libGctp.la
+ AppendField_DEPENDENCIES = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_2)
+ DefineFields_SOURCES = DefineFields.c
+ DefineFields_OBJECTS = DefineFields.$(OBJEXT)
+@@ -481,7 +480,7 @@ sharedstatedir = @sharedstatedir@
+ sysconfdir = @sysconfdir@
+ target_alias = @target_alias@
+ LIBHDFEOS2 = $(top_builddir)/src/libhdfeos.la
+-LIBGCTP = $(top_builddir)/gctp/src/libGctp.la
++LIBGCTP =
+
+ # Boilerplate definitions file
+
+--
+2.10.0
+