[gsw] 01/07: Upstream 3.2.0
Alastair McKinstry
mckinstry at moszumanska.debian.org
Wed Sep 20 15:36:36 UTC 2017
This is an automated email from the git hooks/post-receive script.
mckinstry pushed a commit to branch debian/master
in repository gsw.
commit e8890b086eaf7b5cc0bfd2b33fa918d700b2020c
Author: Alastair McKinstry <mckinstry at debian.org>
Date: Tue Sep 19 06:57:42 2017 +0100
Upstream 3.2.0
---
MANIFEST.in | 2 +
PKG-INFO | 4 +-
README.rst | 2 +
gsw.egg-info/PKG-INFO | 4 +-
gsw.egg-info/SOURCES.txt | 5 +
gsw.egg-info/requires.txt | 1 +
gsw/__init__.py | 6 +-
gsw/_version.py | 21 +
gsw/_wrapped_ufuncs.py | 2 +-
gsw/geostrophy.py | 81 +-
gsw/utility.py | 64 ++
setup.cfg | 8 +
setup.py | 35 +-
src/c_gsw/gsw_oceanographic_toolbox.c | 625 +++++++++-
src/c_gsw/gswteos-10.h | 8 +
src/method_bodies.c | 150 ++-
src/method_def_entries.c | 4 +
src2/c_gsw/gsw_oceanographic_toolbox.cpp | 621 +++++++++-
src2/c_gsw/gswteos-10.h | 8 +
src2/c_gsw/gswteos-10_cpp.h | 8 +
src2/method_bodies.c | 150 ++-
src2/method_def_entries.c | 4 +
versioneer.py | 1822 ++++++++++++++++++++++++++++++
23 files changed, 3512 insertions(+), 123 deletions(-)
diff --git a/MANIFEST.in b/MANIFEST.in
index 82b4fcd..b387719 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -3,3 +3,5 @@ include README.rst
recursive-include src *
recursive-include src2 *
# recursive-include docs *
+include versioneer.py
+include gsw/_version.py
diff --git a/PKG-INFO b/PKG-INFO
index b160b6d..de35cee 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.2
Name: gsw
-Version: 3.1.1
+Version: 3.2.0
Summary: Gibbs Seawater Oceanographic Package of TEOS-10
Home-page: https://github.com/TEOS-10/GSW-python
Author: ['Eric Firing', 'Filipe Fernandes']
@@ -41,6 +41,8 @@ Description: gsw Python package
.. image:: https://travis-ci.org/TEOS-10/GSW-Python.svg?branch=master
:target: https://travis-ci.org/TEOS-10/GSW-Python
+ .. image:: https://anaconda.org/conda-forge/gsw/badges/installer/conda.svg
+ :target: https://conda.anaconda.org/conda-forge
This Python implementation of the Thermodynamic Equation of
Seawater 2010 (TEOS-10) is based primarily on numpy ufunc wrappers of
diff --git a/README.rst b/README.rst
index 22e2ca3..39a5095 100644
--- a/README.rst
+++ b/README.rst
@@ -3,6 +3,8 @@ gsw Python package
.. image:: https://travis-ci.org/TEOS-10/GSW-Python.svg?branch=master
:target: https://travis-ci.org/TEOS-10/GSW-Python
+.. image:: https://anaconda.org/conda-forge/gsw/badges/installer/conda.svg
+ :target: https://conda.anaconda.org/conda-forge
This Python implementation of the Thermodynamic Equation of
Seawater 2010 (TEOS-10) is based primarily on numpy ufunc wrappers of
diff --git a/gsw.egg-info/PKG-INFO b/gsw.egg-info/PKG-INFO
index b160b6d..de35cee 100644
--- a/gsw.egg-info/PKG-INFO
+++ b/gsw.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.2
Name: gsw
-Version: 3.1.1
+Version: 3.2.0
Summary: Gibbs Seawater Oceanographic Package of TEOS-10
Home-page: https://github.com/TEOS-10/GSW-python
Author: ['Eric Firing', 'Filipe Fernandes']
@@ -41,6 +41,8 @@ Description: gsw Python package
.. image:: https://travis-ci.org/TEOS-10/GSW-Python.svg?branch=master
:target: https://travis-ci.org/TEOS-10/GSW-Python
+ .. image:: https://anaconda.org/conda-forge/gsw/badges/installer/conda.svg
+ :target: https://conda.anaconda.org/conda-forge
This Python implementation of the Thermodynamic Equation of
Seawater 2010 (TEOS-10) is based primarily on numpy ufunc wrappers of
diff --git a/gsw.egg-info/SOURCES.txt b/gsw.egg-info/SOURCES.txt
index 909ba26..51e160e 100644
--- a/gsw.egg-info/SOURCES.txt
+++ b/gsw.egg-info/SOURCES.txt
@@ -1,9 +1,12 @@
LICENSE
MANIFEST.in
README.rst
+setup.cfg
setup.py
+versioneer.py
gsw/__init__.py
gsw/_utilities.py
+gsw/_version.py
gsw/_wrapped_ufuncs.py
gsw/conversions.py
gsw/density.py
@@ -12,9 +15,11 @@ gsw/freezing.py
gsw/geostrophy.py
gsw/ice.py
gsw/stability.py
+gsw/utility.py
gsw.egg-info/PKG-INFO
gsw.egg-info/SOURCES.txt
gsw.egg-info/dependency_links.txt
+gsw.egg-info/requires.txt
gsw.egg-info/top_level.txt
src/_ufuncs.c
src/method_bodies.c
diff --git a/gsw.egg-info/requires.txt b/gsw.egg-info/requires.txt
new file mode 100644
index 0000000..24ce15a
--- /dev/null
+++ b/gsw.egg-info/requires.txt
@@ -0,0 +1 @@
+numpy
diff --git a/gsw/__init__.py b/gsw/__init__.py
index be9225c..76e838f 100644
--- a/gsw/__init__.py
+++ b/gsw/__init__.py
@@ -35,7 +35,9 @@ from ._wrapped_ufuncs import *
from .stability import *
from .geostrophy import *
+from .utility import *
from . import geostrophy
+from . import utility
from . import stability
from . import density
from . import energy
@@ -45,4 +47,6 @@ from . import ice
from .conversions import t90_from_t68
-__version__ = "3.1.1"
+from ._version import get_versions
+__version__ = get_versions()['version']
+del get_versions
diff --git a/gsw/_version.py b/gsw/_version.py
new file mode 100644
index 0000000..e91fe98
--- /dev/null
+++ b/gsw/_version.py
@@ -0,0 +1,21 @@
+
+# This file was generated by 'versioneer.py' (0.18) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+{
+ "date": "2017-09-18T14:58:03-0300",
+ "dirty": false,
+ "error": null,
+ "full-revisionid": "a759761e88bf058fbe6765ecd8e236c33ef87cb4",
+ "version": "3.2.0"
+}
+''' # END VERSION_JSON
+
+
+def get_versions():
+ return json.loads(version_json)
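For reference, a minimal sketch of what the new import-time version lookup
does, using the same names as in the diff above; the hard-coded JSON stands
in for the generated file:

    # gsw/__init__.py now derives __version__ from gsw/_version.py:
    import json

    version_json = '{"version": "3.2.0", "dirty": false, "error": null}'

    def get_versions():
        # The generated _version.py returns the parsed JSON record.
        return json.loads(version_json)

    __version__ = get_versions()['version']   # -> "3.2.0"
    del get_versions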
diff --git a/gsw/_wrapped_ufuncs.py b/gsw/_wrapped_ufuncs.py
index 3bd3c21..21080a6 100644
--- a/gsw/_wrapped_ufuncs.py
+++ b/gsw/_wrapped_ufuncs.py
@@ -4141,7 +4141,7 @@ def t_freezing(SA, p, saturation_fraction):
@match_args_return
def t_freezing_first_derivatives(SA, p, saturation_fraction):
"""
- Calculates the frist derivatives of the in-situ temperature at which
+ Calculates the first derivatives of the in-situ temperature at which
seawater freezes with respect to Absolute Salinity SA and pressure P (in
Pa). These expressions come from differentiating the expression that
defines the freezing temperature, namely the equality between the
diff --git a/gsw/geostrophy.py b/gsw/geostrophy.py
index 47d882d..3a696e3 100644
--- a/gsw/geostrophy.py
+++ b/gsw/geostrophy.py
@@ -15,7 +15,8 @@ __all__ = ['geo_strf_dyn_height',
]
@match_args_return
-def geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0):
+def geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0, max_dp=1.0,
+ interp_method='pchip'):
"""
Dynamic height anomaly as a function of pressure.
@@ -29,8 +30,14 @@ def geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0):
Sea pressure (absolute pressure minus 10.1325 dbar), dbar
p_ref : float or array-like, optional
Reference pressure, dbar
- axis : int, optional
+ axis : int, optional, default is 0
The index of the pressure dimension in SA and CT.
+ max_dp : float
+ If any pressure interval in the input p exceeds max_dp, the dynamic
+ height will be calculated after interpolating to a grid with this
+ spacing.
+ interp_method : string {'pchip', 'linear'}
+ Interpolation algorithm.
Returns
-------
@@ -41,6 +48,10 @@ def geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0):
in an isobaric surface, relative to the reference surface.
"""
+ interp_methods = {'pchip' : 2, 'linear' : 1}
+ if interp_method not in interp_methods:
+ raise ValueError('interp_method must be one of %s'
+ % (interp_methods.keys(),))
if SA.shape != CT.shape:
raise ValueError('Shapes of SA and CT must match; found %s and %s'
% (SA.shape, CT.shape))
@@ -67,14 +78,27 @@ def geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0):
igood = goodmask[ind]
# If p_ref is below the deepest value, skip the profile.
pgood = p[ind][igood]
- # The C function calls the rr68 interpolation, which
- # requires at least 4 "bottles"; but the C function is
- # not checking this, so we need to do so.
- if pgood[-1] >= p_ref and len(pgood) > 3:
- dh[ind][igood] = _gsw_ufuncs.geo_strf_dyn_height(
- SA[ind][igood],
- CT[ind][igood],
- pgood, p_ref)
+ if len(pgood) > 1 and pgood[-1] >= p_ref:
+ sa = SA[ind][igood]
+ ct = CT[ind][igood]
+ # Temporarily add a top (typically surface) point and mixed layer
+ # if p_ref is above the shallowest pressure.
+ if pgood[0] > p_ref:
+ ptop = np.arange(p_ref, pgood[0], max_dp)
+ ntop = len(ptop)
+ sa = np.hstack(([sa[0]]*ntop, sa))
+ ct = np.hstack(([ct[0]]*ntop, ct))
+ pgood = np.hstack((ptop, pgood))
+ else:
+ ntop = 0
+ dh_all = _gsw_ufuncs.geo_strf_dyn_height_1(
+ sa, ct, pgood, p_ref, max_dp,
+ interp_methods[interp_method])
+ if ntop > 0:
+ dh[ind][igood] = dh_all[ntop:]
+ else:
+ dh[ind][igood] = dh_all
+
return dh
@@ -155,17 +179,27 @@ def distance(lon, lat, p=0, axis=-1):
raise ValueError('lon, lat must be 1-D or 2-D with more than one point'
' along axis; found shape %s and axis %s'
% (lon.shape, axis))
+ if lon.ndim == 1:
+ one_d = True
+ lon = lon[np.newaxis, :]
+ lat = lat[np.newaxis, :]
+ axis = -1
+ else:
+ one_d = False
+
+ one_d = one_d and p.ndim == 1
+
+ if axis == 0:
+ indm = (slice(0, -1), slice(None))
+ indp = (slice(1, None), slice(None))
+ else:
+ indm = (slice(None), slice(0, -1))
+ indp = (slice(None), slice(1, None))
if np.all(p == 0):
z = 0
else:
lon, lat, p = np.broadcast_arrays(lon, lat, p)
- if axis == 0:
- indm = (slice(0, -1), slice(None))
- indp = (slice(1, None), slice(None))
- else:
- indm = (slice(None), slice(0, -1))
- indp = (slice(None), slice(1, None))
p_mid = 0.5 * (p[indm] + p[indp])
lat_mid = 0.5 * (lat[indm] + lat[indp])
@@ -178,13 +212,16 @@ def distance(lon, lat, p=0, axis=-1):
dlon = np.diff(lon, axis=axis)
dlat = np.diff(lat, axis=axis)
- a = ((np.sin(dlat / 2)) ** 2 + np.cos(lat[:, :-1]) *
- np.cos(lat[:, 1:]) * (np.sin(dlon / 2)) ** 2)
+ a = ((np.sin(dlat / 2)) ** 2 + np.cos(lat[indm]) *
+ np.cos(lat[indp]) * (np.sin(dlon / 2)) ** 2)
angles = 2 * np.arctan2(np.sqrt(a), np.sqrt(1 - a))
distance = (earth_radius + z) * angles
+ if one_d:
+ distance = distance[0]
+
return distance
@@ -202,9 +239,9 @@ def geostrophic_velocity(geo_strf, lon, lat, p=0, axis=0):
"""
Calculate geostrophic velocity from a streamfunction.
- Calculates geostrophic velocity relative to the sea surface, given a
- geostrophic streamfunction and the position of each station in
- sequence along an ocean section. The data can be from a single
+ Calculates geostrophic velocity relative to a reference pressure,
+ given a geostrophic streamfunction and the position of each station
+ in sequence along an ocean section. The data can be from a single
isobaric or "density" surface, or from a series of such surfaces.
Parameters
@@ -266,4 +303,4 @@ def geostrophic_velocity(geo_strf, lon, lat, p=0, axis=0):
u = np.diff(geo_strf, axis=laxis) / (ds * f(mid_lat))
- return u, mid_lat, mid_lon
+ return u, mid_lon, mid_lat
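A hedged usage sketch of the new keyword arguments (the profile values are
invented for illustration; any pressure gap larger than max_dp is subdivided
and SA/CT are re-interpolated with the chosen method before integrating):

    import numpy as np
    import gsw

    SA = np.array([34.7, 34.9, 35.0])      # Absolute Salinity, g/kg
    CT = np.array([28.0, 10.0, 2.0])       # Conservative Temperature, deg C
    p = np.array([10.0, 500.0, 2000.0])    # sea pressure, dbar

    # interp_method is 'pchip' (default) or 'linear'; max_dp in dbar.
    dh = gsw.geo_strf_dyn_height(SA, CT, p, p_ref=1000.0, axis=0,
                                 max_dp=1.0, interp_method='pchip')

Note also that geostrophic_velocity now returns (u, mid_lon, mid_lat);
callers that unpacked the old (u, mid_lat, mid_lon) order need updating.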
diff --git a/gsw/utility.py b/gsw/utility.py
new file mode 100644
index 0000000..be16263
--- /dev/null
+++ b/gsw/utility.py
@@ -0,0 +1,64 @@
+"""
+Functions not specific to the TEOS-10 realm of variables.
+"""
+
+import numpy as np
+
+from . import _gsw_ufuncs
+from ._utilities import match_args_return, indexer
+
+@match_args_return
+def pchip_interp(x, y, xi, axis=0):
+ """
+ Interpolate using Piecewise Cubic Hermite Interpolating Polynomial
+
+ This is a shape-preserving algorithm; it does not introduce new local
+ extrema. The implementation in C that is wrapped here is largely taken
+ from the scipy implementation,
+ https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.PchipInterpolator.html.
+
+ Points outside the range of the interpolation table are filled using the
+ end values in the table. (In contrast,
+ scipy.interpolate.pchip_interpolate() extrapolates using the end
+ polynomials.)
+
+ Parameters
+ ----------
+ x, y : array-like
+ Interpolation table x and y; n-dimensional, must be broadcastable to
+ the same dimensions.
+ xi : array-like
+ One-dimensional array of new x values.
+ axis : int, optional, default is 0
+ The axis of x and y along which to interpolate.
+
+ Returns
+ -------
+ yi : array
+ Values of y interpolated to xi along the specified axis.
+
+ """
+
+ xi = np.array(xi, dtype=float, copy=False, order='C', ndmin=1)
+ if xi.ndim > 1:
+ raise ValueError('xi must be no more than 1-dimensional')
+ nxi = xi.size
+ x, y = np.broadcast_arrays(x, y)
+ shape0 = x.shape
+ out_shape = list(x.shape)
+ out_shape[axis] = nxi
+ yi = np.empty(out_shape, dtype=float)
+ yi.fill(np.nan)
+
+ goodmask = ~(np.isnan(x) | np.isnan(y))
+
+ order = 'F' if y.flags.fortran else 'C'
+ for ind in indexer(y.shape, axis, order=order):
+ igood = goodmask[ind]
+ # Use only the non-NaN entries of this profile.
+ xgood = x[ind][igood]
+ ygood = y[ind][igood]
+
+ yi[ind][igood] = _gsw_ufuncs.util_pchip_interp(xgood, ygood, xi)
+
+ return yi
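A short usage sketch for the new public wrapper (values invented for
illustration); note the constant end-value fill outside the table, in
contrast to scipy.interpolate.pchip_interpolate:

    import numpy as np
    import gsw

    x = np.array([0.0, 1.0, 2.0, 4.0])
    y = np.array([0.0, 1.0, 1.0, 3.0])
    xi = np.array([-1.0, 0.5, 3.0, 5.0])

    yi = gsw.pchip_interp(x, y, xi, axis=0)
    # yi[0] == y[0] and yi[-1] == y[-1]: points outside [0, 4] take the
    # end values of the table rather than being extrapolated.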
diff --git a/setup.cfg b/setup.cfg
index 8bfd5a1..ececdaa 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,3 +1,11 @@
+[versioneer]
+vcs = git
+style = pep440
+versionfile_source = gsw/_version.py
+versionfile_build = gsw/_version.py
+tag_prefix = v
+parentdir_prefix =
+
[egg_info]
tag_build =
tag_date = 0
diff --git a/setup.py b/setup.py
index 54056b4..e2c5836 100644
--- a/setup.py
+++ b/setup.py
@@ -7,9 +7,12 @@ from __future__ import print_function
import os
import sys
+import pkg_resources
from setuptools import Extension, setup
+from distutils.command.build_ext import build_ext as _build_ext
+
+import versioneer
-import numpy as np
# Check Python version.
if sys.version_info < (3, 5):
@@ -54,23 +57,26 @@ def read(*parts):
return open(os.path.join(rootpath, *parts), 'r').read()
-def extract_version():
- version = None
- fname = os.path.join(rootpath, 'gsw', '__init__.py')
- with open(fname) as f:
- for line in f:
- if (line.startswith('__version__')):
- _, version = line.split('=')
- version = version.strip()[1:-1] # Remove quotation characters
- break
- return version
+class build_ext(_build_ext):
+ # Extension builder from pandas without the cython stuff
+ def build_extensions(self):
+ numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')
+
+ for ext in self.extensions:
+ if hasattr(ext, 'include_dirs') and not numpy_incl in ext.include_dirs:
+ ext.include_dirs.append(numpy_incl)
+ _build_ext.build_extensions(self)
+
LICENSE = read('LICENSE')
long_description = read('README.rst')
+cmdclass = versioneer.get_cmdclass()
+cmdclass.update({'build_ext': build_ext})
+
config = dict(
name='gsw',
- version=extract_version(),
+ version=versioneer.get_version(),
packages=['gsw'],
author=['Eric Firing', 'Filipe Fernandes'],
author_email='efiring@hawaii.edu',
@@ -94,14 +100,15 @@ config = dict(
python_requires='>=3.5',
platforms='any',
keywords=['oceanography', 'seawater', 'TEOS-10'],
+ install_requires=['numpy'],
setup_requires=['numpy'],
ext_modules=[
Extension('gsw._gsw_ufuncs',
[srcdir + '/_ufuncs.c',
srcdir + '/c_gsw/gsw_oceanographic_toolbox.' + c_ext,
srcdir + '/c_gsw/gsw_saar.' + c_ext])],
- include_dirs=[np.get_include(),
- os.path.join(rootpath, srcdir, 'c_gsw')],
+ include_dirs=[os.path.join(rootpath, srcdir, 'c_gsw')],
+ cmdclass=cmdclass,
)
setup(**config)
diff --git a/src/c_gsw/gsw_oceanographic_toolbox.c b/src/c_gsw/gsw_oceanographic_toolbox.c
index eeee4c9..64427d9 100644
--- a/src/c_gsw/gsw_oceanographic_toolbox.c
+++ b/src/c_gsw/gsw_oceanographic_toolbox.c
@@ -1,6 +1,6 @@
/*
** $Id: gsw_oceanographic_toolbox-head,v c61271a7810d 2016/08/19 20:04:03 fdelahoyde $
-** Version: 3.05.0-3
+** Version: 4
**
** This is a translation of the original f90 source code into C
** by the Shipboard Technical Support Computing Resources group
@@ -3923,8 +3923,366 @@ p_sequence(double p1, double p2, double max_dp_i, double *pseq, int *nps)
if (nps != NULL) *nps = n;
- for (i=1; i<=n; i++)
- pseq[i-1] = p1+pstep*i;
+ /*
+ ! Generate the sequence ensuring that the value of p2 is exact to
+ ! avoid round-off issues, i.e. don't do "pseq = p1+pstep*(i+1)".
+ */
+ for (i=0; i<n; i++)
+ pseq[i] = p2-pstep*(n-1-i);
+
+}
+/*
+This is a replacement for gsw_geo_strf_dyn_height, with a different
+signature and interpolation algorithms.
+!==========================================================================
+int (returns nonzero on error, 0 if OK)
+gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p, double p_ref,
+ int nz, double *dyn_height, double max_dp_i, int interp_method)
+!==========================================================================
+!
+! Calculates dynamic height anomaly as the integral of specific volume
+! anomaly from the pressure p of the bottle to the reference pressure
+! p_ref.
+!
+! Hence, geo_strf_dyn_height is the dynamic height anomaly with respect
+! to a given reference pressure. This is the geostrophic streamfunction
+! for the difference between the horizontal velocity at the pressure
+! concerned, p, and the horizontal velocity at p_ref. Dynamic height
+! anomaly is the geostrophic streamfunction in an isobaric surface. The
+! reference values used for the specific volume anomaly are
+! SSO = 35.16504 g/kg and CT = 0 deg C. This function calculates
+! specific volume anomaly using the computationally efficient
+! expression for specific volume of Roquet et al. (2015).
+!
+! This function evaluates the pressure integral of specific volume using
+! SA and CT interpolated with respect to pressure. The interpolation method
+! may be chosen as linear or "PCHIP", piecewise cubic Hermite using a shape-
+! preserving algorithm for setting the derivatives.
+!
+! SA = Absolute Salinity [ g/kg ]
+! CT = Conservative Temperature (ITS-90) [ deg C ]
+! p = sea pressure (increasing with index) [ dbar ]
+! ( i.e. absolute pressure - 10.1325 dbar )
+! nz = number of points in each array
+! p_ref = reference pressure [ dbar ]
+! ( i.e. reference absolute pressure - 10.1325 dbar )
+! geo_strf_dyn_height = dynamic height anomaly [ m^2/s^2 ]
+! max_dp_i = maximum pressure difference between points for triggering
+! interpolation.
+! interp_method = 1 for linear, 2 for PCHIP
+!
+! Note. If p_ref falls outside the range of a
+! vertical profile, the dynamic height anomaly for each bottle
+! on the whole vertical profile is returned as NaN.
+!--------------------------------------------------------------------------
+*/
+
+/*
+ Make a new grid based on an original monotonic array, typically P, such that
+ on the new monotonic grid, p_i:
+
+ 1) The first element is p[0], the last is p[nz-1].
+ 2) Approximate integer multiples of dp are included.
+ 3) All original p points are included.
+ 4) The value p_ref is included.
+
+ The p_i and p_indices arrays are allocated by the caller; it is
+ the responsibility of subsequent code to free that memory.
+
+ In addition to the new p_i grid, the function returns the array
+ of indices (p_indices)
+ of the original p grid points in p_i, and a pointer to
+ the index of p_ref within p_i.
+
+ Its return argument is the number of elements in p_i.
+*/
+static int refine_grid_for_dh(double *p, double p_ref, int nz,
+ double dp,
+ double *p_i, int ni_max, /* size of p_i array; larger than needed */
+ int *p_indices, int *p_ref_ind_ptr)
+{
+ int i, iuniform, iorig;
+ double p_next;
+ /* Don't add a new point if it is within p_tol of an original. */
+ double p_tol = 0.001 * dp;
+
+ p_i[0] = p[0];
+ p_indices[0] = 0;
+ *p_ref_ind_ptr = -1; /* initialize to a flag value */
+ if (p_ref <= p[0] + p_tol)
+ {
+ *p_ref_ind_ptr = 0;
+ }
+ for (i=1, iuniform=1, iorig=1; i<ni_max && iorig<nz; i++)
+ {
+ /* Candidate insertion based on uniform grid: */
+ p_next = p[0] + dp * iuniform;
+
+ /* See if we need to insert p_ref: */
+ if (*p_ref_ind_ptr == -1 && p_ref <= p_next && p_ref <= p[iorig])
+ {
+ p_i[i] = p_ref;
+ *p_ref_ind_ptr = i;
+ if (p_ref == p[iorig])
+ {
+ p_indices[iorig] = i;
+ iorig++;
+ }
+ if (p_ref > p_next - p_tol)
+ {
+ iuniform++;
+ }
+ continue;
+ }
+
+ /* We did not insert p_ref, so insert either p_next or p[iorig]. */
+ if (p_next < p[iorig] - p_tol)
+ {
+ p_i[i] = p_next;
+ iuniform++;
+ }
+ else
+ {
+ p_i[i] = p[iorig];
+ p_indices[iorig] = i;
+ /* Skip this p_next if it is close to the point we just added. */
+ if (p_next < p[iorig] + p_tol)
+ {
+ iuniform++;
+ }
+ iorig++;
+ }
+ }
+
+ if (i == ni_max)
+ {
+ return (-1); /* error! */
+ }
+ return (i); /* number of elements in p_i */
+}
+
+/* Linearly interpolate to the grid made by refine_grid_for_dh.
+ We take advantage of what we know about the grids: they match
+ at the end points, and both are monotonic.
+*/
+
+static int linear_interp_SA_CT_for_dh(double *sa, double *ct, double *p, int nz,
+ double *p_i, int n_i,
+ double *sa_i, double *ct_i)
+{
+ int i, ii;
+ double pfac;
+
+ sa_i[0] = sa[0];
+ sa_i[n_i-1] = sa[nz-1];
+ ct_i[0] = ct[0];
+ ct_i[n_i-1] = ct[nz-1];
+ i = 1;
+ for (ii=1; ii<n_i-1; ii++)
+ {
+ /* Find the second point of the pair in the original grid
+ that brackets the target.
+ */
+ while (p[i] < p_i[ii])
+ {
+ i++;
+ if (i == nz)
+ {
+ return -1; /* error! */
+ }
+ }
+ pfac = (p_i[ii] - p[i-1]) / (p[i] - p[i-1]);
+ sa_i[ii] = sa[i-1] + pfac * (sa[i] - sa[i-1]);
+ ct_i[ii] = ct[i-1] + pfac * (ct[i] - ct[i-1]);
+ }
+ return 0;
+}
+
+
+int /* returns nonzero on error, 0 if OK */
+gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p, double p_ref,
+ int nz, double *dyn_height, double max_dp_i, int interp_method)
+{
+ GSW_TEOS10_CONSTANTS;
+ int i, ipref,
+ *p_indices, n_i, ni_max, err;
+ double dp_min, dp_max, p_min, p_max,
+ *b, *b_av, *dp, *sa_i, *ct_i, *p_i,
+ *dh_i;
+ double dh_ref;
+
+ if (nz < 2)
+ return (1);
+
+ dp = malloc((nz-1) * sizeof(double));
+ dp_min = 11000.0;
+ dp_max = -11000.0;
+ for (i=0; i<nz-1; i++)
+ {
+ dp[i] = p[i+1] - p[i];
+ if (dp[i] < dp_min)
+ {
+ dp_min = dp[i];
+ }
+ if (dp[i] > dp_max)
+ {
+ dp_max = dp[i];
+ }
+ }
+
+ if (dp_min <= 0.0) {
+ /* pressure must be monotonic */
+ free(dp);
+ return (2);
+ }
+ p_min = p[0];
+ p_max = p[nz-1];
+
+ if (p_ref > p_max || p_ref < p_min) {
+ /* Reference pressure must be within the data range. */
+ free(dp);
+ return (3);
+ }
+
+ /* Determine if there is a sample at exactly p_ref */
+ ipref = -1;
+ for (i = 0; i < nz; i++)
+ {
+ if (p[i] == p_ref)
+ {
+ ipref = i;
+ break;
+ }
+ }
+
+ if ((dp_max <= max_dp_i) && (ipref >= 0)) {
+ /*
+ !vertical resolution is good (bottle gap is no larger than max_dp_i)
+ ! & the profile contains a "bottle" at exactly p_ref.
+ */
+ b = malloc(nz*sizeof (double));
+ b_av = malloc((nz-1) * sizeof(double));
+ for (i=0; i<nz; i++) {
+ b[i] = gsw_specvol_anom_standard(sa[i],ct[i],p[i]);
+ }
+ for (i=0; i<(nz-1); i++) {
+ b_av[i] = 0.5*(b[i+1] + b[i]);
+ }
+ /* First calculate dynamic height relative to the first (shallowest)
+ depth. */
+ dyn_height[0] = 0.0;
+ for (i=1; i<nz; i++)
+ {
+ dyn_height[i] = dyn_height[i-1] - b_av[i-1]*dp[i-1]*db2pa;
+ }
+ /* Then subtract out the value at the reference pressure. */
+ dh_ref = dyn_height[ipref];
+ for (i=0; i<nz; i++)
+ {
+ dyn_height[i] -= dh_ref;
+ }
+ free(b);
+ free(b_av);
+ free(dp);
+ return (0);
+ }
+
+ /*
+ If we got this far, then we need to interpolate: either or both of
+ inserting a point for p_ref and subdividing the intervals to keep the max
+ interval less than max_dp_i.
+ */
+
+ free(dp); /* Need to recalculate, so free here and malloc when needed. */
+
+ ni_max = nz + (int) ceil((p[nz-1] - p[0]) / max_dp_i) + 2;
+ /* Maximum possible size of new grid: Original grid size plus
+ the number of dp intervals plus 1 for the p_ref,
+ plus 1 so that we can know we exited the loop before we hit it.
+ */
+
+ p_i = (double *) malloc(ni_max * sizeof(double));
+ p_indices = (int *) malloc(nz * sizeof(int));
+
+ n_i = refine_grid_for_dh(p, p_ref, nz, max_dp_i,
+ p_i, ni_max,
+ p_indices, &ipref);
+ /* Reminder: p_i and p_indices, allocated above, must still be freed. */
+ if (n_i == -1)
+ {
+ free(p_i);
+ free(p_indices);
+ return (4);
+ }
+
+ ct_i = malloc(n_i * sizeof(double));
+ sa_i = malloc(n_i * sizeof(double));
+
+ if (interp_method == INTERP_METHOD_LINEAR)
+ {
+ err = linear_interp_SA_CT_for_dh(sa, ct, p, nz,
+ p_i, n_i,
+ sa_i, ct_i);
+ if (err) err = 5;
+ }
+ else if (interp_method == INTERP_METHOD_PCHIP)
+ {
+ err = gsw_util_pchip_interp(p, sa, nz, p_i, sa_i, n_i);
+ err = err || gsw_util_pchip_interp(p, ct, nz, p_i, ct_i, n_i);
+ if (err) err = 6;
+ }
+ else
+ {
+ err = 7;
+ }
+ if (err)
+ {
+ free(p_i);
+ free(p_indices);
+ free(ct_i);
+ free(sa_i);
+ return (err);
+ }
+
+ dh_i = malloc(n_i * sizeof(double));
+ dp = malloc((n_i-1) * sizeof(double));
+ b = malloc(n_i*sizeof (double));
+ b_av = malloc((n_i-1) * sizeof(double));
+
+ for (i=0; i<n_i; i++)
+ {
+ b[i] = gsw_specvol_anom_standard(sa_i[i], ct_i[i], p_i[i]);
+ }
+ free(ct_i);
+ free(sa_i);
+
+ for (i=0; i<(n_i-1); i++)
+ {
+ b_av[i] = 0.5*(b[i+1] + b[i]);
+ dp[i] = p_i[i+1] - p_i[i];
+ }
+ free(p_i);
+ /* First calculate dynamic height relative to the first (shallowest)
+ depth. */
+ dh_i[0] = 0.0;
+ for (i=1; i<n_i; i++)
+ {
+ dh_i[i] = dh_i[i-1] - b_av[i-1]*dp[i-1]*db2pa;
+ }
+ free(b);
+ free(b_av);
+ free(dp);
+
+ dh_ref = dh_i[ipref];
+
+ for (i=0; i<nz; i++)
+ {
+ dyn_height[i] = dh_i[p_indices[i]] - dh_ref;
+ }
+ free(p_indices);
+ free(dh_i);
+
+ return 0;
}
/*
!==========================================================================
@@ -7859,20 +8217,31 @@ rr68_interp_section(int sectnum, double *sa, double *ct, double *p, int mp,
for (i=0; i<nsect; i++) {
ip_1[i] = floor(ip_sect[i]);
ip_2[i] = ceil(ip_sect[i]);
+ if (ip_1[i] == ip_2[i])
+ ip_2[i] = ip_1[i] + 1;
ip_3[i] = ip_2[i] + 1;
ip_4[i] = ip_3[i] + 1;
}
} else if (sectnum == 0) { /* central */
for (i=0; i<nsect; i++) {
ip_2[i] = floor(ip_sect[i]);
- ip_1[i] = ip_2[i] - 1;
ip_3[i] = ceil(ip_sect[i]);
+ if (ip_2[i] == ip_3[i])
+ ip_2[i] = ip_3[i] - 1;
+ ip_1[i] = ip_2[i] - 1;
+ if (ip_1[i] < 0) {
+ ip_1[i] = 0;
+ ip_2[i] = 1;
+ ip_3[i] = 2;
+ }
ip_4[i] = ip_3[i] + 1;
}
} else if (sectnum > 0) { /* deep */
for (i=0; i<nsect; i++) {
ip_1[i] = ceil(ip_sect[i]);
ip_2[i] = floor(ip_sect[i]);
+ if (ip_1[i] == ip_2[i])
+ ip_2[i] = ip_1[i] - 1;
ip_3[i] = ip_2[i] - 1;
ip_4[i] = ip_3[i] - 1;
}
@@ -10594,36 +10963,214 @@ gsw_util_linear_interp(int nx, double *x, int ny, double *y, int nxi,
free(j); free(xxi); free(k); free(xi);
return (y_i);
}
+/*******************************************************************************
+ Functions for pchip interpolation
+ (Piecewise Cubic Hermite Interpolating Polynomial)
+ based on
+ https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.PchipInterpolator.html#scipy.interpolate.PchipInterpolator
+
+ See references therein.
+ This is a shape-preserving algorithm, in the sense that it does not
+ overshoot the original points; extrema of the interpolated curve match
+ the extrema of the original points.
+*/
+
+#define sgn(x) (((x) > 0) ? 1 : (((x) < 0) ? -1 : 0))
+
+static double pchip_edge_case(double h0, double h1, double m0, double m1)
+{
+ double d;
+ int mask, mask2;
+
+ d = ((2*h0 + h1)*m0 - h0*m1) / (h0 + h1);
+ mask = sgn(d) != sgn(m0);
+ mask2 = (sgn(m0) != sgn(m1)) && (fabs(d) > 3.0*fabs(m0));
+ if (mask)
+ {
+ return 0.0;
+ }
+ if (!mask && mask2)
+ {
+ return 3.0*m0;
+ }
+ return d;
+}
+
+/*
+ Calculation of the derivatives is the key to the shape-preservation.
+ There are other algorithms that could be used, but this appears to be
+ the simplest, and adequate for our purposes.
+
+ At minimal computational cost, we include here a check for increasing x.
+
+ Returns 0 on success, 1 if x is not strictly increasing.
+*/
+static int pchip_derivs(double *x, double *y, int n,
+ double *d)
+{
+ double mm, mp; /* slopes bracketing x */
+ double hm, hp; /* bracketing delta-x values */
+ int smm, smp; /* slope signs */
+ double w1, w2;
+ int i;
+
+ if (n == 2)
+ {
+ d[0] = d[1] = (y[1] - y[0]) / (x[1] - x[0]);
+ return 0;
+ }
+
+ hm = x[1] - x[0];
+ hp = x[2] - x[1];
+ mm = (y[1] - y[0]) / hm;
+ mp = (y[2] - y[1]) / hp;
+ d[0] = pchip_edge_case(hm, hp, mm, mp);
+ smm = sgn(mm);
+ smp = sgn(mp);
+
+ for (i=1; i<n-1; i++)
+ {
+ if (hm <= 0)
+ {
+ return 1;
+ }
+ /* change of sign, or either slope is zero */
+ if ((smm != smp) || mp == 0 || mm == 0)
+ {
+ d[i] = 0.0;
+ }
+ else
+ {
+ w1 = 2*hp + hm;
+ w2 = hp + 2*hm;
+ d[i] = (w1 + w2) / (w1/mm + w2/mp);
+ }
+ if (i < n-2)
+ {
+ hm = hp;
+ hp = x[i+2] - x[i+1];
+ mm = mp;
+ mp = (y[i+2] - y[i+1]) / hp;
+ smm = smp;
+ smp = sgn(mp);
+ }
+ }
+ if (hp <= 0)
+ {
+ return 1;
+ }
+ d[n-1] = pchip_edge_case(hp, hm, mp, mm);
+ return 0;
+}
+
+/*************************************************************************
+ Piecewise-Hermite algorithm from
+ https://en.wikipedia.org/wiki/Cubic_Hermite_spline
+
+ Extrapolation to points outside the range is done by setting those
+ points to the corresponding end values.
+
+ The input x must be monotonically increasing; the interpolation points,
+ xi, may be in any order, but the algorithm will be faster if they are
+ monotonic, increasing or decreasing.
+
+ Returns 0 on success, 1 if it fails because there are fewer than 2 points,
+ 2 if it fails because x is not increasing.
+ Consistent with other GSW-C code at present, the memory allocations
+ are assumed to succeed.
+*/
+int gsw_util_pchip_interp(double *x, double *y, int n,
+ double *xi, double *yi, int ni)
+{
+ double *d;
+ double t, tt, ttt, xx, dx;
+ int i, j0, j1, err;
+ double h00, h10, h01, h11;
+
+ if (n<2)
+ {
+ return 1;
+ }
+ d = (double *)calloc(n, sizeof(double));
+ err = pchip_derivs(x, y, n, d);
+ if (err)
+ {
+ return 2;
+ }
+
+ j0 = 0;
+ for (i=0; i<ni; i++)
+ {
+ xx = xi[i];
+ /* Linear search is appropriate and probably optimal for the
+ expected primary use case of interpolation to a finer grid.
+ It is inefficient but still functional in the worst case of
+ randomly distributed xi.
+ */
+ while (xx < x[j0] && j0 > 0)
+ {
+ j0--;
+ }
+ while (xx > x[j0+1] && j0 < n - 2)
+ {
+ j0++;
+ }
+ j1 = j0 + 1;
+ if (xx >= x[j0] && xx <= x[j1])
+ {
+ dx = x[j1] - x[j0];
+ t = (xx - x[j0]) / dx;
+ tt = t * t;
+ ttt = tt * t;
+ /* Using intermediate variables for readability. */
+ h00 = (2*ttt - 3*tt + 1);
+ h10 = (ttt - 2*tt + t);
+ h01 = (-2*ttt + 3*tt);
+ h11 = (ttt - tt);
+ yi[i] = y[j0] * h00 + d[j0] * dx * h10 +
+ y[j1] * h01 + d[j1] * dx * h11;
+ }
+ else
+ {
+ /* extrapolate with constant end values */
+ yi[i] = (xx < x[0]) ? y[0] : y[n-1];
+ }
+ }
+ free(d);
+ return 0;
+}
+
+/*
+ End of the pchip interpolation.
+ *******************************
+*/
/*
pure function gsw_util_sort_real (rarray) result(iarray)
*/
-#if (defined __APPLE__ || defined __MACH__ || defined __DARWIN__ || \
- defined __FREEBSD__ || defined __BSD__ || \
- defined _WIN32 || defined _WIN64 || defined __WINDOWS__)
-static int
-compare(void *rarray, const void *p1, const void *p2)
-#else
-
-extern void qsort_r(void *, size_t, size_t, int (*)(const void *, const void *,
- void *), void *);
-static int
-compare(const void *p1, const void *p2, void *rarray)
-#endif
-{
- double *rdata = rarray;
- if (rdata[*(int *)p1] < rdata[*(int *)p2])
- return (-1);
- if (rdata[*(int *)p1] > rdata[*(int *)p2])
- return (1);
- /*
- ** Note that the library functions using this utility
- ** depend on the fact that for replicate values in rdata
- ** the indexes are returned in descending sequence.
- */
- if (*(int *)p1 < *(int *)p2)
- return (1);
- return (0);
+typedef struct {
+ double d;
+ int i;
+} DI;
+
+/*
+ * Rank two items, by value if possible,
+ * and by inverse index, if the values are
+ * equal.
+ * FIXME: decide if index method matches docs.
+ */
+int
+compareDI(const void *a, const void *b)
+{
+ DI *A = (DI*)a;
+ DI *B = (DI*)b;
+ if (A->d < B->d)
+ return (-1);
+ if (A->d > B->d)
+ return (1);
+ if (A->i < B->i)
+ return (1);
+ return (-1);
}
/*
@@ -10635,17 +11182,15 @@ void
gsw_util_sort_real(double *rarray, int nx, int *iarray)
{
int i;
-
+ DI* di = (DI*)malloc(nx*sizeof(DI));
+ for (i=0; i<nx; i++) {
+ di[i].d = rarray[i];
+ di[i].i = i;
+ }
+ qsort(di, nx, sizeof(DI), compareDI);
for (i=0; i<nx; i++)
- iarray[i] = i;
-#if (defined __APPLE__ || defined __MACH__ || defined __DARWIN__ || \
- defined __FREEBSD__ || defined __BSD__ )
- qsort_r(iarray, nx, sizeof (int), (void *)rarray, compare);
-#elif (defined _WIN32 || defined _WIN64 || defined __WINDOWS__)
- qsort_s(iarray, nx, sizeof (int), compare, (void *)rarray);
-#else
- qsort_r(iarray, nx, sizeof (int), compare, (void *)rarray);
-#endif
+ iarray[i] = di[i].i;
+ free(di);
}
/*
!==========================================================================
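The interior-derivative rule in pchip_derivs above is the weighted harmonic
mean described in the scipy PchipInterpolator documentation linked in the
comments. A small pure-Python model of the same rule (hypothetical helper,
for illustration only):

    def pchip_interior_derivative(hm, hp, mm, mp):
        """Derivative at a point with bracketing slopes mm, mp over
        intervals of width hm, hp (as in pchip_derivs)."""
        sgn = lambda v: (v > 0) - (v < 0)
        # Zero at sign changes or flat spots; this is what prevents
        # overshoot and keeps the interpolant shape-preserving.
        if sgn(mm) != sgn(mp) or mm == 0 or mp == 0:
            return 0.0
        w1 = 2*hp + hm
        w2 = hp + 2*hm
        return (w1 + w2) / (w1/mm + w2/mp)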
diff --git a/src/c_gsw/gswteos-10.h b/src/c_gsw/gswteos-10.h
index b6e8957..4927520 100644
--- a/src/c_gsw/gswteos-10.h
+++ b/src/c_gsw/gswteos-10.h
@@ -21,6 +21,9 @@ extern "C" {
#define GSW_INVALID_VALUE 9e15 /* error return from gsw_saar et al. */
#define GSW_ERROR_LIMIT 1e10
+#define INTERP_METHOD_LINEAR 1
+#define INTERP_METHOD_PCHIP 2
+
/*
** Prototypes:
*/
@@ -112,6 +115,9 @@ extern void gsw_frazil_ratios_adiabatic_poly(double sa, double p,
double *dct_dp_frazil);
extern double *gsw_geo_strf_dyn_height(double *sa, double *ct, double *p,
double p_ref, int n_levels, double *dyn_height);
+extern int gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p,
+ double p_ref, int n_levels, double *dyn_height,
+ double max_dp_i, int interp_method);
extern double *gsw_geo_strf_dyn_height_pc(double *sa, double *ct,
double *delta_p, int n_levels, double *geo_strf_dyn_height_pc,
double *p_mid);
@@ -284,6 +290,8 @@ extern double *gsw_util_linear_interp(int nx, double *x, int ny, double *y,
int nxi, double *x_i, double *y_i);
extern void gsw_util_sort_real(double *rarray, int nx, int *iarray);
extern double gsw_util_xinterp1(double *x, double *y, int n, double x0);
+extern int gsw_util_pchip_interp(double *x, double *y, int n,
+ double *xi, double *yi, int ni);
extern double gsw_z_from_p(double p, double lat);
extern double gsw_p_from_z(double z, double lat);
diff --git a/src/method_bodies.c b/src/method_bodies.c
index 1dac9fb..130eea2 100644
--- a/src/method_bodies.c
+++ b/src/method_bodies.c
@@ -11,7 +11,7 @@ gsw_geo_strf_dyn_height(double *sa, double *ct, double *p, double p_ref,
static PyObject *
geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args)
{
- PyObject *sa_o, *ct_o, *p_o, *dh_o;
+ PyObject *sa_o, *ct_o, *p_o;
double p_ref;
PyArrayObject *sa_a, *ct_a, *p_a, *dh_a;
int n_levels;
@@ -38,7 +38,7 @@ geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args)
goto error;
}
- dh_a = PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0);
+ dh_a = (PyArrayObject *)PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0);
if (dh_a == NULL)
goto error;
@@ -67,3 +67,149 @@ geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args)
return (PyObject *)dh_a;
}
+
+
+static PyObject *
+geo_strf_dyn_height_1(PyObject *NPY_UNUSED(self), PyObject *args)
+{
+ PyObject *sa_o, *ct_o, *p_o;
+ double p_ref;
+ PyArrayObject *sa_a, *ct_a, *p_a, *dh_a;
+ int n_levels;
+ int ret = 1; /* error (1) until set to 0 by the C function */
+ double max_dp_i;
+ int interp_method;
+
+ if (!PyArg_ParseTuple(args, "OOOddi", &sa_o, &ct_o, &p_o, &p_ref,
+ &max_dp_i, &interp_method))
+ return NULL;
+
+ sa_a = (PyArrayObject *)PyArray_ContiguousFromAny(sa_o, NPY_DOUBLE, 1, 1);
+ if (sa_a == NULL)
+ return NULL;
+
+ ct_a = (PyArrayObject *)PyArray_ContiguousFromAny(ct_o, NPY_DOUBLE, 1, 1);
+ if (ct_a == NULL)
+ {
+ Py_DECREF(sa_a);
+ return NULL;
+ }
+ p_a = (PyArrayObject *)PyArray_ContiguousFromAny(p_o, NPY_DOUBLE, 1, 1);
+ if (p_a == NULL)
+ {
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ return NULL;
+ }
+
+ n_levels = PyArray_DIM(sa_a, 0);
+ if (PyArray_DIM(ct_a, 0) != n_levels || PyArray_DIM(p_a, 0) != n_levels)
+ {
+ PyErr_SetString(PyExc_ValueError,
+ "Arguments SA, CT, and p must have the same dimensions.");
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ Py_DECREF(p_a);
+ return NULL;
+ }
+
+ dh_a = (PyArrayObject *)PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0);
+ if (dh_a == NULL)
+ {
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ Py_DECREF(p_a);
+ return NULL;
+ }
+
+ ret = gsw_geo_strf_dyn_height_1((double *)PyArray_DATA(sa_a),
+ (double *)PyArray_DATA(ct_a),
+ (double *)PyArray_DATA(p_a),
+ p_ref,
+ n_levels,
+ (double *)PyArray_DATA(dh_a),
+ max_dp_i,
+ interp_method);
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ Py_DECREF(p_a);
+
+ if (ret)
+ {
+ PyErr_Format(PyExc_RuntimeError,
+ "gsw_geo_strf_dyn_height_1 failed with code %d; check input arguments",
+ ret);
+ Py_DECREF(dh_a);
+ return NULL;
+ }
+ return (PyObject *)dh_a;
+}
+
+
+static PyObject *
+util_pchip_interp(PyObject *NPY_UNUSED(self), PyObject *args)
+{
+ PyObject *x, *y, *xi;
+ PyArrayObject *xa, *ya, *xia, *yia;
+ int n, ni;
+ int ret = 1; /* error (1) until set to 0 by the C function */
+
+ if (!PyArg_ParseTuple(args, "OOO", &x, &y, &xi))
+ return NULL;
+
+ xa = (PyArrayObject *)PyArray_ContiguousFromAny(x, NPY_DOUBLE, 1, 1);
+ if (xa == NULL)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to convert argument x");
+ return NULL;
+ }
+
+ ya = (PyArrayObject *)PyArray_ContiguousFromAny(y, NPY_DOUBLE, 1, 1);
+ if (ya == NULL)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to convert argument y");
+ Py_DECREF(xa);
+ return NULL;
+ }
+ n = PyArray_DIM(xa, 0);
+
+ xia = (PyArrayObject *)PyArray_ContiguousFromAny(xi, NPY_DOUBLE, 1, 1);
+ if (xia == NULL)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to convert argument xi");
+ Py_DECREF(xa);
+ Py_DECREF(ya);
+ return NULL;
+ }
+ ni = PyArray_DIM(xia, 0);
+
+ yia = (PyArrayObject *)PyArray_NewLikeArray(xia, NPY_CORDER, NULL, 0);
+ if (yia == NULL)
+ {
+ Py_DECREF(xa);
+ Py_DECREF(ya);
+ Py_DECREF(xia);
+ return NULL;
+ }
+
+ ret = gsw_util_pchip_interp((double *)PyArray_DATA(xa),
+ (double *)PyArray_DATA(ya),
+ n,
+ (double *)PyArray_DATA(xia),
+ (double *)PyArray_DATA(yia),
+ ni);
+
+ Py_DECREF(xa);
+ Py_DECREF(ya);
+ Py_DECREF(xia);
+ if (ret)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "gsw_util_pchip_interp failed; check input arguments");
+ return NULL;
+ }
+ return (PyObject *)yia;
+}
diff --git a/src/method_def_entries.c b/src/method_def_entries.c
index 5bfa770..0fe48fd 100644
--- a/src/method_def_entries.c
+++ b/src/method_def_entries.c
@@ -1,3 +1,7 @@
/* Entries in the GswMethods table. */
{"geo_strf_dyn_height", geo_strf_dyn_height, METH_VARARGS,
"geostrophic streamfunction dynamic height"},
+{"geo_strf_dyn_height_1", geo_strf_dyn_height_1, METH_VARARGS,
+ "geostrophic streamfunction dynamic height"},
+{"util_pchip_interp", util_pchip_interp, METH_VARARGS,
+ "PCHIP interpolation"},
diff --git a/src2/c_gsw/gsw_oceanographic_toolbox.cpp b/src2/c_gsw/gsw_oceanographic_toolbox.cpp
index 468d376..ac922ae 100644
--- a/src2/c_gsw/gsw_oceanographic_toolbox.cpp
+++ b/src2/c_gsw/gsw_oceanographic_toolbox.cpp
@@ -3922,9 +3922,366 @@ p_sequence(double p1, double p2, double max_dp_i, double *pseq, int *nps)
pstep = dp/n;
if (nps != NULL) *nps = n;
+ /*
+ ! Generate the sequence ensuring that the value of p2 is exact to
+ ! avoid round-off issues, i.e. don't do "pseq = p1+pstep*(i+1)".
+ */
+ for (i=0; i<n; i++)
+ pseq[i] = p2-pstep*(n-1-i);
- for (i=1; i<=n; i++)
- pseq[i-1] = p1+pstep*i;
+}
+/*
+This is a replacement for gsw_geo_strf_dyn_height, with a different
+signature and interpolation algorithms.
+!==========================================================================
+int (returns nonzero on error, 0 if OK)
+gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p, double p_ref,
+ int nz, double *dyn_height, double max_dp_i, int interp_method)
+!==========================================================================
+!
+! Calculates dynamic height anomaly as the integral of specific volume
+! anomaly from the pressure p of the bottle to the reference pressure
+! p_ref.
+!
+! Hence, geo_strf_dyn_height is the dynamic height anomaly with respect
+! to a given reference pressure. This is the geostrophic streamfunction
+! for the difference between the horizontal velocity at the pressure
+! concerned, p, and the horizontal velocity at p_ref. Dynamic height
+! anomaly is the geostrophic streamfunction in an isobaric surface. The
+! reference values used for the specific volume anomaly are
+! SSO = 35.16504 g/kg and CT = 0 deg C. This function calculates
+! specific volume anomaly using the computationally efficient
+! expression for specific volume of Roquet et al. (2015).
+!
+! This function evaluates the pressure integral of specific volume using
+! SA and CT interpolated with respect to pressure. The interpolation method
+! may be chosen as linear or "PCHIP", piecewise cubic Hermite using a shape-
+! preserving algorithm for setting the derivatives.
+!
+! SA = Absolute Salinity [ g/kg ]
+! CT = Conservative Temperature (ITS-90) [ deg C ]
+! p = sea pressure (increasing with index) [ dbar ]
+! ( i.e. absolute pressure - 10.1325 dbar )
+! nz = number of points in each array
+! p_ref = reference pressure [ dbar ]
+! ( i.e. reference absolute pressure - 10.1325 dbar )
+! geo_strf_dyn_height = dynamic height anomaly [ m^2/s^2 ]
+! max_dp_i = maximum pressure difference between points for triggering
+! interpolation.
+! interp_method = 1 for linear, 2 for PCHIP
+!
+! Note. If p_ref falls outside the range of a
+! vertical profile, the dynamic height anomaly for each bottle
+! on the whole vertical profile is returned as NaN.
+!--------------------------------------------------------------------------
+*/
+
+/*
+ Make a new grid based on an original monotonic array, typically P, such that
+ on the new monotonic grid, p_i:
+
+ 1) The first element is p[0], the last is p[nz-1].
+ 2) Approximate integer multiples of dp are included.
+ 3) All original p points are included.
+ 4) The value p_ref is included.
+
+ The p_i and p_indices arrays are allocated by the caller; it is
+ the responsibility of subsequent code to free that memory.
+
+ In addition to the new p_i grid, the function returns the array
+ of indices (p_indices)
+ of the original p grid points in p_i, and a pointer to
+ the index of p_ref within p_i.
+
+ Its return argument is the number of elements in p_i.
+*/
+static int refine_grid_for_dh(double *p, double p_ref, int nz,
+ double dp,
+ double *p_i, int ni_max, /* size of p_i array; larger than needed */
+ int *p_indices, int *p_ref_ind_ptr)
+{
+ int i, iuniform, iorig;
+ double p_next;
+ /* Don't add a new point if it is within p_tol of an original. */
+ double p_tol = 0.001 * dp;
+
+ p_i[0] = p[0];
+ p_indices[0] = 0;
+ *p_ref_ind_ptr = -1; /* initialize to a flag value */
+ if (p_ref <= p[0] + p_tol)
+ {
+ *p_ref_ind_ptr = 0;
+ }
+ for (i=1, iuniform=1, iorig=1; i<ni_max && iorig<nz; i++)
+ {
+ /* Candidate insertion based on uniform grid: */
+ p_next = p[0] + dp * iuniform;
+
+ /* See if we need to insert p_ref: */
+ if (*p_ref_ind_ptr == -1 && p_ref <= p_next && p_ref <= p[iorig])
+ {
+ p_i[i] = p_ref;
+ *p_ref_ind_ptr = i;
+ if (p_ref == p[iorig])
+ {
+ p_indices[iorig] = i;
+ iorig++;
+ }
+ if (p_ref > p_next - p_tol)
+ {
+ iuniform++;
+ }
+ continue;
+ }
+
+ /* We did not insert p_ref, so insert either p_next or p[iorig]. */
+ if (p_next < p[iorig] - p_tol)
+ {
+ p_i[i] = p_next;
+ iuniform++;
+ }
+ else
+ {
+ p_i[i] = p[iorig];
+ p_indices[iorig] = i;
+ /* Skip this p_next if it is close to the point we just added. */
+ if (p_next < p[iorig] + p_tol)
+ {
+ iuniform++;
+ }
+ iorig++;
+ }
+ }
+
+ if (i == ni_max)
+ {
+ return (-1); /* error! */
+ }
+ return (i); /* number of elements in p_i */
+}
+
+/* Linearly interpolate to the grid made by refine_grid_for_dh.
+ We take advantage of what we know about the grids: they match
+ at the end points, and both are monotonic.
+*/
+
+static int linear_interp_SA_CT_for_dh(double *sa, double *ct, double *p, int nz,
+ double *p_i, int n_i,
+ double *sa_i, double *ct_i)
+{
+ int i, ii;
+ double pfac;
+
+ sa_i[0] = sa[0];
+ sa_i[n_i-1] = sa[nz-1];
+ ct_i[0] = ct[0];
+ ct_i[n_i-1] = ct[nz-1];
+ i = 1;
+ for (ii=1; ii<n_i-1; ii++)
+ {
+ /* Find the second point of the pair in the original grid
+ that brackets the target.
+ */
+ while (p[i] < p_i[ii])
+ {
+ i++;
+ if (i == nz)
+ {
+ return -1; /* error! */
+ }
+ }
+ pfac = (p_i[ii] - p[i-1]) / (p[i] - p[i-1]);
+ sa_i[ii] = sa[i-1] + pfac * (sa[i] - sa[i-1]);
+ ct_i[ii] = ct[i-1] + pfac * (ct[i] - ct[i-1]);
+ }
+ return 0;
+}
+
+
+int /* returns nonzero on error, 0 if OK */
+gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p, double p_ref,
+ int nz, double *dyn_height, double max_dp_i, int interp_method)
+{
+ GSW_TEOS10_CONSTANTS;
+ int i, ipref,
+ *p_indices, n_i, ni_max, err;
+ double dp_min, dp_max, p_min, p_max,
+ *b, *b_av, *dp, *sa_i, *ct_i, *p_i,
+ *dh_i;
+ double dh_ref;
+
+ if (nz < 2)
+ return (1);
+
+ dp = (double *)malloc((nz-1) * sizeof(double));
+ dp_min = 11000.0;
+ dp_max = -11000.0;
+ for (i=0; i<nz-1; i++)
+ {
+ dp[i] = p[i+1] - p[i];
+ if (dp[i] < dp_min)
+ {
+ dp_min = dp[i];
+ }
+ if (dp[i] > dp_max)
+ {
+ dp_max = dp[i];
+ }
+ }
+
+ if (dp_min <= 0.0) {
+ /* pressure must be monotonic */
+ free(dp);
+ return (2);
+ }
+ p_min = p[0];
+ p_max = p[nz-1];
+
+ if (p_ref > p_max || p_ref < p_min) {
+ /* Reference pressure must be within the data range. */
+ free(dp);
+ return (3);
+ }
+
+ /* Determine if there is a sample at exactly p_ref */
+ ipref = -1;
+ for (i = 0; i < nz; i++)
+ {
+ if (p[i] == p_ref)
+ {
+ ipref = i;
+ break;
+ }
+ }
+
+ if ((dp_max <= max_dp_i) && (ipref >= 0)) {
+ /*
+ !vertical resolution is good (bottle gap is no larger than max_dp_i)
+ ! & the profile contains a "bottle" at exactly p_ref.
+ */
+ b = (double *)malloc(nz*sizeof (double));
+ b_av = (double *)malloc((nz-1) * sizeof(double));
+ for (i=0; i<nz; i++) {
+ b[i] = gsw_specvol_anom_standard(sa[i],ct[i],p[i]);
+ }
+ for (i=0; i<(nz-1); i++) {
+ b_av[i] = 0.5*(b[i+1] + b[i]);
+ }
+ /* First calculate dynamic height relative to the first (shallowest)
+ depth. */
+ dyn_height[0] = 0.0;
+ for (i=1; i<nz; i++)
+ {
+ dyn_height[i] = dyn_height[i-1] - b_av[i-1]*dp[i-1]*db2pa;
+ }
+ /* Then subtract out the value at the reference pressure. */
+ dh_ref = dyn_height[ipref];
+ for (i=0; i<nz; i++)
+ {
+ dyn_height[i] -= dh_ref;
+ }
+ free(b);
+ free(b_av);
+ free(dp);
+ return (0);
+ }
+
+ /*
+ If we got this far, then we need to interpolate: either or both of
+ inserting a point for p_ref and subdividing the intervals to keep the max
+ interval less than max_dp_i.
+ */
+
+ free(dp); /* Need to recalculate, so free here and malloc when needed. */
+
+ ni_max = nz + (int) ceil((p[nz-1] - p[0]) / max_dp_i) + 2;
+ /* Maximum possible size of new grid: Original grid size plus
+ the number of dp intervals plus 1 for the p_ref,
+ plus 1 so that we can know we exited the loop before we hit it.
+ */
+
+ p_i = (double *) malloc(ni_max * sizeof(double));
+ p_indices = (int *) malloc(nz * sizeof(int));
+
+ n_i = refine_grid_for_dh(p, p_ref, nz, max_dp_i,
+ p_i, ni_max,
+ p_indices, &ipref);
+ /* Reminder: p_i and p_indices, allocated above, must still be freed. */
+ if (n_i == -1)
+ {
+ free(p_i);
+ free(p_indices);
+ return (4);
+ }
+
+ ct_i = (double *)malloc(n_i * sizeof(double));
+ sa_i = (double *)malloc(n_i * sizeof(double));
+
+ if (interp_method == INTERP_METHOD_LINEAR)
+ {
+ err = linear_interp_SA_CT_for_dh(sa, ct, p, nz,
+ p_i, n_i,
+ sa_i, ct_i);
+ if (err) err = 5;
+ }
+ else if (interp_method == INTERP_METHOD_PCHIP)
+ {
+ err = gsw_util_pchip_interp(p, sa, nz, p_i, sa_i, n_i);
+ err = err || gsw_util_pchip_interp(p, ct, nz, p_i, ct_i, n_i);
+ if (err) err = 6;
+ }
+ else
+ {
+ err = 7;
+ }
+ if (err)
+ {
+ free(p_i);
+ free(p_indices);
+ free(ct_i);
+ free(sa_i);
+ return (err);
+ }
+
+ dh_i = (double *)malloc(n_i * sizeof(double));
+ dp = (double *)malloc((n_i-1) * sizeof(double));
+ b = (double *)malloc(n_i*sizeof (double));
+ b_av = (double *)malloc((n_i-1) * sizeof(double));
+
+ for (i=0; i<n_i; i++)
+ {
+ b[i] = gsw_specvol_anom_standard(sa_i[i], ct_i[i], p_i[i]);
+ }
+ free(ct_i);
+ free(sa_i);
+
+ for (i=0; i<(n_i-1); i++)
+ {
+ b_av[i] = 0.5*(b[i+1] + b[i]);
+ dp[i] = p_i[i+1] - p_i[i];
+ }
+ free(p_i);
+ /* First calculate dynamic height relative to the first (shallowest)
+ depth. */
+ dh_i[0] = 0.0;
+ for (i=1; i<n_i; i++)
+ {
+ dh_i[i] = dh_i[i-1] - b_av[i-1]*dp[i-1]*db2pa;
+ }
+ free(b);
+ free(b_av);
+ free(dp);
+
+ dh_ref = dh_i[ipref];
+
+ for (i=0; i<nz; i++)
+ {
+ dyn_height[i] = dh_i[p_indices[i]] - dh_ref;
+ }
+ free(p_indices);
+ free(dh_i);
+
+ return 0;
}
/*
!==========================================================================
@@ -7859,20 +8216,31 @@ rr68_interp_section(int sectnum, double *sa, double *ct, double *p, int mp,
for (i=0; i<nsect; i++) {
ip_1[i] = floor(ip_sect[i]);
ip_2[i] = ceil(ip_sect[i]);
+ if (ip_1[i] == ip_2[i])
+ ip_2[i] = ip_1[i] + 1;
ip_3[i] = ip_2[i] + 1;
ip_4[i] = ip_3[i] + 1;
}
} else if (sectnum == 0) { /* central */
for (i=0; i<nsect; i++) {
ip_2[i] = floor(ip_sect[i]);
- ip_1[i] = ip_2[i] - 1;
ip_3[i] = ceil(ip_sect[i]);
+ if (ip_2[i] == ip_3[i])
+ ip_2[i] = ip_3[i] - 1;
+ ip_1[i] = ip_2[i] - 1;
+ if (ip_1[i] < 0) {
+ ip_1[i] = 0;
+ ip_2[i] = 1;
+ ip_3[i] = 2;
+ }
ip_4[i] = ip_3[i] + 1;
}
} else if (sectnum > 0) { /* deep */
for (i=0; i<nsect; i++) {
ip_1[i] = ceil(ip_sect[i]);
ip_2[i] = floor(ip_sect[i]);
+ if (ip_1[i] == ip_2[i])
+ ip_2[i] = ip_1[i] - 1;
ip_3[i] = ip_2[i] - 1;
ip_4[i] = ip_3[i] - 1;
}
@@ -10594,36 +10962,213 @@ gsw_util_linear_interp(int nx, double *x, int ny, double *y, int nxi,
free(j); free(xxi); free(k); free(xi);
return (y_i);
}
+/*******************************************************************************
+ Functions for pchip interpolation
+ (Piecewise Cubic Hermite Interpolating Polynomial)
+ based on
+ https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.PchipInterpolator.html#scipy.interpolate.PchipInterpolator
+
+ See references therein.
+ This is a shape-preserving algorithm, in the sense that it does not
+ overshoot the original points; extrema of the interpolated curve match
+ the extrema of the original points.
+*/
+
+#define sgn(x) (((x) > 0) ? 1 : (((x) < 0) ? -1 : 0))
+
+static double pchip_edge_case(double h0, double h1, double m0, double m1)
+{
+ double d;
+ int mask, mask2;
+
+ d = ((2*h0 + h1)*m0 - h0*m1) / (h0 + h1);
+ mask = sgn(d) != sgn(m0);
+ mask2 = (sgn(m0) != sgn(m1)) && (fabs(d) > 3.0*fabs(m0));
+ if (mask)
+ {
+ return 0.0;
+ }
+ if (!mask && mask2)
+ {
+ return 3.0*m0;
+ }
+ return d;
+}
+
+/*
+ Calculation of the derivatives is the key to the shape-preservation.
+ There are other algorithms that could be used, but this appears to be
+ the simplest, and adequate for our purposes.
+
+ At minimal computational cost, we include here a check for increasing x.
+
+ Returns 0 on success, 1 if x is not strictly increasing.
+*/
+static int pchip_derivs(double *x, double *y, int n,
+ double *d)
+{
+ double mm, mp; /* slopes bracketing x */
+ double hm, hp; /* bracketing delta-x values */
+ int smm, smp; /* slope signs */
+ double w1, w2;
+ int i;
+
+ if (n == 2)
+ {
+ d[0] = d[1] = (y[1] - y[0]) / (x[1] - x[0]);
+ return 0;
+ }
+
+ hm = x[1] - x[0];
+ hp = x[2] - x[1];
+ mm = (y[1] - y[0]) / hm;
+ mp = (y[2] - y[1]) / hp;
+ d[0] = pchip_edge_case(hm, hp, mm, mp);
+ smm = sgn(mm);
+ smp = sgn(mp);
+
+ for (i=1; i<n-1; i++)
+ {
+ if (hm <= 0)
+ {
+ return 1;
+ }
+ /* change of sign, or either slope is zero */
+ if ((smm != smp) || mp == 0 || mm == 0)
+ {
+ d[i] = 0.0;
+ }
+ else
+ {
+ w1 = 2*hp + hm;
+ w2 = hp + 2*hm;
+ d[i] = (w1 + w2) / (w1/mm + w2/mp);
+ }
+ if (i < n-2)
+ {
+ hm = hp;
+ hp = x[i+2] - x[i+1];
+ mm = mp;
+ mp = (y[i+2] - y[i+1]) / hp;
+ smm = smp;
+ smp = sgn(mp);
+ }
+ }
+ if (hp <= 0)
+ {
+ return 1;
+ }
+ d[n-1] = pchip_edge_case(hp, hm, mp, mm);
+ return 0;
+}
+
+/*************************************************************************
+ Piecewise-Hermite algorithm from
+ https://en.wikipedia.org/wiki/Cubic_Hermite_spline
+
+ Extrapolation to points outside the range is done by setting those
+ points to the corresponding end values.
+
+ The input x must be monotonically increasing; the interpolation points,
+ xi, may be in any order, but the algorithm will be faster if they are
+ monotonic, increasing or decreasing.
+
+ Returns 0 on success, 1 if it fails because there are fewer than 2 points,
+ 2 if it fails because x is not increasing.
+ Consistent with other GSW-C code at present, the memory allocations
+ are assumed to succeed.
+*/
+int gsw_util_pchip_interp(double *x, double *y, int n,
+ double *xi, double *yi, int ni)
+{
+ double *d;
+ double t, tt, ttt, xx, dx;
+ int i, j0, j1, err;
+ double h00, h10, h01, h11;
+
+ if (n<2)
+ {
+ return 1;
+ }
+ d = (double *)calloc(n, sizeof(double));
+ err = pchip_derivs(x, y, n, d);
+ if (err)
+ {
+ return 2;
+ }
+
+ j0 = 0;
+ for (i=0; i<ni; i++)
+ {
+ xx = xi[i];
+ /* Linear search is appropriate and probably optimal for the
+ expected primary use case of interpolation to a finer grid.
+ It is inefficient but still functional in the worst case of
+ randomly distributed xi.
+ */
+ while (xx < x[j0] && j0 > 0)
+ {
+ j0--;
+ }
+ while (xx > x[j0+1] && j0 < n - 2)
+ {
+ j0++;
+ }
+ j1 = j0 + 1;
+ if (xx >= x[j0] && xx <= x[j1])
+ {
+ dx = x[j1] - x[j0];
+ t = (xx - x[j0]) / dx;
+ tt = t * t;
+ ttt = tt * t;
+ /* Using intermediate variables for readability. */
+ h00 = (2*ttt - 3*tt + 1);
+ h10 = (ttt - 2*tt + t);
+ h01 = (-2*ttt + 3*tt);
+ h11 = (ttt - tt);
+ yi[i] = y[j0] * h00 + d[j0] * dx * h10 +
+ y[j1] * h01 + d[j1] * dx * h11;
+ }
+ else
+ {
+ /* extrapolate with constant end values */
+ yi[i] = (xx < x[0]) ? y[0] : y[n-1];
+ }
+ }
+ free(d);
+ return 0;
+}
+
+/*
+ End of the pchip interpolation.
+ *******************************
+*/
/*
pure function gsw_util_sort_real (rarray) result(iarray)
*/
+typedef struct {
+ double d;
+ int i;
+} DI;
-#if (defined __APPLE__ || defined __MACH__ || defined __DARWIN__ || \
- defined __FREEBSD__ || defined __BSD__ || \
- defined _WIN32 || defined _WIN64 || defined __WINDOWS__)
-static int
-compare(void *rarray, const void *p1, const void *p2)
-#else
-
-extern void qsort_r(void *, size_t, size_t, int (*)(const void *, const void *,
- void *), void *);
-static int
-compare(const void *p1, const void *p2, void *rarray)
-#endif
-{
- double *rdata = (double *) rarray;
- if (rdata[*(int *)p1] < rdata[*(int *)p2])
- return (-1);
- if (rdata[*(int *)p1] > rdata[*(int *)p2])
- return (1);
- /*
- ** Note that the library functions using this utility
- ** depend on the fact that for replicate values in rdata
- ** the indexes are returned in descending sequence.
- */
- if (*(int *)p1 < *(int *)p2)
- return (1);
- return (0);
+/*
+ * Rank two items, by value if possible,
+ * and by inverse index, if the values are
+ * equal.
+ * FIXME: decide if index method matches docs.
+ */
+int
+compareDI(const void *a, const void *b)
+{
+ DI *A = (DI*)a;
+ DI *B = (DI*)b;
+ if (A->d < B->d)
+ return (-1);
+ if (A->d > B->d)
+ return (1);
+ if (A->i < B->i)
+ return (1);
+ return (-1);
}
/*
@@ -10635,17 +11180,15 @@ void
gsw_util_sort_real(double *rarray, int nx, int *iarray)
{
int i;
-
+ DI* di = (DI*)malloc(nx*sizeof(DI));
+ for (i=0; i<nx; i++) {
+ di[i].d = rarray[i];
+ di[i].i = i;
+ }
+ qsort(di, nx, sizeof(DI), compareDI);
for (i=0; i<nx; i++)
- iarray[i] = i;
-#if (defined __APPLE__ || defined __MACH__ || defined __DARWIN__ || \
- defined __FREEBSD__ || defined __BSD__ )
- qsort_r(iarray, nx, sizeof (int), (void *)rarray, compare);
-#elif (defined _WIN32 || defined _WIN64 || defined __WINDOWS__)
- qsort_s(iarray, nx, sizeof (int), compare, (void *)rarray);
-#else
- qsort_r(iarray, nx, sizeof (int), compare, (void *)rarray);
-#endif
+ iarray[i] = di[i].i;
+ free(di);
}
/*
!==========================================================================
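The qsort-based replacement above keeps the documented tie behaviour: for
equal values, indices come out in descending order (see compareDI). A
one-line Python model of the same ordering, for illustration:

    def sort_real_indices(rarray):
        # Equal values compare by descending index, matching compareDI.
        return sorted(range(len(rarray)), key=lambda i: (rarray[i], -i))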
diff --git a/src2/c_gsw/gswteos-10.h b/src2/c_gsw/gswteos-10.h
index b6e8957..4927520 100644
--- a/src2/c_gsw/gswteos-10.h
+++ b/src2/c_gsw/gswteos-10.h
@@ -21,6 +21,9 @@ extern "C" {
#define GSW_INVALID_VALUE 9e15 /* error return from gsw_saar et al. */
#define GSW_ERROR_LIMIT 1e10
+#define INTERP_METHOD_LINEAR 1
+#define INTERP_METHOD_PCHIP 2
+
/*
** Prototypes:
*/
@@ -112,6 +115,9 @@ extern void gsw_frazil_ratios_adiabatic_poly(double sa, double p,
double *dct_dp_frazil);
extern double *gsw_geo_strf_dyn_height(double *sa, double *ct, double *p,
double p_ref, int n_levels, double *dyn_height);
+extern int gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p,
+ double p_ref, int n_levels, double *dyn_height,
+ double max_dp_i, int interp_method);
extern double *gsw_geo_strf_dyn_height_pc(double *sa, double *ct,
double *delta_p, int n_levels, double *geo_strf_dyn_height_pc,
double *p_mid);
@@ -284,6 +290,8 @@ extern double *gsw_util_linear_interp(int nx, double *x, int ny, double *y,
int nxi, double *x_i, double *y_i);
extern void gsw_util_sort_real(double *rarray, int nx, int *iarray);
extern double gsw_util_xinterp1(double *x, double *y, int n, double x0);
+extern int gsw_util_pchip_interp(double *x, double *y, int n,
+ double *xi, double *yi, int ni);
extern double gsw_z_from_p(double p, double lat);
extern double gsw_p_from_z(double z, double lat);
diff --git a/src2/c_gsw/gswteos-10_cpp.h b/src2/c_gsw/gswteos-10_cpp.h
index c39c8d4..2b2389b 100644
--- a/src2/c_gsw/gswteos-10_cpp.h
+++ b/src2/c_gsw/gswteos-10_cpp.h
@@ -21,6 +21,9 @@ extern "C" {
#define GSW_INVALID_VALUE 9e15 /* error return from gsw_saar et al. */
#define GSW_ERROR_LIMIT 1e10
+#define INTERP_METHOD_LINEAR 1
+#define INTERP_METHOD_PCHIP 2
+
/*
** Prototypes:
*/
@@ -112,6 +115,9 @@ extern void gsw_frazil_ratios_adiabatic_poly(double sa, double p,
double *dct_dp_frazil);
extern double *gsw_geo_strf_dyn_height(double *sa, double *ct, double *p,
double p_ref, int n_levels, double *dyn_height);
+extern int gsw_geo_strf_dyn_height_1(double *sa, double *ct, double *p,
+ double p_ref, int n_levels, double *dyn_height,
+ double max_dp_i, int interp_method);
extern double *gsw_geo_strf_dyn_height_pc(double *sa, double *ct,
double *delta_p, int n_levels, double *geo_strf_dyn_height_pc,
double *p_mid);
@@ -284,6 +290,8 @@ extern double *gsw_util_linear_interp(int nx, double *x, int ny, double *y,
int nxi, double *x_i, double *y_i);
extern void gsw_util_sort_real(double *rarray, int nx, int *iarray);
extern double gsw_util_xinterp1(double *x, double *y, int n, double x0);
+extern int gsw_util_pchip_interp(double *x, double *y, int n,
+ double *xi, double *yi, int ni);
extern double gsw_z_from_p(double p, double lat);
extern double gsw_p_from_z(double z, double lat);
diff --git a/src2/method_bodies.c b/src2/method_bodies.c
index 1dac9fb..130eea2 100644
--- a/src2/method_bodies.c
+++ b/src2/method_bodies.c
@@ -11,7 +11,7 @@ gsw_geo_strf_dyn_height(double *sa, double *ct, double *p, double p_ref,
static PyObject *
geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args)
{
- PyObject *sa_o, *ct_o, *p_o, *dh_o;
+ PyObject *sa_o, *ct_o, *p_o;
double p_ref;
PyArrayObject *sa_a, *ct_a, *p_a, *dh_a;
int n_levels;
@@ -38,7 +38,7 @@ geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args)
goto error;
}
- dh_a = PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0);
+ dh_a = (PyArrayObject *)PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0);
if (dh_a == NULL)
goto error;
@@ -67,3 +67,149 @@ geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args)
return (PyObject *)dh_a;
}
+
+
+static PyObject *
+geo_strf_dyn_height_1(PyObject *NPY_UNUSED(self), PyObject *args)
+{
+ PyObject *sa_o, *ct_o, *p_o;
+ double p_ref;
+ PyArrayObject *sa_a, *ct_a, *p_a, *dh_a;
+ int n_levels;
+ int ret = 1; /* error (1) until set to 0 by the C function */
+ double max_dp_i;
+ int interp_method;
+
+ if (!PyArg_ParseTuple(args, "OOOddi", &sa_o, &ct_o, &p_o, &p_ref,
+ &max_dp_i, &interp_method))
+ return NULL;
+
+ sa_a = (PyArrayObject *)PyArray_ContiguousFromAny(sa_o, NPY_DOUBLE, 1, 1);
+ if (sa_a == NULL)
+ return NULL;
+
+ ct_a = (PyArrayObject *)PyArray_ContiguousFromAny(ct_o, NPY_DOUBLE, 1, 1);
+ if (ct_a == NULL)
+ {
+ Py_DECREF(sa_a);
+ return NULL;
+ }
+ p_a = (PyArrayObject *)PyArray_ContiguousFromAny(p_o, NPY_DOUBLE, 1, 1);
+ if (p_a == NULL)
+ {
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ return NULL;
+ }
+
+ n_levels = PyArray_DIM(sa_a, 0);
+ if (PyArray_DIM(ct_a, 0) != n_levels || PyArray_DIM(p_a, 0) != n_levels)
+ {
+ PyErr_SetString(PyExc_ValueError,
+ "Arguments SA, CT, and p must have the same dimensions.");
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ Py_DECREF(p_a);
+ return NULL;
+ }
+
+ dh_a = (PyArrayObject *)PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0);
+ if (dh_a == NULL)
+ {
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ Py_DECREF(p_a);
+ return NULL;
+ }
+
+ ret = gsw_geo_strf_dyn_height_1((double *)PyArray_DATA(sa_a),
+ (double *)PyArray_DATA(ct_a),
+ (double *)PyArray_DATA(p_a),
+ p_ref,
+ n_levels,
+ (double *)PyArray_DATA(dh_a),
+ max_dp_i,
+ interp_method);
+ Py_DECREF(sa_a);
+ Py_DECREF(ct_a);
+ Py_DECREF(p_a);
+
+ if (ret)
+ {
+ PyErr_Format(PyExc_RuntimeError,
+ "gws_geo_strf_dyn_height_1 failed with code %d; check input arguments",
+ ret);
+ Py_DECREF(dh_a);
+ return NULL;
+ }
+ return (PyObject *)dh_a;
+}
+
+
+static PyObject *
+util_pchip_interp(PyObject *NPY_UNUSED(self), PyObject *args)
+{
+ PyObject *x, *y, *xi;
+ PyArrayObject *xa, *ya, *xia, *yia;
+ int n, ni;
+ int ret = 1; /* error (1) until set to 0 by the C function */
+
+ if (!PyArg_ParseTuple(args, "OOO", &x, &y, &xi))
+ return NULL;
+
+ xa = (PyArrayObject *)PyArray_ContiguousFromAny(x, NPY_DOUBLE, 1, 1);
+ if (xa == NULL)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to convert argument x");
+ return NULL;
+ }
+
+ ya = (PyArrayObject *)PyArray_ContiguousFromAny(y, NPY_DOUBLE, 1, 1);
+ if (ya == NULL)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to convert argument y");
+ Py_DECREF(xa);
+ return NULL;
+ }
+ n = PyArray_DIM(xa, 0);
+
+ xia = (PyArrayObject *)PyArray_ContiguousFromAny(xi, NPY_DOUBLE, 1, 1);
+ if (xia == NULL)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to convert argument xi");
+ Py_DECREF(xa);
+ Py_DECREF(ya);
+ return NULL;
+ }
+ ni = PyArray_DIM(xia, 0);
+
+ yia = (PyArrayObject *)PyArray_NewLikeArray(xia, NPY_CORDER, NULL, 0);
+ if (yia == NULL)
+ {
+ Py_DECREF(xa);
+ Py_DECREF(ya);
+ Py_DECREF(xia);
+ return NULL;
+ }
+
+ ret = gsw_util_pchip_interp((double *)PyArray_DATA(xa),
+ (double *)PyArray_DATA(ya),
+ n,
+ (double *)PyArray_DATA(xia),
+ (double *)PyArray_DATA(yia),
+ ni);
+
+ Py_DECREF(xa);
+ Py_DECREF(ya);
+ Py_DECREF(xia);
+ if (ret)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "gsw_util_pchip_interp failed; check input arguments");
+ return NULL;
+ }
+ return (PyObject *)yia;
+}
diff --git a/src2/method_def_entries.c b/src2/method_def_entries.c
index 5bfa770..0fe48fd 100644
--- a/src2/method_def_entries.c
+++ b/src2/method_def_entries.c
@@ -1,3 +1,7 @@
/* Entries in the GswMethods table. */
{"geo_strf_dyn_height", geo_strf_dyn_height, METH_VARARGS,
"geostrophic streamfunction dynamic height"},
+{"geo_strf_dyn_height_1", geo_strf_dyn_height_1, METH_VARARGS,
+ "geostrophic streamfunction dynamic height"},
+{"util_pchip_interp", util_pchip_interp, METH_VARARGS,
+ "PCHIP interpolation"},
diff --git a/versioneer.py b/versioneer.py
new file mode 100644
index 0000000..64fea1c
--- /dev/null
+++ b/versioneer.py
@@ -0,0 +1,1822 @@
+
+# Version: 0.18
+
+"""The Versioneer - like a rocketeer, but for versions.
+
+The Versioneer
+==============
+
+* like a rocketeer, but for versions!
+* https://github.com/warner/python-versioneer
+* Brian Warner
+* License: Public Domain
+* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
+* [![Latest Version]
+(https://pypip.in/version/versioneer/badge.svg?style=flat)
+](https://pypi.python.org/pypi/versioneer/)
+* [![Build Status]
+(https://travis-ci.org/warner/python-versioneer.png?branch=master)
+](https://travis-ci.org/warner/python-versioneer)
+
+This is a tool for managing a recorded version number in distutils-based
+python projects. The goal is to remove the tedious and error-prone "update
+the embedded version string" step from your release process. Making a new
+release should be as easy as recording a new tag in your version-control
+system, and maybe making new tarballs.
+
+
+## Quick Install
+
+* `pip install versioneer` to somewhere in your $PATH
+* add a `[versioneer]` section to your setup.cfg (see below)
+* run `versioneer install` in your source tree, commit the results
+
+## Version Identifiers
+
+Source trees come from a variety of places:
+
+* a version-control system checkout (mostly used by developers)
+* a nightly tarball, produced by build automation
+* a snapshot tarball, produced by a web-based VCS browser, like github's
+ "tarball from tag" feature
+* a release tarball, produced by "setup.py sdist", distributed through PyPI
+
+Within each source tree, the version identifier (either a string or a number,
+this tool is format-agnostic) can come from a variety of places:
+
+* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
+ about recent "tags" and an absolute revision-id
+* the name of the directory into which the tarball was unpacked
+* an expanded VCS keyword ($Id$, etc)
+* a `_version.py` created by some earlier build step
+
+For released software, the version identifier is closely related to a VCS
+tag. Some projects use tag names that include more than just the version
+string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
+needs to strip the tag prefix to extract the version identifier. For
+unreleased software (between tags), the version identifier should provide
+enough information to help developers recreate the same tree, while also
+giving them an idea of roughly how old the tree is (after version 1.2, before
+version 1.3). Many VCS systems can report a description that captures this,
+for example `git describe --tags --dirty --always` reports things like
+"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
+0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
+uncommitted changes).
+
+The version identifier is used for multiple purposes:
+
+* to allow the module to self-identify its version: `myproject.__version__`
+* to choose a name and prefix for a 'setup.py sdist' tarball
+
+## Theory of Operation
+
+Versioneer works by adding a special `_version.py` file into your source
+tree, where your `__init__.py` can import it. This `_version.py` knows how to
+dynamically ask the VCS tool for version information at import time.
+
+`_version.py` also contains `$Revision$` markers, and the installation
+process marks `_version.py` to have this marker rewritten with a tag name
+during the `git archive` command. As a result, generated tarballs will
+contain enough information to get the proper version.
+
+To allow `setup.py` to compute a version too, a `versioneer.py` is added to
+the top level of your source tree, next to `setup.py` and the `setup.cfg`
+that configures it. This overrides several distutils/setuptools commands to
+compute the version when invoked, and changes `setup.py build` and `setup.py
+sdist` to replace `_version.py` with a small static file that contains just
+the generated version data.
+
+## Installation
+
+See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
+
+## Version-String Flavors
+
+Code which uses Versioneer can learn about its version string at runtime by
+importing `_version` from your main `__init__.py` file and running the
+`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
+import the top-level `versioneer.py` and run `get_versions()`.
+
+Both functions return a dictionary with different flavors of version
+information:
+
+* `['version']`: A condensed version string, rendered using the selected
+ style. This is the most commonly used value for the project's version
+ string. The default "pep440" style yields strings like `0.11`,
+ `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
+ below for alternative styles.
+
+* `['full-revisionid']`: detailed revision identifier. For Git, this is the
+ full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
+
+* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
+ commit date in ISO 8601 format. This will be None if the date is not
+ available.
+
+* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
+ this is only accurate if run in a VCS checkout, otherwise it is likely to
+ be False or None
+
+* `['error']`: if the version string could not be computed, this will be set
+ to a string describing the problem, otherwise it will be None. It may be
+ useful to throw an exception in setup.py if this is set, to avoid e.g.
+ creating tarballs with a version string of "unknown".
+
+Some variants are more useful than others. Including `full-revisionid` in a
+bug report should allow developers to reconstruct the exact code being tested
+(or indicate the presence of local changes that should be shared with the
+developers). `version` is suitable for display in an "about" box or a CLI
+`--version` output: it can be easily compared against release notes and lists
+of bugs fixed in various releases.
+
+The installer adds the following text to your `__init__.py` to place a basic
+version in `YOURPROJECT.__version__`:
+
+ from ._version import get_versions
+ __version__ = get_versions()['version']
+ del get_versions
+
+## Styles
+
+The setup.cfg `style=` configuration controls how the VCS information is
+rendered into a version string.
+
+The default style, "pep440", produces a PEP440-compliant string, equal to the
+un-prefixed tag name for actual releases, and containing an additional "local
+version" section with more detail for in-between builds. For Git, this is
+TAG[+DISTANCE.gHEX[.dirty]], using information from `git describe --tags
+--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
+tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
+that this commit is two revisions ("+2") beyond the "0.11" tag. For released
+software (exactly equal to a known tag), the identifier will only contain the
+stripped tag, e.g. "0.11".
+
+Other styles are available. See [details.md](details.md) in the Versioneer
+source tree for descriptions.
+
+## Debugging
+
+Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
+to return a version of "0+unknown". To investigate the problem, run `setup.py
+version`, which will run the version-lookup code in a verbose mode, and will
+display the full contents of `get_versions()` (including the `error` string,
+which may help identify what went wrong).
+
+## Known Limitations
+
+Some situations are known to cause problems for Versioneer. This section
+details the most significant ones; more can be found on the GitHub
+[issues page](https://github.com/warner/python-versioneer/issues).
+
+### Subprojects
+
+Versioneer has limited support for source trees in which `setup.py` is not in
+the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
+two common reasons why `setup.py` might not be in the root:
+
+* Source trees which contain multiple subprojects, such as
+ [Buildbot](https://github.com/buildbot/buildbot), which contains both
+ "master" and "slave" subprojects, each with their own `setup.py`,
+ `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
+ distributions (and upload multiple independently-installable tarballs).
+* Source trees whose main purpose is to contain a C library, but which also
+ provide bindings to Python (and perhaps other languages) in subdirectories.
+
+Versioneer will look for `.git` in parent directories, and most operations
+should get the right version string. However `pip` and `setuptools` have bugs
+and implementation details which frequently cause `pip install .` from a
+subproject directory to fail to find a correct version string (so it usually
+defaults to `0+unknown`).
+
+`pip install --editable .` should work correctly. `setup.py install` might
+work too.
+
+Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
+some later version.
+
+[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+this issue. The discussion in
+[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+issue from the Versioneer side in more detail.
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
+[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
+pip to let Versioneer work correctly.
+
+Versioneer-0.16 and earlier only looked for a `.git` directory next to the
+`setup.cfg`, so subprojects were completely unsupported with those releases.
+
+### Editable installs with setuptools <= 18.5
+
+`setup.py develop` and `pip install --editable .` allow you to install a
+project into a virtualenv once, then continue editing the source code (and
+test) without re-installing after every change.
+
+"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
+convenient way to specify executable scripts that should be installed along
+with the python package.
+
+These both work as expected when using modern setuptools. When using
+setuptools-18.5 or earlier, however, certain operations will cause
+`pkg_resources.DistributionNotFound` errors when running the entrypoint
+script, which must be resolved by re-installing the package. This happens
+when the package is installed with one version, then the egg_info data is
+regenerated while a different version is checked out. Many setup.py commands
+cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
+a different virtualenv), so this can be surprising.
+
+[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+this one, but upgrading to a newer version of setuptools should probably
+resolve it.
+
+### Unicode version strings
+
+While Versioneer works (and is continually tested) with both Python 2 and
+Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
+Newer releases probably generate unicode version strings on py2. It's not
+clear that this is wrong, but it may be surprising for applications when they
+write these strings to a network connection or include them in bytes-oriented
+APIs like cryptographic checksums.
+
+[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
+this question.
+
+
+## Updating Versioneer
+
+To upgrade your project to a new release of Versioneer, do the following:
+
+* install the new Versioneer (`pip install -U versioneer` or equivalent)
+* edit `setup.cfg`, if necessary, to include any new configuration settings
+ indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install` in your source tree, to replace
+ `SRC/_version.py`
+* commit any changed files
+
+## Future Directions
+
+This tool is designed to make it easily extended to other version-control
+systems: all VCS-specific components are in separate directories like
+src/git/ . The top-level `versioneer.py` script is assembled from these
+components by running make-versioneer.py . In the future, make-versioneer.py
+will take a VCS name as an argument, and will construct a version of
+`versioneer.py` that is specific to the given VCS. It might also take the
+configuration arguments that are currently provided manually during
+installation by editing setup.py . Alternatively, it might go the other
+direction and include code from all supported VCS systems, reducing the
+number of intermediate scripts.
+
+
+## License
+
+To make Versioneer easier to embed, all its code is dedicated to the public
+domain. The `_version.py` that it creates is also in the public domain.
+Specifically, both are released under the Creative Commons "Public Domain
+Dedication" license (CC0-1.0), as described in
+https://creativecommons.org/publicdomain/zero/1.0/ .
+
+"""
+
+from __future__ import print_function
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+
+class VersioneerConfig:
+ """Container for Versioneer configuration parameters."""
+
+
+def get_root():
+ """Get the project root directory.
+
+ We require that all commands are run from the project root, i.e. the
+ directory that contains setup.py, setup.cfg, and versioneer.py .
+ """
+ root = os.path.realpath(os.path.abspath(os.getcwd()))
+ setup_py = os.path.join(root, "setup.py")
+ versioneer_py = os.path.join(root, "versioneer.py")
+ if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+ # allow 'python path/to/setup.py COMMAND'
+ root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
+ setup_py = os.path.join(root, "setup.py")
+ versioneer_py = os.path.join(root, "versioneer.py")
+ if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+ err = ("Versioneer was unable to run the project root directory. "
+ "Versioneer requires setup.py to be executed from "
+ "its immediate directory (like 'python setup.py COMMAND'), "
+ "or in a way that lets it use sys.argv[0] to find the root "
+ "(like 'python path/to/setup.py COMMAND').")
+ raise VersioneerBadRootError(err)
+ try:
+ # Certain runtime workflows (setup.py install/develop in a setuptools
+ # tree) execute all dependencies in a single python process, so
+ # "versioneer" may be imported multiple times, and python's shared
+ # module-import table will cache the first one. So we can't use
+ # os.path.dirname(__file__), as that will find whichever
+ # versioneer.py was first imported, even in later projects.
+ me = os.path.realpath(os.path.abspath(__file__))
+ me_dir = os.path.normcase(os.path.splitext(me)[0])
+ vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
+ if me_dir != vsr_dir:
+ print("Warning: build in %s is using versioneer.py from %s"
+ % (os.path.dirname(me), versioneer_py))
+ except NameError:
+ pass
+ return root
+
+
+def get_config_from_root(root):
+ """Read the project setup.cfg file to determine Versioneer config."""
+ # This might raise EnvironmentError (if setup.cfg is missing), or
+ # configparser.NoSectionError (if it lacks a [versioneer] section), or
+ # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
+ # the top of versioneer.py for instructions on writing your setup.cfg .
+ setup_cfg = os.path.join(root, "setup.cfg")
+ parser = configparser.SafeConfigParser()
+ with open(setup_cfg, "r") as f:
+ parser.readfp(f)
+ VCS = parser.get("versioneer", "VCS") # mandatory
+
+ def get(parser, name):
+ if parser.has_option("versioneer", name):
+ return parser.get("versioneer", name)
+ return None
+ cfg = VersioneerConfig()
+ cfg.VCS = VCS
+ cfg.style = get(parser, "style") or ""
+ cfg.versionfile_source = get(parser, "versionfile_source")
+ cfg.versionfile_build = get(parser, "versionfile_build")
+ cfg.tag_prefix = get(parser, "tag_prefix")
+ if cfg.tag_prefix in ("''", '""'):
+ cfg.tag_prefix = ""
+ cfg.parentdir_prefix = get(parser, "parentdir_prefix")
+ cfg.verbose = get(parser, "verbose")
+ return cfg
+
+
+class NotThisMethod(Exception):
+ """Exception raised if a method is not valid for the current scenario."""
+
+
+# these dictionaries contain VCS-specific tools
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method): # decorator
+ """Decorator to mark a method as the handler for a particular VCS."""
+ def decorate(f):
+ """Store f in HANDLERS[vcs][method]."""
+ if vcs not in HANDLERS:
+ HANDLERS[vcs] = {}
+ HANDLERS[vcs][method] = f
+ return f
+ return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
+ env=None):
+ """Call the given command(s)."""
+ assert isinstance(commands, list)
+ p = None
+ for c in commands:
+ try:
+ dispcmd = str([c] + args)
+ # remember shell=False, so use git.cmd on windows, not just git
+ p = subprocess.Popen([c] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None))
+ break
+ except EnvironmentError:
+ e = sys.exc_info()[1]
+ if e.errno == errno.ENOENT:
+ continue
+ if verbose:
+ print("unable to run %s" % dispcmd)
+ print(e)
+ return None, None
+ else:
+ if verbose:
+ print("unable to find command, tried %s" % (commands,))
+ return None, None
+ stdout = p.communicate()[0].strip()
+ if sys.version_info[0] >= 3:
+ stdout = stdout.decode()
+ if p.returncode != 0:
+ if verbose:
+ print("unable to run %s (error)" % dispcmd)
+ print("stdout was %s" % stdout)
+ return None, p.returncode
+ return stdout, p.returncode
+
+
+LONG_VERSION_PY['git'] = '''
+# This file helps to compute a version number in source trees obtained from
+# git-archive tarball (such as those provided by githubs download-from-tag
+# feature). Distribution tarballs (built by setup.py sdist) and build
+# directories (produced by setup.py build) will contain a much shorter file
+# that just contains the computed version number.
+
+# This file is released into the public domain. Generated by
+# versioneer-0.18 (https://github.com/warner/python-versioneer)
+
+"""Git implementation of _version.py."""
+
+import errno
+import os
+import re
+import subprocess
+import sys
+
+
+def get_keywords():
+ """Get the keywords needed to look up the version information."""
+ # these strings will be replaced by git during git-archive.
+ # setup.py/versioneer.py will grep for the variable names, so they must
+ # each be defined on a line of their own. _version.py will just call
+ # get_keywords().
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
+ git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
+ git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
+ keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
+ return keywords
+
+
+class VersioneerConfig:
+ """Container for Versioneer configuration parameters."""
+
+
+def get_config():
+ """Create, populate and return the VersioneerConfig() object."""
+ # these strings are filled in when 'setup.py versioneer' creates
+ # _version.py
+ cfg = VersioneerConfig()
+ cfg.VCS = "git"
+ cfg.style = "%(STYLE)s"
+ cfg.tag_prefix = "%(TAG_PREFIX)s"
+ cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
+ cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
+ cfg.verbose = False
+ return cfg
+
+
+class NotThisMethod(Exception):
+ """Exception raised if a method is not valid for the current scenario."""
+
+
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method): # decorator
+ """Decorator to mark a method as the handler for a particular VCS."""
+ def decorate(f):
+ """Store f in HANDLERS[vcs][method]."""
+ if vcs not in HANDLERS:
+ HANDLERS[vcs] = {}
+ HANDLERS[vcs][method] = f
+ return f
+ return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
+ env=None):
+ """Call the given command(s)."""
+ assert isinstance(commands, list)
+ p = None
+ for c in commands:
+ try:
+ dispcmd = str([c] + args)
+ # remember shell=False, so use git.cmd on windows, not just git
+ p = subprocess.Popen([c] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None))
+ break
+ except EnvironmentError:
+ e = sys.exc_info()[1]
+ if e.errno == errno.ENOENT:
+ continue
+ if verbose:
+ print("unable to run %%s" %% dispcmd)
+ print(e)
+ return None, None
+ else:
+ if verbose:
+ print("unable to find command, tried %%s" %% (commands,))
+ return None, None
+ stdout = p.communicate()[0].strip()
+ if sys.version_info[0] >= 3:
+ stdout = stdout.decode()
+ if p.returncode != 0:
+ if verbose:
+ print("unable to run %%s (error)" %% dispcmd)
+ print("stdout was %%s" %% stdout)
+ return None, p.returncode
+ return stdout, p.returncode
+
+
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+ """Try to determine the version from the parent directory name.
+
+ Source tarballs conventionally unpack into a directory that includes both
+ the project name and a version string. We will also support searching up
+ two directory levels for an appropriately named parent directory
+ """
+ rootdirs = []
+
+ for i in range(3):
+ dirname = os.path.basename(root)
+ if dirname.startswith(parentdir_prefix):
+ return {"version": dirname[len(parentdir_prefix):],
+ "full-revisionid": None,
+ "dirty": False, "error": None, "date": None}
+ else:
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
+
+ if verbose:
+ print("Tried directories %%s but none started with prefix %%s" %%
+ (str(rootdirs), parentdir_prefix))
+ raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+ """Extract version information from the given file."""
+ # the code embedded in _version.py can just fetch the value of these
+ # keywords. When used from setup.py, we don't want to import _version.py,
+ # so we do it with a regexp instead. This function is not used from
+ # _version.py.
+ keywords = {}
+ try:
+ f = open(versionfile_abs, "r")
+ for line in f.readlines():
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ f.close()
+ except EnvironmentError:
+ pass
+ return keywords
+
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+ """Get version information from git keywords."""
+ if not keywords:
+ raise NotThisMethod("no keywords at all, weird")
+ date = keywords.get("date")
+ if date is not None:
+ # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
+ # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
+ # -like" string, which we must then edit to make compliant), because
+ # it's been around since git-1.5.3, and it's too difficult to
+ # discover which version we're using, or to work around using an
+ # older one.
+ date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+ refnames = keywords["refnames"].strip()
+ if refnames.startswith("$Format"):
+ if verbose:
+ print("keywords are unexpanded, not using")
+ raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+ refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+ # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+ TAG = "tag: "
+ tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ if not tags:
+ # Either we're using git < 1.8.3, or there really are no tags. We use
+ # a heuristic: assume all version tags have a digit. The old git %%d
+ # expansion behaves like git log --decorate=short and strips out the
+ # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+ # between branches and tags. By ignoring refnames without digits, we
+ # filter out many common branch names like "release" and
+ # "stabilization", as well as "HEAD" and "master".
+ tags = set([r for r in refs if re.search(r'\d', r)])
+ if verbose:
+ print("discarding '%%s', no digits" %% ",".join(refs - tags))
+ if verbose:
+ print("likely tags: %%s" %% ",".join(sorted(tags)))
+ for ref in sorted(tags):
+ # sorting will prefer e.g. "2.0" over "2.0rc1"
+ if ref.startswith(tag_prefix):
+ r = ref[len(tag_prefix):]
+ if verbose:
+ print("picking %%s" %% r)
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None,
+ "date": date}
+ # no suitable tags, so version is "0+unknown", but full hex is still there
+ if verbose:
+ print("no suitable tags, using unknown + full revision id")
+ return {"version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": "no suitable tags", "date": None}
+
+
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+ """Get version from 'git describe' in the root of the source tree.
+
+ This only gets called if the git-archive 'subst' keywords were *not*
+ expanded, and _version.py hasn't already been rewritten with a short
+ version string, meaning we're inside a checked out source tree.
+ """
+ GITS = ["git"]
+ if sys.platform == "win32":
+ GITS = ["git.cmd", "git.exe"]
+
+ out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
+ if rc != 0:
+ if verbose:
+ print("Directory %%s not under git control" %% root)
+ raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+ # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+ # if there isn't one, this yields HEX[-dirty] (no NUM)
+ describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long",
+ "--match", "%%s*" %% tag_prefix],
+ cwd=root)
+ # --long was added in git-1.5.5
+ if describe_out is None:
+ raise NotThisMethod("'git describe' failed")
+ describe_out = describe_out.strip()
+ full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ if full_out is None:
+ raise NotThisMethod("'git rev-parse' failed")
+ full_out = full_out.strip()
+
+ pieces = {}
+ pieces["long"] = full_out
+ pieces["short"] = full_out[:7] # maybe improved later
+ pieces["error"] = None
+
+ # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+ # TAG might have hyphens.
+ git_describe = describe_out
+
+ # look for -dirty suffix
+ dirty = git_describe.endswith("-dirty")
+ pieces["dirty"] = dirty
+ if dirty:
+ git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+ # now we have TAG-NUM-gHEX or HEX
+
+ if "-" in git_describe:
+ # TAG-NUM-gHEX
+ mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ if not mo:
+ # unparseable. Maybe git-describe is misbehaving?
+ pieces["error"] = ("unable to parse git-describe output: '%%s'"
+ %% describe_out)
+ return pieces
+
+ # tag
+ full_tag = mo.group(1)
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%%s' doesn't start with prefix '%%s'"
+ print(fmt %% (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
+ %% (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+ # distance: number of commits since tag
+ pieces["distance"] = int(mo.group(2))
+
+ # commit: short hex revision ID
+ pieces["short"] = mo.group(3)
+
+ else:
+ # HEX: no tags
+ pieces["closest-tag"] = None
+ count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+ cwd=root)
+ pieces["distance"] = int(count_out) # total number of commits
+
+ # commit date: see ISO-8601 comment in git_versions_from_keywords()
+ date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
+ cwd=root)[0].strip()
+ pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+ return pieces
+
+
+def plus_or_dot(pieces):
+ """Return a + if we don't already have one, else return a ."""
+ if "+" in pieces.get("closest-tag", ""):
+ return "."
+ return "+"
+
+
+def render_pep440(pieces):
+ """Build up version string, with post-release "local version identifier".
+
+ Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+ get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+ Exceptions:
+ 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += plus_or_dot(pieces)
+ rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_pre(pieces):
+ """TAG[.post.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post.devDISTANCE
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += ".post.dev%%d" %% pieces["distance"]
+ else:
+ # exception #1
+ rendered = "0.post.dev%%d" %% pieces["distance"]
+ return rendered
+
+
+def render_pep440_post(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+ The ".dev0" means dirty. Note that .dev0 sorts backwards
+ (a dirty tree will appear "older" than the corresponding clean one),
+ but you shouldn't be releasing software with -dirty anyways.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%%s" %% pieces["short"]
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += "+g%%s" %% pieces["short"]
+ return rendered
+
+
+def render_pep440_old(pieces):
+ """TAG[.postDISTANCE[.dev0]] .
+
+ The ".dev0" means dirty.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ return rendered
+
+
+def render_git_describe(pieces):
+ """TAG[-DISTANCE-gHEX][-dirty].
+
+ Like 'git describe --tags --dirty --always'.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render_git_describe_long(pieces):
+ """TAG-DISTANCE-gHEX[-dirty].
+
+ Like 'git describe --tags --dirty --always --long'.
+ The distance/hash is unconditional.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render(pieces, style):
+ """Render the given version pieces into the requested style."""
+ if pieces["error"]:
+ return {"version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"],
+ "date": None}
+
+ if not style or style == "default":
+ style = "pep440" # the default
+
+ if style == "pep440":
+ rendered = render_pep440(pieces)
+ elif style == "pep440-pre":
+ rendered = render_pep440_pre(pieces)
+ elif style == "pep440-post":
+ rendered = render_pep440_post(pieces)
+ elif style == "pep440-old":
+ rendered = render_pep440_old(pieces)
+ elif style == "git-describe":
+ rendered = render_git_describe(pieces)
+ elif style == "git-describe-long":
+ rendered = render_git_describe_long(pieces)
+ else:
+ raise ValueError("unknown style '%%s'" %% style)
+
+ return {"version": rendered, "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"], "error": None,
+ "date": pieces.get("date")}
+
+
+def get_versions():
+ """Get version information or return default if unable to do so."""
+ # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+ # __file__, we can work backwards from there to the root. Some
+ # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+ # case we can only use expanded keywords.
+
+ cfg = get_config()
+ verbose = cfg.verbose
+
+ try:
+ return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
+ verbose)
+ except NotThisMethod:
+ pass
+
+ try:
+ root = os.path.realpath(__file__)
+ # versionfile_source is the relative path from the top of the source
+ # tree (where the .git directory might live) to this file. Invert
+ # this to find the root from __file__.
+ for i in cfg.versionfile_source.split('/'):
+ root = os.path.dirname(root)
+ except NameError:
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to find root of source tree",
+ "date": None}
+
+ try:
+ pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+ return render(pieces, cfg.style)
+ except NotThisMethod:
+ pass
+
+ try:
+ if cfg.parentdir_prefix:
+ return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+ except NotThisMethod:
+ pass
+
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to compute version", "date": None}
+'''
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+ """Extract version information from the given file."""
+ # the code embedded in _version.py can just fetch the value of these
+ # keywords. When used from setup.py, we don't want to import _version.py,
+ # so we do it with a regexp instead. This function is not used from
+ # _version.py.
+ keywords = {}
+ try:
+ f = open(versionfile_abs, "r")
+ for line in f.readlines():
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ f.close()
+ except EnvironmentError:
+ pass
+ return keywords
+
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+ """Get version information from git keywords."""
+ if not keywords:
+ raise NotThisMethod("no keywords at all, weird")
+ date = keywords.get("date")
+ if date is not None:
+ # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+ # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+ # -like" string, which we must then edit to make compliant), because
+ # it's been around since git-1.5.3, and it's too difficult to
+ # discover which version we're using, or to work around using an
+ # older one.
+ date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+ refnames = keywords["refnames"].strip()
+ if refnames.startswith("$Format"):
+ if verbose:
+ print("keywords are unexpanded, not using")
+ raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+ refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+ # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+ TAG = "tag: "
+ tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ if not tags:
+ # Either we're using git < 1.8.3, or there really are no tags. We use
+ # a heuristic: assume all version tags have a digit. The old git %d
+ # expansion behaves like git log --decorate=short and strips out the
+ # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+ # between branches and tags. By ignoring refnames without digits, we
+ # filter out many common branch names like "release" and
+ # "stabilization", as well as "HEAD" and "master".
+ tags = set([r for r in refs if re.search(r'\d', r)])
+ if verbose:
+ print("discarding '%s', no digits" % ",".join(refs - tags))
+ if verbose:
+ print("likely tags: %s" % ",".join(sorted(tags)))
+ for ref in sorted(tags):
+ # sorting will prefer e.g. "2.0" over "2.0rc1"
+ if ref.startswith(tag_prefix):
+ r = ref[len(tag_prefix):]
+ if verbose:
+ print("picking %s" % r)
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None,
+ "date": date}
+ # no suitable tags, so version is "0+unknown", but full hex is still there
+ if verbose:
+ print("no suitable tags, using unknown + full revision id")
+ return {"version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": "no suitable tags", "date": None}
+
+
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+ """Get version from 'git describe' in the root of the source tree.
+
+ This only gets called if the git-archive 'subst' keywords were *not*
+ expanded, and _version.py hasn't already been rewritten with a short
+ version string, meaning we're inside a checked out source tree.
+ """
+ GITS = ["git"]
+ if sys.platform == "win32":
+ GITS = ["git.cmd", "git.exe"]
+
+ out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
+ if rc != 0:
+ if verbose:
+ print("Directory %s not under git control" % root)
+ raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+ # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+ # if there isn't one, this yields HEX[-dirty] (no NUM)
+ describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long",
+ "--match", "%s*" % tag_prefix],
+ cwd=root)
+ # --long was added in git-1.5.5
+ if describe_out is None:
+ raise NotThisMethod("'git describe' failed")
+ describe_out = describe_out.strip()
+ full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ if full_out is None:
+ raise NotThisMethod("'git rev-parse' failed")
+ full_out = full_out.strip()
+
+ pieces = {}
+ pieces["long"] = full_out
+ pieces["short"] = full_out[:7] # maybe improved later
+ pieces["error"] = None
+
+ # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+ # TAG might have hyphens.
+ git_describe = describe_out
+
+ # look for -dirty suffix
+ dirty = git_describe.endswith("-dirty")
+ pieces["dirty"] = dirty
+ if dirty:
+ git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+ # now we have TAG-NUM-gHEX or HEX
+
+ if "-" in git_describe:
+ # TAG-NUM-gHEX
+ mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ if not mo:
+ # unparseable. Maybe git-describe is misbehaving?
+ pieces["error"] = ("unable to parse git-describe output: '%s'"
+ % describe_out)
+ return pieces
+
+ # tag
+ full_tag = mo.group(1)
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%s' doesn't start with prefix '%s'"
+ print(fmt % (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+ % (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+ # distance: number of commits since tag
+ pieces["distance"] = int(mo.group(2))
+
+ # commit: short hex revision ID
+ pieces["short"] = mo.group(3)
+
+ else:
+ # HEX: no tags
+ pieces["closest-tag"] = None
+ count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+ cwd=root)
+ pieces["distance"] = int(count_out) # total number of commits
+
+ # commit date: see ISO-8601 comment in git_versions_from_keywords()
+ date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
+ cwd=root)[0].strip()
+ pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+ return pieces
+
+
+def do_vcs_install(manifest_in, versionfile_source, ipy):
+ """Git-specific installation logic for Versioneer.
+
+ For Git, this means creating/changing .gitattributes to mark _version.py
+ for export-subst keyword substitution.
+ """
+ GITS = ["git"]
+ if sys.platform == "win32":
+ GITS = ["git.cmd", "git.exe"]
+ files = [manifest_in, versionfile_source]
+ if ipy:
+ files.append(ipy)
+ try:
+ me = __file__
+ if me.endswith(".pyc") or me.endswith(".pyo"):
+ me = os.path.splitext(me)[0] + ".py"
+ versioneer_file = os.path.relpath(me)
+ except NameError:
+ versioneer_file = "versioneer.py"
+ files.append(versioneer_file)
+ present = False
+ try:
+ f = open(".gitattributes", "r")
+ for line in f.readlines():
+ if line.strip().startswith(versionfile_source):
+ if "export-subst" in line.strip().split()[1:]:
+ present = True
+ f.close()
+ except EnvironmentError:
+ pass
+ if not present:
+ f = open(".gitattributes", "a+")
+ f.write("%s export-subst\n" % versionfile_source)
+ f.close()
+ files.append(".gitattributes")
+ run_command(GITS, ["add", "--"] + files)
+
+
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+ """Try to determine the version from the parent directory name.
+
+ Source tarballs conventionally unpack into a directory that includes both
+ the project name and a version string. We will also support searching up
+ two directory levels for an appropriately named parent directory
+ """
+ rootdirs = []
+
+ for i in range(3):
+ dirname = os.path.basename(root)
+ if dirname.startswith(parentdir_prefix):
+ return {"version": dirname[len(parentdir_prefix):],
+ "full-revisionid": None,
+ "dirty": False, "error": None, "date": None}
+ else:
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
+
+ if verbose:
+ print("Tried directories %s but none started with prefix %s" %
+ (str(rootdirs), parentdir_prefix))
+ raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+SHORT_VERSION_PY = """
+# This file was generated by 'versioneer.py' (0.18) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+%s
+''' # END VERSION_JSON
+
+
+def get_versions():
+ return json.loads(version_json)
+"""
+
+
+def versions_from_file(filename):
+ """Try to determine the version from _version.py if present."""
+ try:
+ with open(filename) as f:
+ contents = f.read()
+ except EnvironmentError:
+ raise NotThisMethod("unable to read _version.py")
+ mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
+ contents, re.M | re.S)
+ if not mo:
+ mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
+ contents, re.M | re.S)
+ if not mo:
+ raise NotThisMethod("no version_json in _version.py")
+ return json.loads(mo.group(1))
+
+
+def write_to_version_file(filename, versions):
+ """Write the given version number to the given _version.py file."""
+ os.unlink(filename)
+ contents = json.dumps(versions, sort_keys=True,
+ indent=1, separators=(",", ": "))
+ with open(filename, "w") as f:
+ f.write(SHORT_VERSION_PY % contents)
+
+ print("set %s to '%s'" % (filename, versions["version"]))
+
+
+def plus_or_dot(pieces):
+ """Return a + if we don't already have one, else return a ."""
+ if "+" in pieces.get("closest-tag", ""):
+ return "."
+ return "+"
+
+
+def render_pep440(pieces):
+ """Build up version string, with post-release "local version identifier".
+
+ Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+ get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+ Exceptions:
+ 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_pre(pieces):
+ """TAG[.post.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post.devDISTANCE
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += ".post.dev%d" % pieces["distance"]
+ else:
+ # exception #1
+ rendered = "0.post.dev%d" % pieces["distance"]
+ return rendered
+
+
+def render_pep440_post(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+ The ".dev0" means dirty. Note that .dev0 sorts backwards
+ (a dirty tree will appear "older" than the corresponding clean one),
+ but you shouldn't be releasing software with -dirty anyways.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ return rendered
+
+
+def render_pep440_old(pieces):
+ """TAG[.postDISTANCE[.dev0]] .
+
+ The ".dev0" means dirty.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ return rendered
+
+
+def render_git_describe(pieces):
+ """TAG[-DISTANCE-gHEX][-dirty].
+
+ Like 'git describe --tags --dirty --always'.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render_git_describe_long(pieces):
+ """TAG-DISTANCE-gHEX[-dirty].
+
+ Like 'git describe --tags --dirty --always --long'.
+ The distance/hash is unconditional.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render(pieces, style):
+ """Render the given version pieces into the requested style."""
+ if pieces["error"]:
+ return {"version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"],
+ "date": None}
+
+ if not style or style == "default":
+ style = "pep440" # the default
+
+ if style == "pep440":
+ rendered = render_pep440(pieces)
+ elif style == "pep440-pre":
+ rendered = render_pep440_pre(pieces)
+ elif style == "pep440-post":
+ rendered = render_pep440_post(pieces)
+ elif style == "pep440-old":
+ rendered = render_pep440_old(pieces)
+ elif style == "git-describe":
+ rendered = render_git_describe(pieces)
+ elif style == "git-describe-long":
+ rendered = render_git_describe_long(pieces)
+ else:
+ raise ValueError("unknown style '%s'" % style)
+
+ return {"version": rendered, "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"], "error": None,
+ "date": pieces.get("date")}
+
+
+class VersioneerBadRootError(Exception):
+ """The project root directory is unknown or missing key files."""
+
+
+def get_versions(verbose=False):
+ """Get the project version from whatever source is available.
+
+ Returns dict with two keys: 'version' and 'full'.
+ """
+ if "versioneer" in sys.modules:
+ # see the discussion in cmdclass.py:get_cmdclass()
+ del sys.modules["versioneer"]
+
+ root = get_root()
+ cfg = get_config_from_root(root)
+
+ assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
+ handlers = HANDLERS.get(cfg.VCS)
+ assert handlers, "unrecognized VCS '%s'" % cfg.VCS
+ verbose = verbose or cfg.verbose
+ assert cfg.versionfile_source is not None, \
+ "please set versioneer.versionfile_source"
+ assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
+
+ versionfile_abs = os.path.join(root, cfg.versionfile_source)
+
+ # extract version from first of: _version.py, VCS command (e.g. 'git
+ # describe'), parentdir. This is meant to work for developers using a
+ # source checkout, for users of a tarball created by 'setup.py sdist',
+ # and for users of a tarball/zipball created by 'git archive' or github's
+ # download-from-tag feature or the equivalent in other VCSes.
+
+ get_keywords_f = handlers.get("get_keywords")
+ from_keywords_f = handlers.get("keywords")
+ if get_keywords_f and from_keywords_f:
+ try:
+ keywords = get_keywords_f(versionfile_abs)
+ ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
+ if verbose:
+ print("got version from expanded keyword %s" % ver)
+ return ver
+ except NotThisMethod:
+ pass
+
+ try:
+ ver = versions_from_file(versionfile_abs)
+ if verbose:
+ print("got version from file %s %s" % (versionfile_abs, ver))
+ return ver
+ except NotThisMethod:
+ pass
+
+ from_vcs_f = handlers.get("pieces_from_vcs")
+ if from_vcs_f:
+ try:
+ pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
+ ver = render(pieces, cfg.style)
+ if verbose:
+ print("got version from VCS %s" % ver)
+ return ver
+ except NotThisMethod:
+ pass
+
+ try:
+ if cfg.parentdir_prefix:
+ ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+ if verbose:
+ print("got version from parentdir %s" % ver)
+ return ver
+ except NotThisMethod:
+ pass
+
+ if verbose:
+ print("unable to compute version")
+
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None, "error": "unable to compute version",
+ "date": None}
+
+
+def get_version():
+ """Get the short version string for this project."""
+ return get_versions()["version"]
+
+
+def get_cmdclass():
+ """Get the custom setuptools/distutils subclasses used by Versioneer."""
+ if "versioneer" in sys.modules:
+ del sys.modules["versioneer"]
+ # this fixes the "python setup.py develop" case (also 'install' and
+ # 'easy_install .'), in which subdependencies of the main project are
+ # built (using setup.py bdist_egg) in the same python process. Assume
+ # a main project A and a dependency B, which use different versions
+ # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
+ # sys.modules by the time B's setup.py is executed, causing B to run
+ # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
+ # sandbox that restores sys.modules to its pre-build state, so the
+ # parent is protected against the child's "import versioneer". By
+ # removing ourselves from sys.modules here, before the child build
+ # happens, we protect the child from the parent's versioneer too.
+ # Also see https://github.com/warner/python-versioneer/issues/52
+
+ cmds = {}
+
+ # we add "version" to both distutils and setuptools
+ from distutils.core import Command
+
+ class cmd_version(Command):
+ description = "report generated version string"
+ user_options = []
+ boolean_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ vers = get_versions(verbose=True)
+ print("Version: %s" % vers["version"])
+ print(" full-revisionid: %s" % vers.get("full-revisionid"))
+ print(" dirty: %s" % vers.get("dirty"))
+ print(" date: %s" % vers.get("date"))
+ if vers["error"]:
+ print(" error: %s" % vers["error"])
+ cmds["version"] = cmd_version
+
+ # we override "build_py" in both distutils and setuptools
+ #
+ # most invocation pathways end up running build_py:
+ # distutils/build -> build_py
+ # distutils/install -> distutils/build ->..
+ # setuptools/bdist_wheel -> distutils/install ->..
+ # setuptools/bdist_egg -> distutils/install_lib -> build_py
+ # setuptools/install -> bdist_egg ->..
+ # setuptools/develop -> ?
+ # pip install:
+ # copies source tree to a tempdir before running egg_info/etc
+ # if .git isn't copied too, 'git describe' will fail
+ # then does setup.py bdist_wheel, or sometimes setup.py install
+ # setup.py egg_info -> ?
+
+ # the "build_py" command we override differs between the two environments
+ if "setuptools" in sys.modules:
+ from setuptools.command.build_py import build_py as _build_py
+ else:
+ from distutils.command.build_py import build_py as _build_py
+
+ class cmd_build_py(_build_py):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ _build_py.run(self)
+ # now locate _version.py in the new build/ directory and replace
+ # it with an updated value
+ if cfg.versionfile_build:
+ target_versionfile = os.path.join(self.build_lib,
+ cfg.versionfile_build)
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+ cmds["build_py"] = cmd_build_py
+
+ if "cx_Freeze" in sys.modules: # cx_freeze enabled?
+ from cx_Freeze.dist import build_exe as _build_exe
+ # nczeczulin reports that py2exe won't like the pep440-style string
+ # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
+ # setup(console=[{
+ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
+ # "product_version": versioneer.get_version(),
+ # ...
+
+ class cmd_build_exe(_build_exe):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ target_versionfile = cfg.versionfile_source
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+
+ _build_exe.run(self)
+ os.unlink(target_versionfile)
+ with open(cfg.versionfile_source, "w") as f:
+ LONG = LONG_VERSION_PY[cfg.VCS]
+ f.write(LONG %
+ {"DOLLAR": "$",
+ "STYLE": cfg.style,
+ "TAG_PREFIX": cfg.tag_prefix,
+ "PARENTDIR_PREFIX": cfg.parentdir_prefix,
+ "VERSIONFILE_SOURCE": cfg.versionfile_source,
+ })
+ cmds["build_exe"] = cmd_build_exe
+ del cmds["build_py"]
+
+ if 'py2exe' in sys.modules: # py2exe enabled?
+ try:
+ from py2exe.distutils_buildexe import py2exe as _py2exe # py3
+ except ImportError:
+ from py2exe.build_exe import py2exe as _py2exe # py2
+
+ class cmd_py2exe(_py2exe):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ target_versionfile = cfg.versionfile_source
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+
+ _py2exe.run(self)
+ os.unlink(target_versionfile)
+ with open(cfg.versionfile_source, "w") as f:
+ LONG = LONG_VERSION_PY[cfg.VCS]
+ f.write(LONG %
+ {"DOLLAR": "$",
+ "STYLE": cfg.style,
+ "TAG_PREFIX": cfg.tag_prefix,
+ "PARENTDIR_PREFIX": cfg.parentdir_prefix,
+ "VERSIONFILE_SOURCE": cfg.versionfile_source,
+ })
+ cmds["py2exe"] = cmd_py2exe
+
+ # the "sdist" command we override differs between the two environments
+ if "setuptools" in sys.modules:
+ from setuptools.command.sdist import sdist as _sdist
+ else:
+ from distutils.command.sdist import sdist as _sdist
+
+ class cmd_sdist(_sdist):
+ def run(self):
+ versions = get_versions()
+ self._versioneer_generated_versions = versions
+ # unless we update this, the command will keep using the old
+ # version
+ self.distribution.metadata.version = versions["version"]
+ return _sdist.run(self)
+
+ def make_release_tree(self, base_dir, files):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ _sdist.make_release_tree(self, base_dir, files)
+ # now locate _version.py in the new base_dir directory
+ # (remembering that it may be a hardlink) and replace it with an
+ # updated value
+ target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile,
+ self._versioneer_generated_versions)
+ cmds["sdist"] = cmd_sdist
+
+ return cmds
+
+
+CONFIG_ERROR = """
+setup.cfg is missing the necessary Versioneer configuration. You need
+a section like:
+
+ [versioneer]
+ VCS = git
+ style = pep440
+ versionfile_source = src/myproject/_version.py
+ versionfile_build = myproject/_version.py
+ tag_prefix =
+ parentdir_prefix = myproject-
+
+You will also need to edit your setup.py to use the results:
+
+ import versioneer
+ setup(version=versioneer.get_version(),
+ cmdclass=versioneer.get_cmdclass(), ...)
+
+Please read the docstring in ./versioneer.py for configuration instructions,
+edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
+"""
+
+SAMPLE_CONFIG = """
+# See the docstring in versioneer.py for instructions. Note that you must
+# re-run 'versioneer.py setup' after changing this section, and commit the
+# resulting files.
+
+[versioneer]
+#VCS = git
+#style = pep440
+#versionfile_source =
+#versionfile_build =
+#tag_prefix =
+#parentdir_prefix =
+
+"""
+
+INIT_PY_SNIPPET = """
+from ._version import get_versions
+__version__ = get_versions()['version']
+del get_versions
+"""
+
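+# With the snippet installed, end users read the version directly
+# ('myproject' is a placeholder):
+#     import myproject
+#     myproject.__version__   # e.g. "1.2"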
+
+def do_setup():
+ """Main VCS-independent setup function for installing Versioneer."""
+ root = get_root()
+ try:
+ cfg = get_config_from_root(root)
+ except (EnvironmentError, configparser.NoSectionError,
+ configparser.NoOptionError) as e:
+ if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
+ print("Adding sample versioneer config to setup.cfg",
+ file=sys.stderr)
+ with open(os.path.join(root, "setup.cfg"), "a") as f:
+ f.write(SAMPLE_CONFIG)
+ print(CONFIG_ERROR, file=sys.stderr)
+ return 1
+
+ print(" creating %s" % cfg.versionfile_source)
+ with open(cfg.versionfile_source, "w") as f:
+ LONG = LONG_VERSION_PY[cfg.VCS]
+ f.write(LONG % {"DOLLAR": "$",
+ "STYLE": cfg.style,
+ "TAG_PREFIX": cfg.tag_prefix,
+ "PARENTDIR_PREFIX": cfg.parentdir_prefix,
+ "VERSIONFILE_SOURCE": cfg.versionfile_source,
+ })
+
+ ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
+ "__init__.py")
+ if os.path.exists(ipy):
+ try:
+ with open(ipy, "r") as f:
+ old = f.read()
+ except EnvironmentError:
+ old = ""
+ if INIT_PY_SNIPPET not in old:
+ print(" appending to %s" % ipy)
+ with open(ipy, "a") as f:
+ f.write(INIT_PY_SNIPPET)
+ else:
+ print(" %s unmodified" % ipy)
+ else:
+ print(" %s doesn't exist, ok" % ipy)
+ ipy = None
+
+ # Make sure both the top-level "versioneer.py" and versionfile_source
+ # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
+ # they'll be copied into source distributions. Pip won't be able to
+ # install the package without this.
+ manifest_in = os.path.join(root, "MANIFEST.in")
+ simple_includes = set()
+ try:
+ with open(manifest_in, "r") as f:
+ for line in f:
+ if line.startswith("include "):
+ for include in line.split()[1:]:
+ simple_includes.add(include)
+ except EnvironmentError:
+ pass
+ # That doesn't cover everything MANIFEST.in can do
+ # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
+ # it might give some false negatives. Appending redundant 'include'
+ # lines is safe, though.
+ if "versioneer.py" not in simple_includes:
+ print(" appending 'versioneer.py' to MANIFEST.in")
+ with open(manifest_in, "a") as f:
+ f.write("include versioneer.py\n")
+ else:
+ print(" 'versioneer.py' already in MANIFEST.in")
+ if cfg.versionfile_source not in simple_includes:
+ print(" appending versionfile_source ('%s') to MANIFEST.in" %
+ cfg.versionfile_source)
+ with open(manifest_in, "a") as f:
+ f.write("include %s\n" % cfg.versionfile_source)
+ else:
+ print(" versionfile_source already in MANIFEST.in")
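+ # After this block MANIFEST.in contains lines like (using the sample
+ # config shown in CONFIG_ERROR above):
+ #     include versioneer.py
+ #     include src/myproject/_version.py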
+
+ # Make VCS-specific changes. For git, this means creating/changing
+ # .gitattributes to mark _version.py for export-subst keyword
+ # substitution.
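+ # For git this typically appends a line like
+ #     src/myproject/_version.py export-subst
+ # to .gitattributes, so 'git archive' tarballs get the version keywords
+ # expanded (path above follows the sample config; sketch only).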
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
+ return 0
+
+
+def scan_setup_py():
+ """Validate the contents of setup.py against Versioneer's expectations."""
+ found = set()
+ setters = False
+ errors = 0
+ with open("setup.py", "r") as f:
+ for line in f.readlines():
+ if "import versioneer" in line:
+ found.add("import")
+ if "versioneer.get_cmdclass()" in line:
+ found.add("cmdclass")
+ if "versioneer.get_version()" in line:
+ found.add("get_version")
+ if "versioneer.VCS" in line:
+ setters = True
+ if "versioneer.versionfile_source" in line:
+ setters = True
+ if len(found) != 3:
+ print("")
+ print("Your setup.py appears to be missing some important items")
+ print("(but I might be wrong). Please make sure it has something")
+ print("roughly like the following:")
+ print("")
+ print(" import versioneer")
+ print(" setup( version=versioneer.get_version(),")
+ print(" cmdclass=versioneer.get_cmdclass(), ...)")
+ print("")
+ errors += 1
+ if setters:
+ print("You should remove lines like 'versioneer.VCS = ' and")
+ print("'versioneer.versionfile_source = ' . This configuration")
+ print("now lives in setup.cfg, and should be removed from setup.py")
+ print("")
+ errors += 1
+ return errors
+
+
+if __name__ == "__main__":
+ cmd = sys.argv[1]
+ if cmd == "setup":
+ errors = do_setup()
+ errors += scan_setup_py()
+ if errors:
+ sys.exit(1)
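+ # Typical invocation (sketch): 'python versioneer.py setup' runs
+ # do_setup() to install the version machinery into the current project,
+ # then scan_setup_py() to sanity-check setup.py; any errors exit 1.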
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/gsw.git