[segyio] 27/376: Converted C extensions from libcwrap to Python.h in addition to minor refactoring.
Jørgen Kvalsvik
jokva-guest at moszumanska.debian.org
Wed Sep 20 08:04:02 UTC 2017
This is an automated email from the git hooks/post-receive script.
jokva-guest pushed a commit to branch debian
in repository segyio.
commit 6e538a20f5f330cce04081ac5530f13c74a0ffc6
Author: Jean-Paul Balabanian <jepebe at users.noreply.github.com>
Date: Mon Oct 10 08:22:00 2016 +0200
Converted C extensions from libcwrap to Python.h in addition to minor refactoring.
---
CMakeLists.txt | 37 +-
examples/make-file.py | 2 +-
python/CMakeLists.txt | 1 -
python/cwrap/CMakeLists.txt | 10 -
python/cwrap/__init__.py | 5 -
python/cwrap/basecclass.py | 100 ----
python/cwrap/basecenum.py | 119 -----
python/cwrap/basecvalue.py | 47 --
python/cwrap/metacwrap.py | 46 --
python/cwrap/prototype.py | 147 ------
python/segyio/CMakeLists.txt | 5 +
python/segyio/__init__.py | 42 +-
python/segyio/_depth_plane.py | 18 +
python/segyio/_field.py | 81 +++
python/segyio/_header.py | 117 +++++
python/segyio/_line.py | 90 ++++
python/segyio/_segyio.c | 847 +++++++++++++++++++++++++++++++
python/segyio/_trace.py | 101 ++++
python/segyio/binfield.py | 102 ++--
python/segyio/create.py | 45 +-
python/segyio/open.py | 45 +-
python/segyio/segy.py | 968 +++++-------------------------------
python/segyio/segysampleformat.py | 36 +-
python/segyio/tracefield.py | 280 ++++-------
python/segyio/tracesortingformat.py | 16 +-
src/segyio/segy.c | 28 +-
src/segyio/segy.h | 5 +-
src/spec/segyspec.c | 2 +-
tests/CMakeLists.txt | 6 +-
tests/test_enum.py | 24 +
tests/test_segy.c | 4 +-
tests/test_segy.py | 282 ++++++-----
tests/test_segyio_c.py | 425 ++++++++++++++++
tests/unittest.h | 4 +-
34 files changed, 2309 insertions(+), 1778 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5b93f8e..a3e37e2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,12 +1,17 @@
cmake_minimum_required(VERSION 2.8)
project(segyio)
-option(BUILD_MEX "Build matlab mex files" OFF)
+if(POLICY CMP0042)
+ cmake_policy(SET CMP0042 NEW)
+endif()
+
+option(BUILD_MEX "Build Matlab mex files" OFF)
+option(BUILD_PYTHON "Build Python wrappers" ON)
include(cmake/python.cmake)
enable_testing()
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99")
+set(CMAKE_C_FLAGS "-std=c99 ${CMAKE_C_FLAGS}")
if (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
set (CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/install" CACHE PATH "Default install path" FORCE )
@@ -27,6 +32,31 @@ set_target_properties(segyio-static PROPERTIES COMPILE_FLAGS "-fPIC")
add_library(segyio-shared SHARED ${SOURCE_FILES})
set_target_properties(segyio-shared PROPERTIES OUTPUT_NAME segyio CLEAN_DIRECT_OUTPUT 1)
+
+if(BUILD_PYTHON)
+ find_package(PythonInterp)
+ find_package(PythonLibs REQUIRED)
+
+ if (PYTHONLIBS_FOUND)
+ include_directories(${PYTHON_INCLUDE_DIRS})
+ add_library(_segyio SHARED python/segyio/_segyio.c ${SOURCE_FILES})
+
+ if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+ set_target_properties(_segyio PROPERTIES LINK_FLAGS "-undefined dynamic_lookup")
+ else()
+ set_target_properties(_segyio PROPERTIES LINK_FLAGS "-Xlinker -export-dynamic")
+ endif()
+
+ set_target_properties(_segyio PROPERTIES PREFIX "" SUFFIX ".so")
+ install(TARGETS _segyio DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/python2.7/site-packages/segyio)
+
+ add_custom_command(TARGET _segyio POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy
+ ${CMAKE_BINARY_DIR}/_segyio.so ${CMAKE_BINARY_DIR}/python/segyio/_segyio.so)
+ endif()
+ add_subdirectory(python)
+endif()
+
install(TARGETS segyio-static segyio-shared DESTINATION ${CMAKE_INSTALL_PREFIX}/lib)
add_executable(segyinfo src/applications/segyinfo.c)
@@ -42,7 +72,7 @@ install(TARGETS segyinspect DESTINATION ${CMAKE_INSTALL_PREFIX}/bin)
if (BUILD_MEX)
add_subdirectory(mex)
- option(BUILD_MEX_TESTS "Build matlab mex tests" ON)
+ option(BUILD_MEX_TESTS "Build matlab mex tests" ON)
else (BUILD_MEX)
unset(MATLAB_MCC CACHE)
unset(MATLAB_MEX CACHE)
@@ -51,7 +81,6 @@ else (BUILD_MEX)
unset(BUILD_MEX_TESTS CACHE)
endif()
-add_subdirectory(python)
add_subdirectory(tests)
add_subdirectory(examples)
add_subdirectory(applications)
diff --git a/examples/make-file.py b/examples/make-file.py
index 8179b18..451de8f 100644
--- a/examples/make-file.py
+++ b/examples/make-file.py
@@ -33,7 +33,7 @@ def main():
trace = np.arange(start = start,
stop = start + step * spec.samples,
step = step,
- dtype = np.float32)
+ dtype = np.single)
# one inline is N traces concatenated. We fill in the xline number
line = np.concatenate([trace + (xl / 100.0) for xl in spec.xlines])
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
index adbf33a..69dade6 100644
--- a/python/CMakeLists.txt
+++ b/python/CMakeLists.txt
@@ -1,2 +1 @@
-add_subdirectory(cwrap)
add_subdirectory(segyio)
\ No newline at end of file
diff --git a/python/cwrap/CMakeLists.txt b/python/cwrap/CMakeLists.txt
deleted file mode 100644
index 4f70026..0000000
--- a/python/cwrap/CMakeLists.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYTHON_SOURCES
- __init__.py
- basecclass.py
- basecenum.py
- basecvalue.py
- metacwrap.py
- prototype.py
-)
-
-add_python_package(cwrap cwrap "${PYTHON_SOURCES}")
\ No newline at end of file
diff --git a/python/cwrap/__init__.py b/python/cwrap/__init__.py
deleted file mode 100644
index 788249c..0000000
--- a/python/cwrap/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from .metacwrap import MetaCWrap
-from .prototype import Prototype
-from .basecclass import BaseCClass
-from .basecenum import BaseCEnum
-from .basecvalue import BaseCValue
diff --git a/python/cwrap/basecclass.py b/python/cwrap/basecclass.py
deleted file mode 100644
index e2f9549..0000000
--- a/python/cwrap/basecclass.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import ctypes
-from cwrap import MetaCWrap
-
-
-class BaseCClass(object):
- __metaclass__ = MetaCWrap
-
- def __init__(self, c_pointer, parent=None, is_reference=False):
- if c_pointer == 0 or c_pointer is None:
- raise ValueError("Must have a valid (not null) pointer value!")
-
- if c_pointer < 0:
- raise ValueError("The pointer value is negative! This may be correct, but usually is not!")
-
- self.__c_pointer = c_pointer
- self.__parent = parent
- self.__is_reference = is_reference
-
- def __new__(cls, *more, **kwargs):
- obj = super(BaseCClass, cls).__new__(cls)
- obj.__c_pointer = None
- obj.__parent = None
- obj.__is_reference = False
-
- return obj
-
- @classmethod
- def from_param(cls, c_class_object):
- if c_class_object is not None and not isinstance(c_class_object, BaseCClass):
- raise ValueError("c_class_object must be a BaseCClass instance!")
-
- if c_class_object is None:
- return ctypes.c_void_p()
- else:
- return ctypes.c_void_p(c_class_object.__c_pointer)
-
- @classmethod
- def createPythonObject(cls, c_pointer):
- if c_pointer is not None:
- new_obj = cls.__new__(cls)
- BaseCClass.__init__(new_obj, c_pointer=c_pointer, parent=None, is_reference=False)
- return new_obj
- else:
- return None
-
- @classmethod
- def createCReference(cls, c_pointer, parent=None):
- if c_pointer is not None:
- new_obj = cls.__new__(cls)
- BaseCClass.__init__(new_obj, c_pointer=c_pointer, parent=parent, is_reference=True)
- return new_obj
- else:
- return None
-
- @classmethod
- def storageType(cls):
- return ctypes.c_void_p
-
- def convertToCReference(self, parent):
- self.__is_reference = True
- self.__parent = parent
-
-
- def setParent(self, parent=None):
- if self.__is_reference:
- self.__parent = parent
- else:
- raise UserWarning("Can only set parent on reference types!")
-
- return self
-
- def isReference(self):
- """ @rtype: bool """
- return self.__is_reference
-
- def parent(self):
- return self.__parent
-
- def __eq__(self, other):
- # This is the last resort comparison function; it will do a
- # plain pointer comparison on the underlying C object; or
- # Python is-same-object comparison.
- if isinstance(other, BaseCClass):
- return self.__c_pointer == other.__c_pointer
- else:
- return super(BaseCClass , self).__eq__(other)
-
-
- def free(self):
- raise NotImplementedError("A BaseCClass requires a free method implementation!")
-
-
-
- def __del__(self):
- if self.free is not None:
- if not self.__is_reference:
- # Important to check the c_pointer; in the case of failed object creation
- # we can have a Python object with c_pointer == None.
- if self.__c_pointer > 0:
- self.free()
diff --git a/python/cwrap/basecenum.py b/python/cwrap/basecenum.py
deleted file mode 100644
index 29178a4..0000000
--- a/python/cwrap/basecenum.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from cwrap import MetaCWrap, Prototype
-
-class BaseCEnum(object):
- __metaclass__ = MetaCWrap
- enum_namespace = {}
-
- def __init__(self, *args, **kwargs):
- if not self in self.enum_namespace[self.__class__]:
- raise NotImplementedError("Can not be instantiated directly!")
-
- def __new__(cls, *args, **kwargs):
- if len(args) == 1:
- enum = cls.__resolveEnum(args[0])
-
- if enum is None:
- raise ValueError("Unknown enum value: %i" % args[0])
-
- return enum
- else:
- obj = super(BaseCEnum, cls).__new__(cls, *args)
- obj.name = None
- obj.value = None
- return obj
-
- @classmethod
- def from_param(cls, c_class_object):
- if not isinstance(c_class_object, BaseCEnum):
- raise ValueError("c_class_object must be an BaseCEnum instance!")
- return c_class_object.value
-
- @classmethod
- def addEnum(cls, name, value):
- if not isinstance(value, int):
- raise ValueError("Value must be an integer!")
-
- enum = cls.__new__(cls)
- enum.name = name
- enum.value = value
-
- setattr(cls, name, enum)
-
- if not cls.enum_namespace.has_key(cls):
- cls.enum_namespace[cls] = []
-
- cls.enum_namespace[cls].append(enum)
-
- @classmethod
- def enums(cls):
- return list(cls.enum_namespace[cls])
-
- def __eq__(self, other):
- if isinstance(other, self.__class__):
- return self.value == other.value
-
- if isinstance(other, int):
- return self.value == other
-
- return False
-
- def __str__(self):
- return self.name
-
- def __add__(self, other):
- self.__assertOtherIsSameType(other)
- value = self.value + other.value
- return self.__resolveOrCreateEnum(value)
-
- def __or__(self, other):
- self.__assertOtherIsSameType(other)
- value = self.value | other.value
- return self.__resolveOrCreateEnum(value)
-
-
- def __xor__(self, other):
- self.__assertOtherIsSameType(other)
- value = self.value ^ other.value
- return self.__resolveOrCreateEnum(value)
-
- def __and__(self, other):
- self.__assertOtherIsSameType(other)
- value = self.value & other.value
- return self.__resolveOrCreateEnum(value)
-
- def __int__(self):
- return self.value
-
- def __contains__(self, item):
- return self & item == item
-
- @classmethod
- def __createEnum(cls, value):
- enum = cls.__new__(cls)
- enum.name = "Unnamed '%s' enum with value: %i" % (str(cls.__name__), value)
- enum.value = value
- return enum
-
- @classmethod
- def __resolveOrCreateEnum(cls, value):
- enum = cls.__resolveEnum(value)
-
- if enum is not None:
- return enum
-
- return cls.__createEnum(value)
-
- @classmethod
- def __resolveEnum(cls, value):
- for enum in cls.enum_namespace[cls]:
- if enum.value == value:
- return enum
- return None
-
- def __assertOtherIsSameType(self, other):
- assert isinstance(other, self.__class__), "Can only operate on enums of same type: %s =! %s" % (
- self.__class__.__name__, other.__class__.__name__)
-
- @classmethod
- def registerEnum(cls, library, enum_name):
- Prototype.registerType(enum_name, cls)
diff --git a/python/cwrap/basecvalue.py b/python/cwrap/basecvalue.py
deleted file mode 100644
index e885ac0..0000000
--- a/python/cwrap/basecvalue.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from ctypes import pointer, c_long, c_int, c_bool, c_float, c_double, c_byte, c_short, c_char, c_ubyte, c_ushort, c_uint, c_ulong, c_size_t
-
-from cwrap import MetaCWrap
-
-
-class BaseCValue(object):
- __metaclass__ = MetaCWrap
- DATA_TYPE = None
- LEGAL_TYPES = [c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, c_long, c_ulong, c_bool, c_char, c_float, c_double, c_size_t]
-
- def __init__(self, value):
- super(BaseCValue, self).__init__()
-
- if not self.DATA_TYPE in self.LEGAL_TYPES:
- raise ValueError("DATA_TYPE must be one of these CTypes classes: %s" % BaseCValue.LEGAL_TYPES)
-
- self.__value = self.cast(value)
-
-
- def value(self):
- return self.__value.value
-
- @classmethod
- def storageType(cls):
- return cls.type()
-
- @classmethod
- def type(cls):
- return cls.DATA_TYPE
-
- @classmethod
- def cast(cls, value):
- return cls.DATA_TYPE(value)
-
- def setValue(self, value):
- self.__value = self.cast(value)
-
- def asPointer(self):
- return pointer(self.__value)
-
- @classmethod
- def from_param(cls, c_value_object):
- if c_value_object is not None and not isinstance(c_value_object, BaseCValue):
- raise ValueError("c_class_object must be a BaseCValue instance!")
-
- return c_value_object.__value
-
diff --git a/python/cwrap/metacwrap.py b/python/cwrap/metacwrap.py
deleted file mode 100644
index da31e66..0000000
--- a/python/cwrap/metacwrap.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import re
-from types import MethodType
-
-from cwrap.prototype import registerType, Prototype
-
-
-def snakeCase(name):
- s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
- return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
-
-
-class MetaCWrap(type):
- def __init__(cls, name, bases, attrs):
- super(MetaCWrap, cls).__init__(name, bases, attrs)
-
- is_return_type = False
- storage_type = None
-
- if "TYPE_NAME" in attrs:
- type_name = attrs["TYPE_NAME"]
- else:
- type_name = snakeCase(name)
-
- if hasattr(cls, "DATA_TYPE") or hasattr(cls, "enums"):
- is_return_type = True
-
- if hasattr(cls, "storageType"):
- storage_type = cls.storageType()
-
- registerType(type_name, cls, is_return_type=is_return_type, storage_type=storage_type)
-
- if hasattr(cls, "createCReference"):
- registerType("%s_ref" % type_name, cls.createCReference, is_return_type=True, storage_type=storage_type)
-
- if hasattr(cls, "createPythonObject"):
- registerType("%s_obj" % type_name, cls.createPythonObject, is_return_type=True, storage_type=storage_type)
-
-
- for key, attr in attrs.items():
- if isinstance(attr, Prototype):
- attr.resolve()
- attr.__name__ = key
-
- if attr.shouldBeBound():
- method = MethodType(attr, None, cls)
- setattr(cls, key, method)
diff --git a/python/cwrap/prototype.py b/python/cwrap/prototype.py
deleted file mode 100644
index 34aaf82..0000000
--- a/python/cwrap/prototype.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import ctypes
-import inspect
-import re
-
-import sys
-
-class TypeDefinition(object):
- def __init__(self, type_class_or_function, is_return_type, storage_type):
- self.storage_type = storage_type
- self.is_return_type = is_return_type
- self.type_class_or_function = type_class_or_function
-
-
-REGISTERED_TYPES = {}
-""":type: dict[str,TypeDefinition]"""
-
-
-def registerType(type_name, type_class_or_function, is_return_type=True, storage_type=None):
- if type_name in REGISTERED_TYPES:
- raise PrototypeError("Type: '%s' already registered!" % type_name)
-
- REGISTERED_TYPES[type_name] = TypeDefinition(type_class_or_function, is_return_type, storage_type)
-
- # print("Registered: %s for class: %s" % (type_name, repr(type_class_or_function)))
-
-registerType("void", None)
-registerType("void*", ctypes.c_void_p)
-registerType("uint", ctypes.c_uint)
-registerType("uint*", ctypes.POINTER(ctypes.c_uint))
-registerType("int", ctypes.c_int)
-registerType("int*", ctypes.POINTER(ctypes.c_int))
-registerType("int64", ctypes.c_int64)
-registerType("int64*", ctypes.POINTER(ctypes.c_int64))
-registerType("size_t", ctypes.c_size_t)
-registerType("size_t*", ctypes.POINTER(ctypes.c_size_t))
-registerType("bool", ctypes.c_bool)
-registerType("bool*", ctypes.POINTER(ctypes.c_bool))
-registerType("long", ctypes.c_long)
-registerType("long*", ctypes.POINTER(ctypes.c_long))
-registerType("char", ctypes.c_char)
-registerType("char*", ctypes.c_char_p)
-registerType("char**", ctypes.POINTER(ctypes.c_char_p))
-registerType("float", ctypes.c_float)
-registerType("float*", ctypes.POINTER(ctypes.c_float))
-registerType("double", ctypes.c_double)
-registerType("double*", ctypes.POINTER(ctypes.c_double))
-registerType("py_object", ctypes.py_object)
-
-PROTOTYPE_PATTERN = "(?P<return>[a-zA-Z][a-zA-Z0-9_*]*) +(?P<function>[a-zA-Z]\w*) *[(](?P<arguments>[a-zA-Z0-9_*, ]*)[)]"
-
-class PrototypeError(Exception):
- pass
-
-
-class Prototype(object):
- pattern = re.compile(PROTOTYPE_PATTERN)
-
- def __init__(self, lib, prototype, bind=False):
- super(Prototype, self).__init__()
- self._lib = lib
- self._prototype = prototype
- self._bind = bind
- self._func = None
- self.__name__ = prototype
- self._resolved = False
-
-
- def _parseType(self, type_name):
- """Convert a prototype definition type from string to a ctypes legal type."""
- type_name = type_name.strip()
-
- if type_name in REGISTERED_TYPES:
- type_definition = REGISTERED_TYPES[type_name]
- return type_definition.type_class_or_function, type_definition.storage_type
- raise ValueError("Unknown type: %s" % type_name)
-
-
- def shouldBeBound(self):
- return self._bind
-
- def resolve(self):
- match = re.match(Prototype.pattern, self._prototype)
- if not match:
- raise PrototypeError("Illegal prototype definition: %s\n" % self._prototype)
- else:
- restype = match.groupdict()["return"]
- function_name = match.groupdict()["function"]
- self.__name__ = function_name
- arguments = match.groupdict()["arguments"].split(",")
-
- try:
- func = getattr(self._lib, function_name)
- except AttributeError:
- raise PrototypeError("Can not find function: %s in library: %s" % (function_name , self._lib))
-
- if not restype in REGISTERED_TYPES or not REGISTERED_TYPES[restype].is_return_type:
- sys.stderr.write("The type used as return type: %s is not registered as a return type.\n" % restype)
-
- return_type = self._parseType(restype)
-
- if inspect.isclass(return_type):
- sys.stderr.write(" Correct type may be: %s_ref or %s_obj.\n" % (restype, restype))
-
- return None
-
- return_type, storage_type = self._parseType(restype)
-
- func.restype = return_type
-
- if storage_type is not None:
- func.restype = storage_type
-
- def returnFunction(result, func, arguments):
- return return_type(result)
-
- func.errcheck = returnFunction
-
- if len(arguments) == 1 and arguments[0].strip() == "":
- func.argtypes = []
- else:
- argtypes = [self._parseType(arg)[0] for arg in arguments]
- if len(argtypes) == 1 and argtypes[0] is None:
- argtypes = []
- func.argtypes = argtypes
-
- self._func = func
-
-
- def __call__(self, *args):
- if not self._resolved:
- self.resolve()
- self._resolved = True
-
- if self._func is None:
- raise PrototypeError("Prototype has not been properly resolved!")
- return self._func(*args)
-
- def __repr__(self):
- bound = ""
- if self.shouldBeBound():
- bound = ", bind=True"
-
- return 'Prototype("%s"%s)' % (self._prototype, bound)
-
- @classmethod
- def registerType(cls, type_name, type_class_or_function, is_return_type=True, storage_type=None):
- registerType(type_name, type_class_or_function, is_return_type=is_return_type, storage_type=storage_type)
diff --git a/python/segyio/CMakeLists.txt b/python/segyio/CMakeLists.txt
index 7345f1a..c2a0671 100644
--- a/python/segyio/CMakeLists.txt
+++ b/python/segyio/CMakeLists.txt
@@ -1,5 +1,10 @@
set(PYTHON_SOURCES
__init__.py
+ _depth_plane.py
+ _header.py
+ _line.py
+ _field.py
+ _trace.py
segy.py
tracefield.py
binfield.py
diff --git a/python/segyio/__init__.py b/python/segyio/__init__.py
index b2996f0..55294d6 100644
--- a/python/segyio/__init__.py
+++ b/python/segyio/__init__.py
@@ -31,12 +31,52 @@ map to segy operations are written to behave similarly. That means that
sequences of data support list lookup, slicing (`f.trace[0:10:2]`), `for x in`
etc. Please refer to the individual mode's documentation for a more exhaustive
list with examples.
+
+For all slicing operations that segyio provides the underlying buffer is reused,
+so if you want to keep the data between iterations it is necessary to manually
+copy the data (e.g. with numpy.copy()). Please refer to the examples.
"""
+
+class Enum(object):
+ def __init__(self, enum_value):
+ super(Enum, self).__init__()
+ self._value = int(enum_value)
+
+ def __int__(self):
+ return int(self._value)
+
+ def __str__(self):
+ for k, v in self.__class__.__dict__.items():
+ if isinstance(v, int) and self._value == v:
+ return k
+ return "Unknown Enum"
+
+ def __hash__(self):
+ return hash(self._value)
+
+ def __eq__(self, other):
+ try:
+ o = int(other)
+ except ValueError:
+ return super(Enum, self).__eq__(other)
+ else:
+ return self._value == o
+
+ @classmethod
+ def enums(cls):
+ result = []
+ for k, v in cls.__dict__.items():
+ if isinstance(v, int):
+ result.append(cls(v))
+
+ return sorted(result, key=int)
+
+
from .segysampleformat import SegySampleFormat
from .tracesortingformat import TraceSortingFormat
from .tracefield import TraceField
from .binfield import BinField
from .open import open
from .create import create
-from .segy import file, spec
+from .segy import SegyFile, spec
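The buffer-reuse caveat in the docstring above is easy to trip over. A minimal sketch of the intended usage, not part of this commit (the file name and the defaulted open arguments are assumptions):

    import numpy as np
    import segyio

    f = segyio.open("cube.sgy")      # hypothetical file
    # every trace yielded by the slice points into one shared buffer,
    # so copy anything that must survive the next iteration
    kept = [np.copy(tr) for tr in f.trace[0:10:2]]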
diff --git a/python/segyio/_depth_plane.py b/python/segyio/_depth_plane.py
new file mode 100644
index 0000000..cccd173
--- /dev/null
+++ b/python/segyio/_depth_plane.py
@@ -0,0 +1,18 @@
+class DepthPlane:
+
+ def __init__(self, samples, sorting, read_fn):
+ self.samples = samples
+ self.sorting = sorting
+ self.read_fn = read_fn
+
+ def __getitem__(self, depth):
+ if isinstance(depth, int):
+ return self.read_fn(self.sorting, depth)
+ raise TypeError("Expected an int as index")
+
+ def __len__(self):
+ return len(self.samples)
+
+ def __iter__(self):
+ for i in xrange(self.samples):
+ yield self[i]
diff --git a/python/segyio/_field.py b/python/segyio/_field.py
new file mode 100644
index 0000000..145d5fb
--- /dev/null
+++ b/python/segyio/_field.py
@@ -0,0 +1,81 @@
+import segyio
+from segyio import BinField
+from segyio import TraceField
+
+
+class Field:
+ def __init__(self, buf, write, field_type, traceno=None):
+ self.buf = buf
+ self.traceno = traceno
+ self._get_field = segyio._segyio.get_field
+ self._set_field = segyio._segyio.set_field
+ self._field_type = field_type
+ self._write = write
+
+ def __getitem__(self, field):
+
+ # add some structure so we can always iterate over fields
+ if isinstance(field, int) or isinstance(field, self._field_type):
+ field = [field]
+
+ d = {self._field_type(f): self._get_field(self.buf, f) for f in field}
+
+ # unpack the dictionary. if header[field] is requested, a
+ # plain, unstructed output is expected, but header[f1,f2,f3]
+ # yields a dict
+ if len(d) == 1:
+ return d.values()[0]
+
+ return d
+
+ def __setitem__(self, field, val):
+ self._set_field(self.buf, field, val)
+ self._write(self.buf, self.traceno)
+
+ def update(self, value):
+ buf = self.buf
+ if isinstance(value, dict):
+ for k, v in value.items():
+ self._set_field(buf, int(k), v)
+ else:
+ buf = value.buf
+
+ self._write(buf, self.traceno)
+
+ @classmethod
+ def binary(cls, segy):
+ try:
+ buf = segyio._segyio.read_binaryheader(segy.xfd)
+ except IOError:
+ # the file was probably newly created and the binary header hasn't
+ # been written yet. if this is the case we want to try and write
+ # it. if the file was broken, permissions were wrong etc writing
+ # will fail too
+ buf = segyio._segyio.empty_binaryheader()
+
+ def wr(buf, *_):
+ segyio._segyio.write_binaryheader(segy.xfd, buf)
+
+ return Field(buf, write=wr, field_type=BinField)
+
+ @classmethod
+ def trace(cls, buf, traceno, segy):
+ if traceno >= segy.tracecount:
+ raise IndexError("Trace number out of range: 0 <= %d < %d" % (traceno, segy.tracecount))
+
+ if buf is None:
+ buf = segyio._segyio.empty_traceheader()
+
+ try:
+ segyio._segyio.read_traceheader(segy.xfd, traceno, buf, segy._tr0, segy._bsz)
+ except IOError:
+ # the file was probably newly created and the trace header hasn't
+ # been written yet. if this is the case we want to try and write
+ # it. if the file was broken, permissions were wrong etc writing
+ # will fail too
+ pass
+
+ def wr(buf, traceno):
+ segyio._segyio.write_traceheader(segy.xfd, traceno, buf, segy._tr0, segy._bsz)
+
+ return Field(buf, traceno=traceno, write=wr, field_type=TraceField)
\ No newline at end of file
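A sketch of how the new Field type is reached through the header mode, reusing `f` from the sketch above. The `f.header` property, the trace number and the TraceField member names are assumptions, not shown in this diff:

    from segyio import TraceField

    h = f.header[10]                                           # Field for trace header 10
    il = h[TraceField.INLINE_3D]                               # one field -> plain value
    both = h[TraceField.INLINE_3D, TraceField.CROSSLINE_3D]    # several fields -> dict
    f.header[10] = {TraceField.INLINE_3D: 2400}                # Field.update() writes back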
diff --git a/python/segyio/_header.py b/python/segyio/_header.py
new file mode 100644
index 0000000..a773dfd
--- /dev/null
+++ b/python/segyio/_header.py
@@ -0,0 +1,117 @@
+import itertools
+
+import segyio
+from segyio._line import Line
+from segyio._field import Field
+
+
+class Header(object):
+ def __init__(self, segy):
+ self.segy = segy
+
+ @staticmethod
+ def _header_buffer(buf=None):
+ if buf is None:
+ buf = segyio._segyio.empty_traceheader()
+ return buf
+
+ def __getitem__(self, traceno, buf=None):
+ if isinstance(traceno, tuple):
+ return self.__getitem__(traceno[0], traceno[1])
+
+ if isinstance(traceno, slice):
+ gen_buf = self._header_buffer(buf)
+
+ def gen():
+ for i in xrange(*traceno.indices(self.segy.tracecount)):
+ yield self.__getitem__(i, gen_buf)
+
+ return gen()
+
+ return Field.trace(buf, traceno=traceno, segy=self.segy)
+
+ def __setitem__(self, traceno, val):
+ buf = None
+
+ # library-provided loops can re-use a buffer for the lookup, even in
+ # __setitem__, so we might need to unpack the tuple to reuse the buffer
+ if isinstance(traceno, tuple):
+ buf = traceno[1]
+ traceno = traceno[0]
+
+ self.__getitem__(traceno, buf).update(val)
+
+ def __iter__(self):
+ return self[:]
+
+ def readfn(self, t0, length, stride, buf):
+ def gen():
+ start = t0
+ stop = t0 + (length * stride)
+ for i in xrange(start, stop, stride):
+ yield Field.trace(buf, traceno=i, segy=self.segy)
+
+ return gen()
+
+ def writefn(self, t0, length, stride, val):
+ start = t0
+ stop = t0 + (length * stride)
+
+ if isinstance(val, Field) or isinstance(val, dict):
+ val = itertools.repeat(val)
+
+ for i, x in itertools.izip(xrange(start, stop, stride), val):
+ self[i] = x
+
+ @property
+ def iline(self):
+ """:rtype: Line"""
+ segy = self.segy
+ length = segy._iline_length
+ stride = segy._iline_stride
+ lines = segy.ilines
+ other_lines = segy.xlines
+ buffn = self._header_buffer
+
+ return Line(segy, length, stride, lines, other_lines, buffn, self.readfn, self.writefn, "Inline")
+
+ @iline.setter
+ def iline(self, value):
+ """Write iterables to lines
+
+ Examples:
+ Supports writing to *all* inlines via assignment, regardless of
+ data source and format. Will respect the sample size and structure
+ of the file being assigned to, so if the argument traces are longer
+ than that of the file being written to the surplus data will be
+ ignored. Uses same rules for writing as `f.iline[i] = x`.
+ """
+ for i, src in itertools.izip(self.segy.ilines, value):
+ self.iline[i] = src
+
+ @property
+ def xline(self):
+ """:rtype: Line"""
+ segy = self.segy
+ length = segy._xline_length
+ stride = segy._xline_stride
+ lines = segy.xlines
+ other_lines = segy.ilines
+ buffn = self._header_buffer
+
+ return Line(segy, length, stride, lines, other_lines, buffn, self.readfn, self.writefn, "Crossline")
+
+ @xline.setter
+ def xline(self, value):
+ """Write iterables to lines
+
+ Examples:
+ Supports writing to *all* crosslines via assignment, regardless of
+ data source and format. Will respect the sample size and structure
+ of the file being assigned to, so if the argument traces are longer
+ than that of the file being written to the surplus data will be
+ ignored. Uses same rules for writing as `f.xline[i] = x`.
+ """
+
+ for i, src in itertools.izip(self.segy.xlines, value):
+ self.xline[i] = src
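Continuing the sketch, the iline/xline header setters above make bulk header writes a one-liner; the inline number 2400 is a made-up example, and a dict value is repeated over the whole line by writefn():

    # stamp the inline number into every trace header of inline 2400
    f.header.iline[2400] = {TraceField.INLINE_3D: 2400}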
diff --git a/python/segyio/_line.py b/python/segyio/_line.py
new file mode 100644
index 0000000..b20105e
--- /dev/null
+++ b/python/segyio/_line.py
@@ -0,0 +1,90 @@
+import itertools
+
+import segyio
+
+
+class Line:
+ """ Line mode for traces and trace headers. Internal.
+
+ The _line class provides an interface for line-oriented operations. The
+ line reading operations themselves are not streaming - it's assumed that
+ when a line is queried it's somewhat limited in size and will comfortably
+ fit in memory, and that the full line is interesting. This also applies to
+ line headers; however, all returned values support the iterable protocol so
+ they work fine together with the streaming bits of this library.
+
+ _line should not be instantiated directly by users, but rather returned
+ from the iline/xline properties of file or from the header mode. Any
+ direct construction of this should be considered an error.
+ """
+
+ def __init__(self, segy, length, stride, lines, other_lines, buffn, readfn, writefn, name):
+ self.segy = segy
+ self.len = length
+ self.stride = stride
+ self.lines = lines
+ self.other_lines = other_lines
+ self.name = name
+ self.buffn = buffn
+ self.readfn = readfn
+ self.writefn = writefn
+
+ def __getitem__(self, lineno, buf=None):
+ """ :rtype: numpy.ndarray|collections.Iterable[numpy.ndarray]"""
+ if isinstance(lineno, tuple):
+ return self.__getitem__(lineno[0], lineno[1])
+
+ buf = self.buffn(buf)
+
+ if isinstance(lineno, slice):
+ # in order to support [:end] syntax, we must make sure
+ # start has a non-None value. lineno.indices() would set it
+ # to 0, but we don't know if that's a reasonable value or
+ # not. If start is None we set it to the first line
+ if lineno.start is None:
+ lineno = slice(self.lines[0], lineno.stop, lineno.step)
+
+ def gen():
+ s = set(self.lines)
+ rng = xrange(*lineno.indices(self.lines[-1] + 1))
+
+ # use __getitem__ lookup to avoid tuple
+ # construction and unpacking and fast-forward
+ # into the interesting code path
+ for i in itertools.ifilter(s.__contains__, rng):
+ yield self.__getitem__(i, buf)
+
+ return gen()
+
+ else:
+ try:
+ lineno = int(lineno)
+ except TypeError:
+ raise TypeError("Must be int or slice")
+ else:
+ t0 = segyio._segyio.fread_trace0(lineno, len(self.other_lines), self.stride, self.lines, self.name)
+ return self.readfn(t0, self.len, self.stride, buf)
+
+ def __setitem__(self, lineno, val):
+ if isinstance(lineno, slice):
+ if lineno.start is None:
+ lineno = slice(self.lines[0], lineno.stop, lineno.step)
+
+ rng = xrange(*lineno.indices(self.lines[-1] + 1))
+ s = set(self.lines)
+
+ for i, x in itertools.izip(filter(s.__contains__, rng), val):
+ self.__setitem__(i, x)
+
+ return
+
+ t0 = segyio._segyio.fread_trace0(lineno, len(self.other_lines), self.stride, self.lines, self.name)
+ self.writefn(t0, self.len, self.stride, val)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __iter__(self):
+ buf = self.buffn()
+ for i in self.lines:
+ yield self.__getitem__(i, buf)
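The data-side counterpart of the same Line machinery, continuing the sketch above; `f.iline`, `f.xline` and `f.ilines` are the file properties the class docstring refers to, and the copies follow the buffer-reuse rule from the package docstring:

    first = f.iline[f.ilines[0]]                 # one inline -> numpy.ndarray
    every = [np.copy(x) for x in f.xline[:]]     # slice -> generator; copy to keep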
diff --git a/python/segyio/_segyio.c b/python/segyio/_segyio.c
new file mode 100644
index 0000000..43304a0
--- /dev/null
+++ b/python/segyio/_segyio.c
@@ -0,0 +1,847 @@
+#include <Python.h>
+#include "segyio/segy.h"
+#include <assert.h>
+
+// --------------- FILE Handling ------------
+static FILE *get_FILE_pointer_from_capsule(PyObject *capsule) {
+ if (!PyCapsule_IsValid(capsule, "FILE*")) {
+ PyErr_SetString(PyExc_TypeError, "The object was not of type FILE");
+ return NULL;
+ }
+
+ if(PyCapsule_GetDestructor(capsule) == NULL) {
+ PyErr_SetString(PyExc_IOError, "The file has already been closed");
+ return NULL;
+ }
+
+ FILE *p_FILE = PyCapsule_GetPointer(capsule, "FILE*");
+
+ if (!p_FILE) {
+ PyErr_SetString(PyExc_ValueError, "File Handle is NULL");
+ return NULL;
+ }
+ return p_FILE;
+}
+
+static void *py_FILE_destructor(PyObject *capsule) {
+#ifndef NDEBUG
+ fputs("FILE* destructed before calling close()\n", stderr);
+#endif
+ return NULL;
+}
+
+static PyObject *py_FILE_open(PyObject *self, PyObject *args) {
+ char *filename = NULL;
+ char *mode = NULL;
+ PyArg_ParseTuple(args, "ss", &filename, &mode);
+
+ FILE *p_FILE = fopen(filename, mode);
+
+ if (p_FILE == NULL) {
+ return PyErr_SetFromErrnoWithFilename(PyExc_IOError, filename);
+ }
+ return PyCapsule_New(p_FILE, "FILE*", (PyCapsule_Destructor) py_FILE_destructor);
+}
+
+static PyObject *py_FILE_close(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ PyArg_ParseTuple(args, "O", &file_capsule);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ fclose(p_FILE);
+
+ if (errno != 0) {
+ return PyErr_SetFromErrno(PyExc_IOError);
+ }
+
+ PyCapsule_SetDestructor(file_capsule, NULL);
+
+ return Py_BuildValue("");
+}
+
+static PyObject *py_FILE_flush(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ PyArg_ParseTuple(args, "O", &file_capsule);
+
+ FILE *p_FILE = NULL;
+ if(file_capsule != Py_None) {
+ p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+ }
+
+ fflush(p_FILE);
+
+ if (errno != 0) {
+ return PyErr_SetFromErrno(PyExc_IOError);
+ }
+
+ return Py_BuildValue("");
+}
+
+
+// ------------- ERROR Handling -------------
+struct error_args {
+ int error;
+ int errno_err;
+ int field_1;
+ int field_2;
+ int field_count;
+ const char *name;
+};
+
+static PyObject *py_handle_segy_error_(struct error_args args) {
+ switch (args.error) {
+ case SEGY_TRACE_SIZE_MISMATCH:
+ return PyErr_Format(PyExc_RuntimeError,
+ "Number of traces is not consistent with file size. File may be corrupt.");
+
+ case SEGY_INVALID_FIELD:
+ if (args.field_count == 1) {
+ return PyErr_Format(PyExc_IndexError, "Field value out of range: %d", args.field_1);
+ } else {
+ int inline_field = args.field_1;
+ int crossline_field = args.field_2;
+ return PyErr_Format(PyExc_IndexError, "Invalid inline (%d) or crossline (%d) field/byte offset. "
+ "Too large or between valid byte offsets.", inline_field, crossline_field);
+ }
+ case SEGY_INVALID_OFFSETS:
+ return PyErr_Format(PyExc_RuntimeError, "Found more offsets than traces. File may be corrupt.");
+
+ case SEGY_INVALID_SORTING:
+ return PyErr_Format(PyExc_RuntimeError, "Unable to determine sorting. File may be corrupt.");
+
+ case SEGY_INVALID_ARGS:
+ return PyErr_Format(PyExc_RuntimeError, "Input arguments are invalid.");
+
+ case SEGY_MISSING_LINE_INDEX:
+ return PyErr_Format(PyExc_KeyError, "%s number %d does not exist.", args.name, args.field_1);
+
+ default:
+ errno = args.errno_err;
+ return PyErr_SetFromErrno(PyExc_IOError);
+ }
+}
+
+static PyObject *py_handle_segy_error(int error, int errno_err) {
+ struct error_args args;
+ args.error = error;
+ args.errno_err = errno_err;
+ args.field_1 = 0;
+ args.field_2 = 0;
+ args.field_count = 0;
+ args.name = "";
+ return py_handle_segy_error_(args);
+}
+
+static PyObject *py_handle_segy_error_with_fields(int error, int errno_err, int field_1, int field_2, int field_count) {
+ struct error_args args;
+ args.error = error;
+ args.errno_err = errno_err;
+ args.field_1 = field_1;
+ args.field_2 = field_2;
+ args.field_count = field_count;
+ args.name = "";
+ return py_handle_segy_error_(args);
+}
+
+static PyObject *py_handle_segy_error_with_index_and_name(int error, int errno_err, int index, const char *name) {
+ struct error_args args;
+ args.error = error;
+ args.errno_err = errno_err;
+ args.field_1 = index;
+ args.field_2 = 0;
+ args.field_count = 1;
+ args.name = name;
+ return py_handle_segy_error_(args);
+}
+
+// ------------ Text Header -------------
+
+static PyObject *py_textheader_size(PyObject *self) {
+ return Py_BuildValue("i", segy_textheader_size());
+}
+
+static PyObject *py_read_texthdr(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ int index;
+
+ PyArg_ParseTuple(args, "Oi", &file_capsule, &index);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ char *buffer = malloc(sizeof(char) * (segy_textheader_size()));
+
+ int error = segy_read_textheader(p_FILE, buffer);
+
+ if (error != 0) {
+ free(buffer);
+ return PyErr_Format(PyExc_Exception, "Could not read text header: %s", strerror(errno));
+ }
+
+ PyObject *result = Py_BuildValue("s", buffer);
+ free(buffer);
+ return result;
+}
+
+static PyObject *py_write_texthdr(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ unsigned int index;
+ char *buffer;
+ int size;
+
+ PyArg_ParseTuple(args, "Ois#", &file_capsule, &index, &buffer, &size);
+
+ if (size < segy_textheader_size() - 1) {
+ return PyErr_Format(PyExc_ValueError, "String must have at least 3200 characters. Received count: %d", size);
+ }
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ int error = segy_write_textheader(p_FILE, index, buffer);
+
+ if (error == 0) {
+ return Py_BuildValue("");
+ } else {
+ return py_handle_segy_error(error, errno);
+ }
+}
+
+// ------------ Binary and Trace Header ------------
+static char *get_header_pointer_from_capsule(PyObject *capsule, unsigned int *length) {
+ if (PyCapsule_IsValid(capsule, "BinaryHeader=char*")) {
+ if (length) {
+ *length = segy_binheader_size();
+ }
+ return PyCapsule_GetPointer(capsule, "BinaryHeader=char*");
+
+ } else if (PyCapsule_IsValid(capsule, "TraceHeader=char*")) {
+ if (length) {
+ *length = SEGY_TRACE_HEADER_SIZE;
+ }
+ return PyCapsule_GetPointer(capsule, "TraceHeader=char*");
+ }
+ PyErr_SetString(PyExc_TypeError, "The object was not a header type");
+ return NULL;
+}
+
+
+static PyObject *py_get_field(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *header_capsule = NULL;
+ int field;
+
+ PyArg_ParseTuple(args, "Oi", &header_capsule, &field);
+
+ unsigned int length;
+ char *header = get_header_pointer_from_capsule(header_capsule, &length);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ int value;
+ int error;
+ if (length == segy_binheader_size()) {
+ error = segy_get_bfield(header, field, &value);
+ } else {
+ error = segy_get_field(header, field, &value);
+ }
+
+ if (error == 0) {
+ return Py_BuildValue("i", value);
+ } else {
+ return py_handle_segy_error_with_fields(error, errno, field, 0, 1);
+ }
+}
+
+static PyObject *py_set_field(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *header_capsule = NULL;
+ int field;
+ int value;
+
+ PyArg_ParseTuple(args, "Oii", &header_capsule, &field, &value);
+
+ unsigned int length;
+ char *header = get_header_pointer_from_capsule(header_capsule, &length);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ int error;
+ if (length == segy_binheader_size()) {
+ error = segy_set_bfield(header, field, value);
+ } else {
+ error = segy_set_field(header, field, value);
+ }
+
+ if (error == 0) {
+ return Py_BuildValue("");
+ } else {
+ return py_handle_segy_error_with_fields(error, errno, field, 0, 1);
+ }
+}
+
+// ------------ Binary Header -------------
+static PyObject *py_binheader_size(PyObject *self) {
+ return Py_BuildValue("i", segy_binheader_size());
+}
+
+static void *py_binary_header_destructor(PyObject *capsule) {
+ char *binary_header = get_header_pointer_from_capsule(capsule, NULL);
+ free(binary_header);
+ return NULL;
+}
+
+static PyObject *py_empty_binaryhdr(PyObject *self) {
+ errno = 0;
+ char *buffer = calloc(segy_binheader_size(), sizeof(char));
+ return PyCapsule_New(buffer, "BinaryHeader=char*", (PyCapsule_Destructor) py_binary_header_destructor);
+}
+
+static PyObject *py_read_binaryhdr(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+
+ PyArg_ParseTuple(args, "O", &file_capsule);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ char *buffer = malloc(sizeof(char) * (segy_binheader_size()));
+
+ int error = segy_binheader(p_FILE, buffer);
+
+ if (error == 0) {
+ return PyCapsule_New(buffer, "BinaryHeader=char*", (PyCapsule_Destructor) py_binary_header_destructor);
+ } else {
+ free(buffer);
+ return py_handle_segy_error(error, errno);
+ }
+}
+
+static PyObject *py_write_binaryhdr(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ PyObject *binary_header_capsule = NULL;
+
+ PyArg_ParseTuple(args, "OO", &file_capsule, &binary_header_capsule);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+ char *binary_header = get_header_pointer_from_capsule(binary_header_capsule, NULL);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ int error = segy_write_binheader(p_FILE, binary_header);
+
+ if (error == 0) {
+ return Py_BuildValue("");
+ } else {
+ return py_handle_segy_error(error, errno);
+ }
+}
+
+// -------------- Trace Headers ----------
+static char *get_trace_header_pointer_from_capsule(PyObject *capsule) {
+ if (!PyCapsule_IsValid(capsule, "TraceHeader=char*")) {
+ PyErr_Format(PyExc_TypeError, "The object was not of type TraceHeader.");
+ return NULL;
+ }
+ return PyCapsule_GetPointer(capsule, "TraceHeader=char*");
+}
+
+static void *py_trace_header_destructor(PyObject *capsule) {
+ char *trace_header = get_trace_header_pointer_from_capsule(capsule);
+ free(trace_header);
+ return NULL;
+}
+
+static PyObject *py_empty_trace_header(PyObject *self) {
+ errno = 0;
+ char *buffer = calloc(SEGY_TRACE_HEADER_SIZE, sizeof(char));
+ return PyCapsule_New(buffer, "TraceHeader=char*", (PyCapsule_Destructor) py_trace_header_destructor);
+}
+
+static PyObject *py_read_trace_header(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ unsigned int traceno;
+ PyObject *trace_header_capsule = NULL;
+ long trace0;
+ unsigned int trace_bsize;
+
+ PyArg_ParseTuple(args, "OIOlI", &file_capsule, &traceno, &trace_header_capsule, &trace0, &trace_bsize);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ char *buffer = get_trace_header_pointer_from_capsule(trace_header_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ int error = segy_traceheader(p_FILE, traceno, buffer, trace0, trace_bsize);
+
+ if (error == 0) {
+ Py_IncRef(trace_header_capsule);
+ return trace_header_capsule;
+ } else {
+ return py_handle_segy_error(error, errno);
+ }
+}
+
+static PyObject *py_write_trace_header(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ unsigned int traceno;
+ PyObject *trace_header_capsule = NULL;
+ long trace0;
+ unsigned int trace_bsize;
+
+ PyArg_ParseTuple(args, "OIOlI", &file_capsule, &traceno, &trace_header_capsule, &trace0, &trace_bsize);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ char *buffer = get_trace_header_pointer_from_capsule(trace_header_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ int error = segy_write_traceheader(p_FILE, traceno, buffer, trace0, trace_bsize);
+
+ if (error == 0) {
+ return Py_BuildValue("");
+ } else {
+ return py_handle_segy_error(error, errno);
+ }
+}
+
+static PyObject *py_trace_bsize(PyObject *self, PyObject *args) {
+ errno = 0;
+ unsigned int sample_count;
+
+ PyArg_ParseTuple(args, "I", &sample_count);
+
+ unsigned int byte_count = segy_trace_bsize(sample_count);
+
+ return Py_BuildValue("I", byte_count);
+}
+
+
+static PyObject *py_init_line_metrics(PyObject *self, PyObject *args) {
+ errno = 0;
+ SEGY_SORTING sorting;
+ unsigned int trace_count;
+ unsigned int inline_count;
+ unsigned int crossline_count;
+ unsigned int offset_count;
+
+ PyArg_ParseTuple(args, "iIIII", &sorting, &trace_count, &inline_count, &crossline_count, &offset_count);
+
+ unsigned int iline_length;
+ int error = segy_inline_length(sorting, trace_count, crossline_count, offset_count, &iline_length);
+
+ //Only check first call since the only error that can occur is SEGY_INVALID_SORTING
+ if (error != 0) {
+ return py_handle_segy_error(error, errno);
+ }
+
+ unsigned int xline_length;
+ segy_crossline_length(sorting, trace_count, inline_count, offset_count, &xline_length);
+
+ unsigned int iline_stride;
+ segy_inline_stride(sorting, inline_count, &iline_stride);
+
+ unsigned int xline_stride;
+ segy_crossline_stride(sorting, crossline_count, &xline_stride);
+
+ PyObject *dict = PyDict_New();
+ PyDict_SetItemString(dict, "xline_length", Py_BuildValue("I", xline_length));
+ PyDict_SetItemString(dict, "xline_stride", Py_BuildValue("I", xline_stride));
+ PyDict_SetItemString(dict, "iline_length", Py_BuildValue("I", iline_length));
+ PyDict_SetItemString(dict, "iline_stride", Py_BuildValue("I", iline_stride));
+
+ return Py_BuildValue("O", dict);
+}
+
+
+static PyObject *py_init_metrics(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ PyObject *binary_header_capsule = NULL;
+ int il_field;
+ int xl_field;
+
+ PyArg_ParseTuple(args, "OOii", &file_capsule, &binary_header_capsule, &il_field, &xl_field);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ char *binary_header = get_header_pointer_from_capsule(binary_header_capsule, NULL);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ long trace0 = segy_trace0(binary_header);
+ unsigned int sample_count = segy_samples(binary_header);
+ int format = segy_format(binary_header);
+ unsigned int trace_bsize = segy_trace_bsize(sample_count);
+
+ int sorting;
+ int error = segy_sorting(p_FILE, il_field, xl_field, &sorting, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_fields(error, errno, il_field, xl_field, 2);
+ }
+
+ size_t trace_count;
+ error = segy_traces(p_FILE, &trace_count, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error(error, errno);
+ }
+
+ unsigned int offset_count;
+ error = segy_offsets(p_FILE, il_field, xl_field, (unsigned int) trace_count, &offset_count, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_fields(error, errno, il_field, xl_field, 2);
+ }
+
+ int field;
+ unsigned int xl_count;
+ unsigned int il_count;
+ unsigned int *l1out;
+ unsigned int *l2out;
+
+ if (sorting == CROSSLINE_SORTING) {
+ field = il_field;
+ l1out = &xl_count;
+ l2out = &il_count;
+ } else if (sorting == INLINE_SORTING) {
+ field = xl_field;
+ l1out = &il_count;
+ l2out = &xl_count;
+ } else {
+ return PyErr_Format(PyExc_RuntimeError, "Unable to determine sorting. File may be corrupt.");
+ }
+
+ error = segy_count_lines(p_FILE, field, offset_count, l1out, l2out, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_fields(error, errno, il_field, xl_field, 2);
+ }
+
+ PyObject *dict = PyDict_New();
+ PyDict_SetItemString(dict, "iline_field", Py_BuildValue("i", il_field));
+ PyDict_SetItemString(dict, "xline_field", Py_BuildValue("i", xl_field));
+ PyDict_SetItemString(dict, "trace0", Py_BuildValue("l", trace0));
+ PyDict_SetItemString(dict, "sample_count", Py_BuildValue("I", sample_count));
+ PyDict_SetItemString(dict, "format", Py_BuildValue("i", format));
+ PyDict_SetItemString(dict, "trace_bsize", Py_BuildValue("I", trace_bsize));
+ PyDict_SetItemString(dict, "sorting", Py_BuildValue("i", sorting));
+ PyDict_SetItemString(dict, "trace_count", Py_BuildValue("k", trace_count));
+ PyDict_SetItemString(dict, "offset_count", Py_BuildValue("I", offset_count));
+ PyDict_SetItemString(dict, "iline_count", Py_BuildValue("I", il_count));
+ PyDict_SetItemString(dict, "xline_count", Py_BuildValue("I", xl_count));
+
+ return Py_BuildValue("O", dict);
+}
+
+static Py_buffer check_and_get_buffer(PyObject *object, const char *name, unsigned int expected) {
+ Py_buffer buffer;
+ if (!PyObject_CheckBuffer(object)) {
+ PyErr_Format(PyExc_TypeError, "The destination for %s is not a buffer object", name);
+ return buffer;
+ }
+ PyObject_GetBuffer(object, &buffer, PyBUF_FORMAT | PyBUF_C_CONTIGUOUS | PyBUF_WRITEABLE);
+
+ if (strcmp(buffer.format, "I") != 0) {
+ PyErr_Format(PyExc_TypeError, "The destination for %s is not a buffer object of type 'uintc'", name);
+ PyBuffer_Release(&buffer);
+ return buffer;
+ }
+
+ if (buffer.len < expected * sizeof(unsigned int)) {
+ PyErr_Format(PyExc_ValueError, "The destination for %s is too small. ", name);
+ PyBuffer_Release(&buffer);
+ return buffer;
+ }
+
+ return buffer;
+}
+
+
+static PyObject *py_init_line_indices(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ PyObject *metrics = NULL;
+ PyObject *iline_out = NULL;
+ PyObject *xline_out = NULL;
+
+ PyArg_ParseTuple(args, "OOOO", &file_capsule, &metrics, &iline_out, &xline_out);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ if (!PyDict_Check(metrics)) {
+ PyErr_SetString(PyExc_TypeError, "metrics is not a dictionary!");
+ return NULL;
+ }
+
+ unsigned int iline_count;
+ unsigned int xline_count;
+ PyArg_Parse(PyDict_GetItemString(metrics, "iline_count"), "I", &iline_count);
+ PyArg_Parse(PyDict_GetItemString(metrics, "xline_count"), "I", &xline_count);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ Py_buffer iline_buffer = check_and_get_buffer(iline_out, "inline", iline_count);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ Py_buffer xline_buffer = check_and_get_buffer(xline_out, "crossline", xline_count);
+
+ if (PyErr_Occurred()) {
+ PyBuffer_Release(&iline_buffer);
+ return NULL;
+ }
+
+ int il_field;
+ int xl_field;
+ int sorting;
+ unsigned int offset_count;
+ long trace0;
+ unsigned int trace_bsize;
+
+ PyArg_Parse(PyDict_GetItemString(metrics, "iline_field"), "i", &il_field);
+ PyArg_Parse(PyDict_GetItemString(metrics, "xline_field"), "i", &xl_field);
+ PyArg_Parse(PyDict_GetItemString(metrics, "sorting"), "i", &sorting);
+ PyArg_Parse(PyDict_GetItemString(metrics, "offset_count"), "I", &offset_count);
+ PyArg_Parse(PyDict_GetItemString(metrics, "trace0"), "l", &trace0);
+ PyArg_Parse(PyDict_GetItemString(metrics, "trace_bsize"), "I", &trace_bsize);
+
+ int error = segy_inline_indices(p_FILE, il_field, sorting, iline_count, xline_count, offset_count, iline_buffer.buf,
+ trace0, trace_bsize);
+
+ if (error != 0) {
+ py_handle_segy_error_with_fields(error, errno, il_field, xl_field, 2);
+ }
+
+ error = segy_crossline_indices(p_FILE, xl_field, sorting, iline_count, xline_count, offset_count, xline_buffer.buf,
+ trace0, trace_bsize);
+
+ if (error != 0) {
+ py_handle_segy_error_with_fields(error, errno, il_field, xl_field, 2);
+ }
+
+ PyBuffer_Release(&xline_buffer);
+ PyBuffer_Release(&iline_buffer);
+ return Py_BuildValue("");
+}
+
+
+static PyObject *py_fread_trace0(PyObject *self, PyObject *args) {
+ errno = 0;
+ unsigned int lineno;
+ unsigned int other_line_length;
+ unsigned int stride;
+ PyObject *indices_object;
+ char *type_name;
+
+ PyArg_ParseTuple(args, "IIIOs", &lineno, &other_line_length, &stride, &indices_object, &type_name);
+
+ Py_buffer buffer;
+ if (!PyObject_CheckBuffer(indices_object)) {
+ PyErr_Format(PyExc_TypeError, "The destination for %s is not a buffer object", type_name);
+ return NULL;
+ }
+ PyObject_GetBuffer(indices_object, &buffer, PyBUF_FORMAT | PyBUF_C_CONTIGUOUS);
+
+ unsigned int trace_no;
+ unsigned int linenos_sz = (unsigned int) PyObject_Length(indices_object);
+ int error = segy_line_trace0(lineno, other_line_length, stride, buffer.buf, linenos_sz, &trace_no);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_index_and_name(error, errno, lineno, type_name);
+ }
+
+ return Py_BuildValue("I", trace_no);
+}
+
+static PyObject *py_read_trace(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ unsigned int trace_no;
+ PyObject *buffer_out;
+ long trace0;
+ unsigned int trace_bsize;
+ int format;
+ unsigned int samples;
+
+ PyArg_ParseTuple(args, "OIOlIiI", &file_capsule, &trace_no, &buffer_out, &trace0, &trace_bsize, &format, &samples);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ if (!PyObject_CheckBuffer(buffer_out)) {
+ PyErr_SetString(PyExc_TypeError, "The destination buffer is not of the correct type.");
+ return NULL;
+ }
+ Py_buffer buffer;
+ PyObject_GetBuffer(buffer_out, &buffer, PyBUF_FORMAT | PyBUF_C_CONTIGUOUS | PyBUF_WRITEABLE);
+
+ int error = segy_readtrace(p_FILE, trace_no, buffer.buf, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_index_and_name(error, errno, trace_no, "Trace");
+ }
+
+ error = segy_to_native(format, samples, buffer.buf);
+
+ if (error != 0) {
+ PyErr_SetString(PyExc_TypeError, "Unable to convert buffer to native format.");
+ return NULL;
+ }
+
+ Py_IncRef(buffer_out);
+ return buffer_out;
+}
+
+static PyObject *py_write_trace(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ unsigned int trace_no;
+ PyObject *buffer_in;
+ long trace0;
+ unsigned int trace_bsize;
+ int format;
+ unsigned int samples;
+
+ PyArg_ParseTuple(args, "OIOlIiI", &file_capsule, &trace_no, &buffer_in, &trace0, &trace_bsize, &format, &samples);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ if (!PyObject_CheckBuffer(buffer_in)) {
+ PyErr_SetString(PyExc_TypeError, "The source buffer is not of the correct type.");
+ return NULL;
+ }
+ Py_buffer buffer;
+ PyObject_GetBuffer(buffer_in, &buffer, PyBUF_FORMAT | PyBUF_C_CONTIGUOUS | PyBUF_WRITEABLE);
+
+ int error = segy_from_native(format, samples, buffer.buf);
+
+ if (error != 0) {
+ PyErr_SetString(PyExc_TypeError, "Unable to convert buffer from native format.");
+ return NULL;
+ }
+
+ error = segy_writetrace(p_FILE, trace_no, buffer.buf, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_index_and_name(error, errno, trace_no, "Trace");
+ }
+
+ error = segy_to_native(format, samples, buffer.buf);
+
+ if (error != 0) {
+ PyErr_SetString(PyExc_TypeError, "Unable to convert buffer to native format.");
+ return NULL;
+ }
+
+ Py_IncRef(buffer_in);
+ return buffer_in;
+}
+
+static PyObject *py_read_line(PyObject *self, PyObject *args) {
+ errno = 0;
+ PyObject *file_capsule = NULL;
+ unsigned int line_trace0;
+ unsigned int line_length;
+ unsigned int stride;
+ PyObject *buffer_in;
+ long trace0;
+ unsigned int trace_bsize;
+ int format;
+ unsigned int samples;
+
+ PyArg_ParseTuple(args, "OIIIOlIiI", &file_capsule, &line_trace0, &line_length, &stride, &buffer_in, &trace0,
+ &trace_bsize, &format, &samples);
+
+ FILE *p_FILE = get_FILE_pointer_from_capsule(file_capsule);
+
+ if (PyErr_Occurred()) { return NULL; }
+
+ if (!PyObject_CheckBuffer(buffer_in)) {
+ PyErr_SetString(PyExc_TypeError, "The destination buffer is not of the correct type.");
+ return NULL;
+ }
+ Py_buffer buffer;
+ PyObject_GetBuffer(buffer_in, &buffer, PyBUF_FORMAT | PyBUF_C_CONTIGUOUS | PyBUF_WRITEABLE);
+
+ int error = segy_read_line(p_FILE, line_trace0, line_length, stride, buffer.buf, trace0, trace_bsize);
+
+ if (error != 0) {
+ return py_handle_segy_error_with_index_and_name(error, errno, line_trace0, "Line");
+ }
+
+ error = segy_to_native(format, samples * line_length, buffer.buf);
+
+ if (error != 0) {
+ PyErr_SetString(PyExc_TypeError, "Unable to convert buffer to native format.");
+ return NULL;
+ }
+
+ Py_IncRef(buffer_in);
+ return buffer_in;
+}
+
+/* define functions in module */
+static PyMethodDef SegyMethods[] = {
+ {"open", (PyCFunction) py_FILE_open, METH_VARARGS, "Opens a file."},
+ {"close", (PyCFunction) py_FILE_close, METH_VARARGS, "Closes a file."},
+ {"flush", (PyCFunction) py_FILE_flush, METH_VARARGS, "Flushes a file."},
+
+ {"binheader_size", (PyCFunction) py_binheader_size, METH_NOARGS, "Return the size of the binary header."},
+ {"textheader_size", (PyCFunction) py_textheader_size, METH_NOARGS, "Return the size of the text header."},
+
+ {"read_textheader", (PyCFunction) py_read_texthdr, METH_VARARGS, "Reads the text header from a segy file."},
+ {"write_textheader", (PyCFunction) py_write_texthdr, METH_VARARGS, "Write the text header to a segy file."},
+
+ {"empty_binaryheader", (PyCFunction) py_empty_binaryhdr, METH_NOARGS, "Create empty binary header for a segy file."},
+ {"read_binaryheader", (PyCFunction) py_read_binaryhdr, METH_VARARGS, "Read the binary header from a segy file."},
+ {"write_binaryheader", (PyCFunction) py_write_binaryhdr, METH_VARARGS, "Write the binary header to a segy file."},
+
+ {"empty_traceheader", (PyCFunction) py_empty_trace_header, METH_NOARGS, "Create empty trace header for a segy file."},
+ {"read_traceheader", (PyCFunction) py_read_trace_header, METH_VARARGS, "Read a trace header from a segy file."},
+ {"write_traceheader", (PyCFunction) py_write_trace_header, METH_VARARGS, "Write a trace header to a segy file."},
+
+ {"trace_bsize", (PyCFunction) py_trace_bsize, METH_VARARGS, "Returns the number of bytes in a trace."},
+ {"get_field", (PyCFunction) py_get_field, METH_VARARGS, "Get a header field."},
+ {"set_field", (PyCFunction) py_set_field, METH_VARARGS, "Set a header field."},
+
+ {"init_line_metrics", (PyCFunction) py_init_line_metrics, METH_VARARGS, "Find the length and stride of inline and crossline."},
+ {"init_metrics", (PyCFunction) py_init_metrics, METH_VARARGS, "Find most metrics for a segy file."},
+ {"init_line_indices", (PyCFunction) py_init_line_indices, METH_VARARGS, "Find the indices for inline and crossline."},
+ {"fread_trace0", (PyCFunction) py_fread_trace0, METH_VARARGS, "Find trace0 of a line."},
+ {"read_trace", (PyCFunction) py_read_trace, METH_VARARGS, "Read trace data."},
+ {"write_trace", (PyCFunction) py_write_trace, METH_VARARGS, "Write trace data."},
+ {"read_line", (PyCFunction) py_read_line, METH_VARARGS, "Read a xline/iline from file."},
+ {NULL, NULL, 0, NULL}
+};
+
+
+/* module initialization */
+PyMODINIT_FUNC
+init_segyio(void) {
+ (void) Py_InitModule("_segyio", SegyMethods);
+}
\ No newline at end of file
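For orientation, a minimal sketch of calling the new extension module directly (the file name is hypothetical; the high-level wrappers further down are the intended entry point):

    >>> import segyio._segyio as _segyio
    >>> fd = _segyio.open("survey.sgy", "r")
    >>> _segyio.binheader_size(), _segyio.textheader_size()
    >>> text = _segyio.read_textheader(fd, 0)
    >>> _segyio.close(fd)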
diff --git a/python/segyio/_trace.py b/python/segyio/_trace.py
new file mode 100644
index 0000000..063fb68
--- /dev/null
+++ b/python/segyio/_trace.py
@@ -0,0 +1,101 @@
+import numpy as np
+import segyio
+
+
+class Trace:
+
+ def __init__(self, file):
+ self._file = file
+ """:type: segyio.file"""
+
+ def __getitem__(self, index, buf=None):
+ if isinstance(index, tuple):
+ return self.__getitem__(index[0], index[1])
+
+ buf = self._trace_buffer(buf)
+
+ if isinstance(index, int):
+ if not 0 <= abs(index) < len(self):
+                raise IndexError("Trace %d not in range (-%d,%d)" % (index, len(self), len(self)))
+
+ return self._readtr(index, buf)
+
+ elif isinstance(index, slice):
+ def gen():
+ for i in xrange(*index.indices(len(self))):
+ yield self._readtr(i, buf)
+
+ return gen()
+
+ else:
+ raise TypeError("Key must be int, slice, (int,np.ndarray) or (slice,np.ndarray)")
+
+ def __setitem__(self, index, val):
+ if not 0 <= abs(index) < len(self):
+            raise IndexError("Trace %d not in range (-%d,%d)" % (index, len(self), len(self)))
+
+ if not isinstance(val, np.ndarray):
+ raise TypeError("Value must be numpy.ndarray")
+
+ if val.dtype != np.single:
+ raise TypeError("Numpy array must be of type single")
+
+ shape = (self._file.samples,)
+
+ if val.shape[0] < shape[0]:
+ raise TypeError("Array wrong shape. Expected minimum %s, was %s" % (shape, val.shape))
+
+ if isinstance(index, int):
+ self._writetr(index, val)
+
+ elif isinstance(index, slice):
+            for i, v in zip(xrange(*index.indices(len(self))), val):
+                self._writetr(i, v)
+
+ else:
+ raise KeyError("Wrong shape of index")
+
+ def __len__(self):
+ return self._file.tracecount
+
+ def __iter__(self):
+ return self[:]
+
+ def _trace_buffer(self, buf=None):
+ samples = self._file.samples
+
+ if buf is None:
+ buf = np.empty(shape=samples, dtype=np.single)
+ elif not isinstance(buf, np.ndarray):
+ raise TypeError("Buffer must be None or numpy.ndarray")
+ elif buf.dtype != np.single:
+ buf = np.empty(shape=samples, dtype=np.single)
+        elif buf.shape != (samples,):
+            buf = buf.reshape(samples)
+
+ return buf
+
+ def _readtr(self, traceno, buf=None):
+ if traceno < 0:
+ traceno += self._file.tracecount
+
+ buf = self._trace_buffer(buf)
+
+ trace0 = self._file._tr0
+ bsz = self._file._bsz
+ fmt = self._file._fmt
+ samples = self._file.samples
+ return segyio._segyio.read_trace(self._file.xfd, traceno, buf, trace0, bsz, fmt, samples)
+
+ def _writetr(self, traceno, buf):
+ self.write_trace(traceno, buf, self._file)
+
+ @classmethod
+ def write_trace(cls, traceno, buf, segy):
+ """
+ :type traceno: int
+        :type buf: numpy.ndarray
+ :type segy: segyio.SegyFile
+ """
+
+ segyio._segyio.write_trace(segy.xfd, traceno, buf, segy._tr0, segy._bsz, segy._fmt, segy.samples)
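A short, hedged sketch of the new Trace mode in use (hypothetical path; traces are numpy arrays of single-precision floats, and writing requires a read-write mode):

    >>> import numpy as np
    >>> import segyio
    >>> with segyio.open("survey.sgy", "r+") as f:
    ...     tr = f.trace[10]                       # one trace, len(tr) == f.samples
    ...     first = [t[0] for t in f.trace[0:5]]   # a slice yields a generator of traces
    ...     f.trace[10] = np.zeros(f.samples, dtype=np.single)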
diff --git a/python/segyio/binfield.py b/python/segyio/binfield.py
index f1dd91e..e0133a5 100644
--- a/python/segyio/binfield.py
+++ b/python/segyio/binfield.py
@@ -1,70 +1,36 @@
-from cwrap import BaseCEnum
+from segyio import Enum
-class BinField(BaseCEnum):
- TYPE_NAME = "SEGY_BINFIELD"
- JobID = None
- LineNumber = None
- ReelNumber = None
- Traces = None
- AuxTraces = None
- Interval = None
- IntervalOriginal = None
- Samples = None
- SamplesOriginal = None
- Format = None
- EnsembleFold = None
- SortingCode = None
- VerticalSum = None
- SweepFrequencyStart = None
- SweepFrequencyEnd = None
- SweepLength = None
- Sweep = None
- SweepChannel = None
- SweepTaperStart = None
- SweepTaperEnd = None
- Taper = None
- CorrelatedTraces = None
- BinaryGainRecovery = None
- AmplitudeRecovery = None
- MeasurementSystem = None
- ImpulseSignalPolarity = None
- VibratoryPolarity = None
- Unassigned1 = None
- SEGYRevision = None
- TraceFlag = None
- ExtendedHeaders = None
- Unassigned2 = None
-
-BinField.addEnum("JobID", 3201)
-BinField.addEnum("LineNumber", 3205)
-BinField.addEnum("ReelNumber", 3209)
-BinField.addEnum("Traces", 3213)
-BinField.addEnum("AuxTraces", 3215)
-BinField.addEnum("Interval", 3217)
-BinField.addEnum("IntervalOriginal", 3219)
-BinField.addEnum("Samples", 3221)
-BinField.addEnum("SamplesOriginal", 3223)
-BinField.addEnum("Format", 3225)
-BinField.addEnum("EnsembleFold", 3227)
-BinField.addEnum("SortingCode", 3229)
-BinField.addEnum("VerticalSum", 3231)
-BinField.addEnum("SweepFrequencyStart", 3233)
-BinField.addEnum("SweepFrequencyEnd", 3235)
-BinField.addEnum("SweepLength", 3237)
-BinField.addEnum("Sweep", 3239)
-BinField.addEnum("SweepChannel", 3241)
-BinField.addEnum("SweepTaperStart", 3243)
-BinField.addEnum("SweepTaperEnd", 3245)
-BinField.addEnum("Taper", 3247)
-BinField.addEnum("CorrelatedTraces", 3249)
-BinField.addEnum("BinaryGainRecovery", 3251)
-BinField.addEnum("AmplitudeRecovery", 3253)
-BinField.addEnum("MeasurementSystem", 3255)
-BinField.addEnum("ImpulseSignalPolarity", 3257)
-BinField.addEnum("VibratoryPolarity", 3259)
-BinField.addEnum("Unassigned1", 3261)
-BinField.addEnum("SEGYRevision", 3501)
-BinField.addEnum("TraceFlag", 3503)
-BinField.addEnum("ExtendedHeaders", 3505)
-BinField.addEnum("Unassigned2", 3507)
+class BinField(Enum):
+ JobID = 3201
+ LineNumber = 3205
+ ReelNumber = 3209
+ Traces = 3213
+ AuxTraces = 3215
+ Interval = 3217
+ IntervalOriginal = 3219
+ Samples = 3221
+ SamplesOriginal = 3223
+ Format = 3225
+ EnsembleFold = 3227
+ SortingCode = 3229
+ VerticalSum = 3231
+ SweepFrequencyStart = 3233
+ SweepFrequencyEnd = 3235
+ SweepLength = 3237
+ Sweep = 3239
+ SweepChannel = 3241
+ SweepTaperStart = 3243
+ SweepTaperEnd = 3245
+ Taper = 3247
+ CorrelatedTraces = 3249
+ BinaryGainRecovery = 3251
+ AmplitudeRecovery = 3253
+ MeasurementSystem = 3255
+ ImpulseSignalPolarity = 3257
+ VibratoryPolarity = 3259
+ Unassigned1 = 3261
+ SEGYRevision = 3501
+ TraceFlag = 3503
+ ExtendedHeaders = 3505
+ Unassigned2 = 3507
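With BinField now a plain segyio Enum, the values are the same byte offsets as before, so the dict-style binary header access is unchanged (assuming an open SegyFile f and that the new Field proxy keeps the old lookup semantics):

    >>> from segyio import BinField
    >>> f.bin[BinField.Traces]                  # read one binary header field
    >>> f.bin = { BinField.Interval: 4000 }     # update one field, leave the rest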
diff --git a/python/segyio/create.py b/python/segyio/create.py
index d0c64d0..618d5d1 100644
--- a/python/segyio/create.py
+++ b/python/segyio/create.py
@@ -1,5 +1,5 @@
import datetime
-import ctypes as ct
+import numpy
import segyio
def default_text_header(iline, xline, offset):
@@ -91,38 +91,35 @@ def create(filename, spec):
... dst.bin = src.bin
... dst.header = src.header
... dst.trace = src.trace
+ :rtype: segyio.SegyFile
"""
- f = segyio.file(filename, "w+")
-
- f.samples = spec.samples
- f.ext_headers = spec.ext_headers
- f._bsz = segyio.file._trace_bsize(f.samples)
- f._tr0 = -1 + segyio.file._textsize() + \
- segyio.file._binheader_size() + \
- (spec.ext_headers * segyio.file._textsize())
- f.sorting = spec.sorting
+ f = segyio.SegyFile(filename, "w+")
+
+ f._samples = spec.samples
+ f._ext_headers = spec.ext_headers
+ f._bsz = segyio._segyio.trace_bsize(f.samples)
+
+ txt_hdr_sz = segyio._segyio.textheader_size()
+ bin_hdr_sz = segyio._segyio.binheader_size()
+ f._tr0 = -1 + txt_hdr_sz + bin_hdr_sz + (spec.ext_headers * (txt_hdr_sz - 1))
+ f._sorting = spec.sorting
f._fmt = spec.format
- f.offsets = spec.offsets
- f.tracecount = len(spec.ilines) * len(spec.xlines) * spec.offsets
+ f._offsets = spec.offsets
+ f._tracecount = len(spec.ilines) * len(spec.xlines) * spec.offsets
f._il = int(spec.iline)
- f.ilines = spec.ilines
- f._raw_ilines = (ct.c_uint * len(f.ilines))()
- for i, x in enumerate(f.ilines):
- f._raw_ilines[i] = x
+ f._ilines = numpy.copy(numpy.asarray(spec.ilines, dtype=numpy.uintc))
f._xl = int(spec.xline)
- f.xlines = spec.xlines
- f._raw_xlines = (ct.c_uint * len(f.xlines))()
- for i, x in enumerate(f.xlines):
- f._raw_xlines[i] = x
+ f._xlines = numpy.copy(numpy.asarray(spec.xlines, dtype=numpy.uintc))
+ line_metrics = segyio._segyio.init_line_metrics(f.sorting, f.tracecount, len(f.ilines), len(f.xlines), f.offsets)
- f._iline_length = f._init_iline_length(len(f.xlines))
- f._iline_stride = f._init_iline_stride(len(f.ilines))
+ f._iline_length = line_metrics['iline_length']
+ f._iline_stride = line_metrics['iline_stride']
- f._xline_length = f._init_xline_length(len(f.ilines))
- f._xline_stride = f._init_xline_stride(len(f.xlines))
+ f._xline_length = line_metrics['xline_length']
+ f._xline_stride = line_metrics['xline_stride']
f.text[0] = default_text_header(f._il, f._xl, segyio.TraceField.offset)
f.bin = { 3213: f.tracecount,
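For reference, a small sketch of building a spec and creating a file through the refactored path (file name and geometry are made up; the enums are imported from their modules rather than assuming package-level re-exports):

    >>> import numpy as np
    >>> import segyio
    >>> from segyio.segy import spec
    >>> from segyio.tracesortingformat import TraceSortingFormat
    >>> from segyio.segysampleformat import SegySampleFormat
    >>> s = spec()
    >>> s.ilines, s.xlines = range(1, 11), range(20, 31)
    >>> s.samples = 50
    >>> s.sorting = TraceSortingFormat.INLINE_SORTING
    >>> s.format = SegySampleFormat.IBM_FLOAT_4_BYTE
    >>> with segyio.create("new.sgy", s) as f:
    ...     f.trace[0] = np.zeros(s.samples, dtype=np.single)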
diff --git a/python/segyio/open.py b/python/segyio/open.py
index f9b1580..c0fb384 100644
--- a/python/segyio/open.py
+++ b/python/segyio/open.py
@@ -1,6 +1,9 @@
+import numpy
+
import segyio
-def open(filename, mode = "r", iline = 189, xline = 193):
+
+def open(filename, mode="r", iline=189, xline=193):
"""Open a segy file.
Opens a segy file and tries to figure out its sorting, inline numbers,
@@ -40,35 +43,41 @@ def open(filename, mode = "r", iline = 189, xline = 193):
>>> with segyio.open(path) as src, segyio.open(path, "r+") as dst:
... dst.trace = src.trace # copy all traces from src to dst
...
+ :rtype: segyio.SegyFile
"""
- f = segyio.file(filename, mode, iline, xline)
+ f = segyio.SegyFile(filename, mode, iline, xline)
try:
header = f.bin.buf
+ metrics = segyio._segyio.init_metrics(f.xfd, header, iline, xline)
+
+ f._samples = metrics['sample_count']
+ f._tr0 = metrics['trace0']
+ f._fmt = metrics['format']
+ f._bsz = metrics['trace_bsize']
+ f._ext_headers = (f._tr0 - 3600) / 3200 # should probably be from C
- f.samples = f._get_samples(header)
- f._tr0 = f._trace0(header)
- f._fmt = f._format(header)
- f._bsz = f._trace_bsize(f.samples)
- f.ext_headers = (f._tr0 - 3600) / 3200 # should probably be from C
+ f._tracecount = metrics['trace_count']
- f.tracecount = f._init_traces()
- f.sorting = f._init_sorting()
- f.offsets = f._init_offsets()
+ f._sorting = metrics['sorting']
+ f._offsets = metrics['offset_count']
- iline_count, xline_count = f._init_line_count()
+ iline_count, xline_count = metrics['iline_count'], metrics['xline_count']
- f.ilines, f._raw_ilines = f._init_ilines(iline_count, xline_count)
- f._iline_length = f._init_iline_length(xline_count)
- f._iline_stride = f._init_iline_stride(iline_count)
+ line_metrics = segyio._segyio.init_line_metrics(f.sorting, f.tracecount, iline_count, xline_count, f.offsets)
- f.xlines, f._raw_xlines = f._init_xlines(iline_count, xline_count)
- f._xline_length = f._init_xline_length(iline_count)
- f._xline_stride = f._init_xline_stride(xline_count)
+ f._ilines = numpy.zeros(iline_count, dtype=numpy.uintc)
+ f._xlines = numpy.zeros(xline_count, dtype=numpy.uintc)
+ segyio._segyio.init_line_indices(f.xfd, metrics, f.ilines, f.xlines)
+
+ f._iline_length = line_metrics['iline_length']
+ f._iline_stride = line_metrics['iline_stride']
+
+ f._xline_length = line_metrics['xline_length']
+ f._xline_stride = line_metrics['xline_stride']
except:
f.close()
raise
return f
-
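The geometry detection now happens in one init_metrics/init_line_indices round trip on the C side; from the caller's point of view nothing changes (hypothetical path):

    >>> import segyio
    >>> with segyio.open("survey.sgy", iline=189, xline=193) as f:
    ...     f.sorting, f.samples, f.tracecount
    ...     len(f.ilines), len(f.xlines), f.offsets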
diff --git a/python/segyio/segy.py b/python/segyio/segy.py
index 24e842f..fa2fcbc 100644
--- a/python/segyio/segy.py
+++ b/python/segyio/segy.py
@@ -14,557 +14,56 @@ You can also have a look at the example programs that are distributed with
segyio which you can find in the examples directory or where your distribution
installs example programs.
"""
-
-import sys, os, errno
import itertools
import numpy as np
-import ctypes as ct
-from cwrap import BaseCClass, BaseCEnum, Prototype
-from segyio import TraceField, BinField
-
-class _Segyio(Prototype):
- SEGYIO_LIB = ct.CDLL("libsegyio.so", use_errno=True)
-
- def __init__(self, prototype, bind=True):
- super(_Segyio, self).__init__(_Segyio.SEGYIO_LIB, prototype, bind=bind)
-
-def _floatp(obj):
- return obj.ctypes.data_as(ct.POINTER(ct.c_float))
-
-# match the SEGY_ERROR enum from segy.h
-class _error(BaseCEnum):
- TYPE_NAME = "SEGY_ERROR"
-
- OK = None
- FOPEN_ERROR = None
- FSEEK_ERROR = None
- FREAD_ERROR = None
- FWRITE_ERROR = None
- INVALID_FIELD = None
- INVALID_SORTING = None
- MISSING_LINE_INDEX = None
- INVALID_OFFSETS = None
- TRACE_SIZE_MISMATCH = None
-
-_error.addEnum("OK", 0)
-_error.addEnum("FOPEN_ERROR", 1)
-_error.addEnum("FSEEK_ERROR", 2)
-_error.addEnum("FREAD_ERROR", 3)
-_error.addEnum("FWRITE_ERROR", 4)
-_error.addEnum("INVALID_FIELD", 5)
-_error.addEnum("INVALID_SORTING", 6)
-_error.addEnum("MISSING_LINE_INDEX", 7)
-_error.addEnum("INVALID_OFFSETS", 8)
-_error.addEnum("TRACE_SIZE_MISMATCH", 9)
-
-def _header_buffer(buf = None):
- if buf is None:
- return (ct.c_char * 240)()
-
- if len(buf) < 240:
- raise ValueError("Buffer must be a minimum %d size'd byte array." % 240)
-
- return buf
-
-def _set_field(buf, field, x):
- errc = file._set_field(buf, int(field), x)
- err = _error(errc)
-
- if err != _error.OK:
- raise IndexError("Invalid byte offset %d" % field)
-
-def _get_field(buf, field, x):
- errc = file._get_field(buf, int(field), ct.byref(x))
- err = _error(errc)
-
- if err != _error.OK:
- raise IndexError("Invalid byte offset %d" % field)
-
- return int(x.value)
-
-def _write_header(buf, segy, traceno):
- errc = segy._write_header(traceno, buf, segy._tr0, segy._bsz)
- err = _error(errc)
-
- if err != _error.OK:
- errno = ct.get_errno()
- raise OSError(errno, "Error writing header for trace %d: %s" % (traceno, os.strerror(errno)))
-
-
-class _line:
- """ Line mode for traces and trace headers. Internal.
-
- The _line class provides an interface for line-oriented operations. The
- line reading operations themselves are not streaming - it's assumed than
- when a line is queried it's somewhat limited in size and will comfortably
- fit in memory, and that the full line is interesting. This also applies to
- line headers; however, all returned values support the iterable protocol so
- they work fine together with the streaming bits of this library.
-
- _line should not be instantiated directly by users, but rather returned
- from the iline/xline properties of file or from the header mode. Any
- direct construction of this should be conisdered an error.
- """
- def __init__(self, segy, length, stride, lines, raw_lines, other_lines, buffn, readfn, writefn, name):
- self.segy = segy
- self.len = length
- self.stride = stride
- self.lines = lines
- self.raw_lines = raw_lines
- self.other_lines = other_lines
- self.name = name
- self.buffn = buffn
- self.readfn = readfn
- self.writefn = writefn
-
- def __getitem__(self, lineno, buf = None):
- if isinstance(lineno, tuple):
- return self.__getitem__(lineno[0], lineno[1])
-
- buf = self.buffn(buf)
-
- if isinstance(lineno, int):
- t0 = self.segy._fread_trace0(lineno, len(self.other_lines), self.stride, self.raw_lines, self.name)
- return self.readfn(t0, self.len, self.stride, buf)
-
- elif isinstance(lineno, slice):
- # in order to support [:end] syntax, we must make sure
- # start has a non-None value. lineno.indices() would set it
- # to 0, but we don't know if that's a reasonable value or
- # not. If start is None we set it to the first line
- if lineno.start is None:
- lineno = slice(self.lines[0], lineno.stop, lineno.step)
-
- def gen():
- s = set(self.lines)
- rng = xrange(*lineno.indices(self.lines[-1] + 1))
-
- # use __getitem__ lookup to avoid tuple
- # construction and unpacking and fast-forward
- # into the interesting code path
- for i in itertools.ifilter(s.__contains__, rng):
- yield self.__getitem__(i, buf)
-
- return gen()
-
- def __setitem__(self, lineno, val):
- if isinstance(lineno, slice):
- if lineno.start is None:
- lineno = slice(self.lines[0], lineno.stop, lineno.step)
-
- rng = xrange(*lineno.indices(self.lines[-1] + 1))
- s = set(self.lines)
-
- for i, x in itertools.izip(filter(s.__contains__, rng), val):
- self.__setitem__(i, x)
-
- return
-
- t0 = self.segy._fread_trace0(lineno, len(self.other_lines), self.stride, self.raw_lines, self.name)
- self.writefn(t0, self.len, self.stride, val)
-
- def __len__(self):
- return len(self.lines)
-
- def __iter__(self):
- buf = self.buffn()
- for i in self.lines:
- yield self.__getitem__(i, buf)
-
-
-class _DepthPlane:
-
- def __init__(self, samples, sorting, read_fn):
-
- self.samples = samples
- self.sorting = sorting
- self.read_fn = read_fn
-
- def __getitem__(self, depth):
- if isinstance(depth, int):
- return self.read_fn(self.sorting, depth)
-
- def __len__(self):
- return len(self.samples)
-
- def __iter__(self):
- for i in range(self.samples):
- yield self.__getitem__(i)
-
-
-class _header:
- def __init__(self, segy):
- self.segy = segy
-
- class proxy:
- def __init__(inner, buf, traceno = None, segy = None, get_field = _get_field, set_field = _set_field, write = _write_header, field_type = TraceField):
- inner.buf = buf
- inner.traceno = traceno
- inner._segy = segy
- inner._get_field = get_field
- inner._set_field = set_field
- inner._field_type = field_type
- inner._write = write
-
- def __getitem__(inner, field):
- val = ct.c_int()
-
- # add some structure so we can always iterate over fields
- if isinstance(field, int) or isinstance(field, inner._field_type):
- field = [field]
-
- d = { inner._field_type(f): inner._get_field(inner.buf, f, val) for f in field }
-
- # unpack the dictionary. if header[field] is requested, a
- # plain, unstructed output is expected, but header[f1,f2,f3]
- # yields a dict
- if len(d) == 1:
- return d.values()[0]
-
- return d
-
- def __setitem__(inner, field, val):
- inner._set_field(inner.buf, field, val)
- inner._write(inner.buf, inner._segy, inner.traceno)
-
- def __getitem__(self, traceno, buf = None):
- if isinstance(traceno, tuple):
- return self.__getitem__(traceno[0], traceno[1])
- buf = _header_buffer(buf)
+from segyio._header import Header
+from segyio._line import Line
+from segyio._trace import Trace
+from segyio._field import Field
+from segyio._depth_plane import DepthPlane
+import segyio._segyio as _segyio
- if isinstance(traceno, slice):
- def gen():
- for i in xrange(*traceno.indices(self.segy.tracecount)):
- yield self.__getitem__(i, buf)
-
- return gen()
-
- buf = self.segy._readh(traceno, buf)
- return _header.proxy(buf, traceno = traceno, segy = self.segy)
-
- def __setitem__(self, traceno, val):
- buf = None
-
- # library-provided loops can re-use a buffer for the lookup, even in
- # __setitem__, so we might need to unpack the tuple to reuse the buffer
- if isinstance(traceno, tuple):
- buf = traceno[1]
- traceno = traceno[0]
-
- if isinstance(val, dict):
- try:
- # try to read a buffer. If the file was created by
- # libsegyio this might not have been written to before and
- # getitem might fail, so we try to read it and if it fails
- # we check if we're still within bounds if we are we create
- # an empty header and write to that
- buf = self.__getitem__(traceno, buf).buf
-
- except:
- if traceno >= self.segy.tracecount: raise
- buf = _header_buffer(buf)
-
- for f, v in val.items():
- _set_field(buf, f, v)
-
- else:
- buf = val.buf
-
- _write_header(buf, self.segy, traceno)
-
- def __iter__(self):
- return self[:]
-
- def readfn(self, t0, length, stride, buf):
- def gen():
- start = t0
- stop = t0 + (length * stride)
- for i in xrange(start, stop, stride):
- self.segy._readh(i, buf)
- yield _header.proxy(buf, traceno = i, segy = self.segy)
-
- return gen()
-
- def writefn(self, t0, length, stride, val):
- start = t0
- stop = t0 + (length * stride)
-
- if isinstance(val, _header.proxy) or isinstance(val, dict):
- val = itertools.repeat(val)
-
- for i, x in itertools.izip(xrange(start, stop, stride), val):
- self[i] = x
-
-
- @property
- def iline(self):
- segy = self.segy
- length = segy._iline_length
- stride = segy._iline_stride
- lines = segy.ilines
- raw_lines = segy._raw_ilines
- other_lines = segy.xlines
- buffn = _header_buffer
-
- return _line(segy, length, stride, lines, raw_lines, other_lines, buffn, self.readfn, self.writefn, "Inline")
-
- @property
- def xline(self):
- segy = self.segy
- length = segy._xline_length
- stride = segy._xline_stride
- lines = segy.xlines
- raw_lines = segy._raw_xlines
- other_lines = segy.xlines
- buffn = _header_buffer
+from segyio.tracesortingformat import TraceSortingFormat
- return _line(segy, length, stride, lines, raw_lines, other_lines, buffn, self.readfn, self.writefn, "Crossline")
- def __setattr__(self, name, value):
- """Write iterables to lines
+class SegyFile(object):
- Examples:
- setattr supports writing to *all* inlines and crosslines via
- assignment, regardless of data source and format. Will respect the
- sample size and structure of the file being assigned to, so if the
- argument traces are longer than that of the file being written to
- the surplus data will be ignored. Uses same rules for writing as
- `f.iline[i] = x`.
- """
- if name == "iline":
- for i, src in itertools.izip(self.segy.ilines, value):
- self.iline[i] = src
-
- if name == "xline":
- for i, src in itertools.izip(self.segy.xlines, value):
- self.xline[i] = src
-
- else:
- self.__dict__[name] = value
- return
-
-class file(BaseCClass):
- TYPE_NAME = "FILE"
-
- _open = _Segyio("void* fopen(char*, char*)", bind = False)
- _flush = _Segyio("int fflush(FILE)")
- _close = _Segyio("int fclose(FILE)")
-
- _binheader_size = _Segyio("uint segy_binheader_size()", bind = False)
- _binheader = _Segyio("int segy_binheader(FILE, char*)")
- _write_binheader = _Segyio("int segy_write_binheader(FILE, char*)")
-
- _trace0 = _Segyio("int segy_trace0(char*)", bind = False)
- _get_samples = _Segyio("uint segy_samples(char*)", bind = False)
- _format = _Segyio("int segy_format(char*)", bind = False)
- _sorting = _Segyio("int segy_sorting(FILE, int, int, int*, long, uint)")
- _trace_bsize = _Segyio("uint segy_trace_bsize(uint)", bind = False)
- _traces = _Segyio("uint segy_traces(FILE, uint*, long, uint)")
- _offsets = _Segyio("uint segy_offsets(FILE, int, int, uint, uint*, long, uint)")
-
- _get_field = _Segyio("int segy_get_field(char*, int, int*)", bind = False)
- _set_field = _Segyio("int segy_set_field(char*, int, int)", bind = False)
-
- _get_bfield = _Segyio("int segy_get_bfield(char*, int, int*)", bind = False)
- _set_bfield = _Segyio("int segy_set_bfield(char*, int, int)", bind = False)
-
- _to_native = _Segyio("int segy_to_native(int, uint, float*)", bind = False)
- _from_native = _Segyio("int segy_from_native(int, uint, float*)", bind = False)
-
- _read_header = _Segyio("int segy_traceheader(FILE, uint, char*, long, uint)")
- _write_header = _Segyio("int segy_write_traceheader(FILE, uint, char*, long, uint)")
- _read_trace = _Segyio("int segy_readtrace(FILE, uint, float*, long, uint)")
- _write_trace = _Segyio("int segy_writetrace(FILE, uint, float*, long, uint)")
-
- _count_lines = _Segyio("int segy_count_lines(FILE, int, uint, uint*, uint*, long, uint)")
- _inline_length = _Segyio("int segy_inline_length(int, uint, uint, uint, uint*)", bind = False)
- _inline_stride = _Segyio("int segy_inline_stride(int, uint, uint*)", bind = False)
- _inline_indices = _Segyio("int segy_inline_indices(FILE, int, int, uint, uint, uint, uint*, long, uint)")
- _crossline_length = _Segyio("int segy_crossline_length(int, uint, uint, uint, uint*)", bind = False)
- _crossline_stride = _Segyio("int segy_crossline_stride(int, uint, uint*)", bind = False)
- _crossline_indices = _Segyio("int segy_crossline_indices(FILE, int, int, uint, uint, uint, uint*, long, uint)")
- _line_trace0 = _Segyio("int segy_line_trace0( uint, uint, uint, uint*, uint, uint*)", bind = False)
- _read_line = _Segyio("int segy_read_line(FILE, uint, uint, uint, float*, long, uint)")
- _write_line = _Segyio("int segy_write_line(FILE, uint, uint, uint, float*, long, uint)")
-
- _textsize = _Segyio("int segy_textheader_size()", bind = False)
- _texthdr = _Segyio("int segy_textheader(FILE, char*)")
- _write_texthdr = _Segyio("int segy_write_textheader(FILE, uint, char*)")
-
- def __init__(self, filename, mode, iline = 189 , xline = 193, t0 = 1111.0 ):
+ def __init__(self, filename, mode, iline=189, xline=193, t0=1111.0):
"""
Constructor, internal.
"""
- self._filename = filename
- self._mode = mode
- self._il = iline
- self._xl = xline
- fd = self._open(filename, mode)
-
- if not fd:
- errno = ct.get_errno()
- strerror = os.strerror(errno)
- raise OSError(errno, "Opening file '%s' failed: %s" % (filename, strerror))
-
- super(file, self).__init__(fd)
-
- def _init_traces(self):
- traces = ct.c_uint()
- errc = self._traces(ct.byref(traces), self._tr0, self._bsz)
- err = _error(errc)
-
- if err == _error.OK:
- return int(traces.value)
-
- if err == _error.TRACE_SIZE_MISMATCH:
- raise RuntimeError("Number of traces is not consistent with file size. File probably corrupt.")
-
- errno = ct.get_errno()
- raise OSError("Error while detecting number of traces: %s" % os.strerror(errno))
-
- def _init_sorting(self):
- sorting = ct.c_int()
- errc = self._sorting( self._il, self._xl, ct.byref(sorting), self._tr0, self._bsz)
-
- err = _error(errc)
-
- if err == _error.OK:
- return int(sorting.value)
-
- if err == _error.INVALID_FIELD:
- raise ValueError("Invalid inline (%d) or crossline (%d) field/byte offset. "\
- "Too large or between valid byte offsets" % (self._il, self._xl))
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unable to determine sorting. File probably corrupt.")
-
- errno = ct.get_errno()
- raise OSError(errno, "Error while detecting file sorting: %s" % os.strerror(errno))
-
- return int(sorting.value)
-
- def _init_offsets(self):
- offsets = ct.c_uint()
- errc = self._offsets(self._il, self._xl, self.tracecount, ct.byref(offsets), self._tr0, self._bsz)
-
- err = _error(errc)
-
- if err == _error.OK:
- return int(offsets.value)
-
- if err == _error.INVALID_FIELD:
- raise ValueError("Invalid inline (%d) or crossline (%d) field/byte offset. "\
- "Too large or between valid byte offsets" % (self._il, self._xl))
-
- if err == _error.INVALID_OFFSETS:
- raise RuntimeError("Found more offsets than traces. File probably corrupt.")
-
- def _init_line_count(self):
- ilines_sz, xlines_sz = ct.c_uint(), ct.c_uint()
-
- if self.sorting == 1: #crossline sorted
- fi = self._il
- l1out = ct.byref(xlines_sz)
- l2out = ct.byref(ilines_sz)
- elif self.sorting == 2: #inline sorted
- fi = self._xl
- l1out = ct.byref(ilines_sz)
- l2out = ct.byref(xlines_sz)
- else:
- raise RuntimeError("Unable to determine sorting. File probably corrupt.")
-
- errc = self._count_lines(fi, self.offsets, l1out, l2out, self._tr0, self._bsz)
- err = _error(errc)
-
- if err == _error.OK:
- return int(ilines_sz.value), int(xlines_sz.value)
-
- errno = ct.get_errno()
- raise OSError(errno, "Error while counting lines: %s", os.strerror(errno))
-
-
- def _init_ilines(self, iline_count, xline_count):
- ilines = (ct.c_uint * iline_count)()
- errc = self._inline_indices(self._il, self.sorting, iline_count, xline_count, self.offsets, ilines, self._tr0, self._bsz)
- err = _error(errc)
-
- if err == _error.OK:
- return map(int, ilines), ilines
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unknown file sorting.")
-
- errno = ct.get_errno()
- raise OSError(errno, "Error while reading inline indices: %s", os.strerror(errno))
-
- def _init_iline_length(self, xline_count):
- length = ct.c_uint()
- errc = self._inline_length(self.sorting, self.tracecount, xline_count, self.offsets, ct.byref(length))
- err = _error(errc)
-
- if err == _error.OK:
- return int(length.value)
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unknown file sorting.")
-
- errno = ct.get_errno()
- raise OSError(errno, "Error while determining inline length: %s", os.strerror(errno))
-
- def _init_iline_stride(self, iline_count):
- stride = ct.c_uint()
- errc = self._inline_stride(self.sorting, iline_count, ct.byref(stride))
- err = _error(errc)
-
- if err == _error.OK:
- return int(stride.value)
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unknown file sorting.")
-
- def _init_xlines(self, iline_count, xline_count):
- xlines = (ct.c_uint * xline_count)()
- errc = self._crossline_indices(self._xl, self.sorting, iline_count, xline_count, self.offsets, xlines, self._tr0, self._bsz)
- err = _error(errc)
-
- if err == _error.OK:
- return map(int, xlines), xlines
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unknown file sorting.")
-
- errno = ct.get_errno()
- raise OSError(errno, "Error while reading crossline indices: %s", os.strerror(errno))
-
- def _init_xline_length(self, iline_count):
- length = ct.c_uint()
- errc = self._crossline_length(self.sorting, self.tracecount, iline_count, self.offsets, ct.byref(length))
- err = _error(errc)
-
- if err == _error.OK:
- return int(length.value)
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unknown file sorting.")
-
- errno = ct.get_errno()
- raise OSError(errno, "Error while determining crossline length: %s", os.strerror(errno))
-
-
- def _init_xline_stride(self, xline_count):
- stride = ct.c_uint()
- errc = self._crossline_stride(self.sorting, xline_count, ct.byref(stride))
- err = _error(errc)
-
- if err == _error.OK:
- return int(stride.value)
-
- if err == _error.INVALID_SORTING:
- raise RuntimeError("Unknown file sorting.")
+ self._filename = filename
+ self._mode = mode
+ self._il = iline
+ self._xl = xline
+
+ # property value holders
+ self._ilines = None
+ self._xlines = None
+ self._tracecount = None
+ self._sorting = None
+ self._offsets = None
+ self._ext_headers = None
+ self._samples = None
+
+ # private values
+ self._iline_length = None
+ self._iline_stride = None
+ self._xline_length = None
+ self._xline_stride = None
+ self._fmt = None
+ self._tr0 = None
+ self._bsz = None
+
+ self.xfd = _segyio.open(filename, mode)
+
+ super(SegyFile, self).__init__()
def __enter__(self):
- """Internal."""
+ """Internal.
+ :rtype: segyio.segy.SegyFile
+ """
return self
def __exit__(self, type, value, traceback):
@@ -587,17 +86,43 @@ class file(BaseCClass):
... # write something to f
... f.flush()
"""
- self._flush()
+ _segyio.flush(self.xfd)
def close(self):
- """Close the file.
+ """Close the file.#
This method is mostly useful for testing.
It is not necessary to call this method if you're using the `with`
statement, which will close the file for you.
"""
- self._close()
+ _segyio.close(self.xfd)
+
+ @property
+ def sorting(self):
+ """ :rtype: int """
+ return self._sorting
+
+ @property
+ def tracecount(self):
+ """ :rtype: int """
+ return self._tracecount
+
+ @property
+ def samples(self):
+ """ :rtype: int """
+ return self._samples
+
+ @property
+ def offsets(self):
+ """ :rtype: int """
+ return self._offsets
+
+ @property
+ def ext_headers(self):
+ """ :rtype: int """
+ return self._ext_headers
+
@property
def header(self):
@@ -609,8 +134,11 @@ class file(BaseCClass):
the generator is realised and the header in question is used. Supports
python slicing (yields a generator), as well as direct lookup.
+
Examples:
Reading a field in a trace::
+ >>> import segyio
+ >>> f = segyio.open("filename")
>>> f.header[10][TraceField.offset]
Writing a field in a trace::
@@ -646,7 +174,7 @@ class file(BaseCClass):
Write a field in every other header::
>>> for h in f.header[::2]:
- ... h = { TraceField.offset = 2 }
+ ... h = { TraceField.offset : 2 }
...
Cache a header:
@@ -663,7 +191,9 @@ class file(BaseCClass):
         file headers the writing will stop, i.e. not all headers in the
destination file will be written to.
- Copy headers from file f to file g::
+ Copy headers from file g to file f:
+ >>> f = segyio.open("path to file")
+ >>> g = segyio.open("path to another file")
>>> f.header = g.header
Set offset field::
@@ -679,12 +209,13 @@ class file(BaseCClass):
False
>>> f.header[2] == g.header[24]
True
+ :rtype: segyio._header.Header
"""
- return _header(self)
+ return Header(self)
@header.setter
def header(self, val):
- if isinstance(val, _header.proxy) or isinstance(val, dict):
+ if isinstance(val, Field) or isinstance(val, dict):
val = itertools.repeat(val)
h, buf = self.header, None
@@ -772,62 +303,8 @@ class file(BaseCClass):
>>> tr = np.zeros(f.samples)
>>> f.trace = itertools.repeat(tr)
"""
- class trace:
- def __getitem__(inner, index, buf = None):
- if isinstance(index, tuple):
- return inner.__getitem__(index[0], index[1])
-
- buf = self._trace_buffer(buf)
-
- if isinstance(index, int):
- if not 0 <= abs(index) < len(inner):
- raise IndexError("Trace %d not in range (-%d,%d)", (index, len(inner), len(inner)))
-
- return self._readtr(index, buf)
-
- elif isinstance(index, slice):
- def gen():
- for i in xrange(*index.indices(len(inner))):
- yield self._readtr(i, buf)
-
- return gen()
-
- else:
- raise TypeError( "Key must be int, slice, (int,np.ndarray) or (slice,np.ndarray)" )
-
- def __setitem__(inner, index, val):
- if not 0 <= abs(index) < len(inner):
- raise IndexError("Trace %d not in range (-%d,%d)", (index, len(inner), len(inner)))
- if not isinstance( val, np.ndarray ):
- raise TypeError( "Value must be numpy.ndarray" )
-
- if val.dtype != np.float32:
- raise TypeError( "Numpy array must be float32" )
-
- shape = (self.samples,)
-
- if val.shape[0] < shape[0]:
- raise TypeError( "Array wrong shape. Expected minimum %s, was %s" % (shape, val.shape))
-
- if isinstance(index, int):
- self._writetr(index, val)
-
- elif isinstance(index, slice):
- for i, buf in xrange(*index.indices(len(inner))), val:
- self._writetr(i, val)
-
- else:
- raise KeyError( "Wrong shape of index" )
-
-
- def __len__(inner):
- return self.tracecount
-
- def __iter__(inner):
- return inner[:]
-
- return trace()
+ return Trace(self)
@trace.setter
def trace(self, val):
@@ -835,17 +312,17 @@ class file(BaseCClass):
for i, v in itertools.izip(xrange(len(tr)), val):
tr[i] = v
- def _line_buffer(self, length, buf = None):
+ def _line_buffer(self, length, buf=None):
shape = (length, self.samples)
if buf is None:
- return np.empty(shape = shape, dtype = np.float32)
+ return np.empty(shape=shape, dtype=np.single)
if not isinstance(buf, np.ndarray):
return buf
- if buf.dtype != np.float32:
- return np.empty(shape = shape, dtype = np.float32)
+ if buf.dtype != np.single:
+ return np.empty(shape=shape, dtype=np.single)
if buf.shape[0] == shape[0]:
return buf
@@ -855,56 +332,14 @@ class file(BaseCClass):
return buf
- def _fread_trace0(self, lineno, length, stride, linenos, line_type):
- line0 = ct.c_uint()
- errc = self._line_trace0(lineno, length, stride, linenos, len(linenos), ct.byref(line0))
- err = _error(errc)
-
- if err == _error.OK:
- return int(line0.value)
-
- if errc == _error.MISSING_LINE_INDEX:
- raise KeyError("%s number %d does not exist." % (line_type, lineno))
-
- errno = ct.get_errno()
- raise OSError( errno, "Unable to read line %d: %s" % (lineno, os.strerror(errno)))
-
def _fread_line(self, trace0, length, stride, buf):
- errc = self._read_line(trace0, length, stride, _floatp(buf), self._tr0, self._bsz)
- err = _error(errc)
-
- if err != _error.OK:
- errno = ct.get_errno()
- raise OSError(errno, "Unable to read line starting at trace %d: %s" % (trace0, os.strerror(errno)))
-
- errc = self._to_native(self._fmt, buf.size, _floatp(buf))
- err = _error(errc)
-
- if err == _error.OK:
- return buf
-
- raise BufferError("Unable to convert line to native float")
-
-
- def _fwrite_line(self, trace0, length, stride, buf):
- errc_conv = self._from_native(self._fmt, buf.size, _floatp(buf))
- err_conv = _error(errc_conv)
-
- if err_conv != _error.OK:
- raise BufferError("Unable to convert line from native float.")
-
- errc = self._write_line(trace0, length, stride, _floatp(buf), self._tr0, self._bsz)
- errc_conv = self._to_native(self._fmt, buf.size, _floatp(buf))
+ return _segyio.read_line(self.xfd, trace0, length, stride, buf, self._tr0, self._bsz, self._fmt, self.samples)
- err = _error(errc)
- err_conv = _error(errc_conv)
- if err != _error.OK:
- errno = ct.get_errno()
- raise OSError(errno, "Error writing line starting at trace %d: %s" % (trace0, os.strerror(errno)))
-
- if err_conv != _error.OK:
- raise BufferError("Unable to convert line from native float.")
+ @property
+ def ilines(self):
+ """ :rtype: numpy.ndarray"""
+ return self._ilines
@property
def iline(self):
@@ -965,23 +400,28 @@ class file(BaseCClass):
>>> f.iline = g.iline[::2]
"""
il_len, il_stride = self._iline_length, self._iline_stride
- lines, raw_lines = self.ilines, self._raw_ilines
+ lines = self.ilines
other_lines = self.xlines
- buffn = lambda x = None: self._line_buffer(il_len, x)
+ buffn = lambda x=None: self._line_buffer(il_len, x)
readfn = self._fread_line
def writefn(t0, length, step, val):
val = buffn(val)
- for i, v in itertools.izip(xrange(t0, t0 + step*length, step), val):
- self._writetr(i, v)
+ for i, v in itertools.izip(xrange(t0, t0 + step * length, step), val):
+ Trace.write_trace(i, v, self)
- return _line(self, il_len, il_stride, lines, raw_lines, other_lines, buffn, readfn, writefn, "Inline")
+ return Line(self, il_len, il_stride, lines, other_lines, buffn, readfn, writefn, "Inline")
@iline.setter
def iline(self, value):
self.iline[:] = value
@property
+ def xlines(self):
+ """ :rtype: numpy.ndarray"""
+ return self._xlines
+
+ @property
def xline(self):
""" Interact with segy in crossline mode.
@@ -991,7 +431,7 @@ class file(BaseCClass):
Note that accessing crosslines uses the line numbers, not their position,
so if a files has crosslines [1400..1450], accessing line [0..100] will be
an error. Note that each line is returned as a numpy array, meaning
- accessing the intersections of the crossline and crossline is 0-indexed.
+ accessing the intersections of the inline and crossline is 0-indexed.
Examples:
             Read a crossline::
@@ -1041,17 +481,17 @@ class file(BaseCClass):
>>> f.xline = g.xline[::2]
"""
xl_len, xl_stride = self._xline_length, self._xline_stride
- lines, raw_lines = self.xlines, self._raw_xlines
+ lines = self.xlines
other_lines = self.ilines
- buffn = lambda x = None: self._line_buffer(xl_len, x)
+ buffn = lambda x=None: self._line_buffer(xl_len, x)
readfn = self._fread_line
def writefn(t0, length, step, val):
val = buffn(val)
- for i, v in itertools.izip(xrange(t0, t0 + step*length, step), val):
- self._writetr(i, v)
+ for i, v in itertools.izip(xrange(t0, t0 + step * length, step), val):
+ Trace.write_trace(i, v, self)
- return _line(self, xl_len, xl_stride, lines, raw_lines, other_lines, buffn, readfn, writefn, "Crossline")
+ return Line(self, xl_len, xl_stride, lines, other_lines, buffn, readfn, writefn, "Crossline")
@xline.setter
def xline(self, value):
@@ -1060,97 +500,25 @@ class file(BaseCClass):
@property
def depth_plane(self):
- def depth_plane(sorting, depth):
+ def readfn(sorting, depth):
il_len = self._iline_length
xl_len = self._xline_length
- dim = None
- if sorting == 1:
+ if sorting == TraceSortingFormat.CROSSLINE_SORTING:
dim = (xl_len, il_len)
- if sorting == 2:
+ elif sorting == TraceSortingFormat.INLINE_SORTING:
dim = (il_len, xl_len)
+ else:
+ raise RuntimeError("Unexpected sorting type")
- if not dim:
- raise Exception("TODO")
-
- plane = np.empty(shape=dim[0] * dim[1], dtype=np.float32)
+ plane = np.empty(shape=dim[0] * dim[1], dtype=np.single)
for i, t in enumerate(self.trace):
plane[i] = t[depth]
return plane.reshape(dim)
- return _DepthPlane(self.samples, self.sorting, depth_plane)
-
- def _readh(self, index, buf = None):
- errc = self._read_header(index, buf, self._tr0, self._bsz)
- err = _error(errc)
-
- if err != _error.OK:
- errno = ct.get_errno()
- raise OSError(errno, os.strerror(errno))
-
- return buf
-
- def _trace_buffer(self, buf = None):
- samples = self.samples
-
- if buf is None:
- buf = np.empty( shape = samples, dtype = np.float32 )
- elif not isinstance( buf, np.ndarray ):
- raise TypeError("Buffer must be None or numpy.ndarray" )
- elif buf.dtype != np.float32:
- buf = np.empty( shape = samples, dtype = np.float32 )
- elif buf.shape != samples:
- buf.reshape( samples )
-
- return buf
-
- def _readtr(self, traceno, buf = None):
- if traceno < 0:
- traceno += self.tracecount
-
- buf = self._trace_buffer(buf)
- bufp = _floatp(buf)
-
- errc = self._read_trace(traceno, bufp, self._tr0, self._bsz)
- err = _error(errc)
-
- if err != _error.OK:
- errno = ct.get_errno()
- raise OSError(errno, "Could not read trace %d: %s" % (traceno, os.strerror(errno)))
-
- errc = self._to_native(self._fmt, self.samples, bufp)
- err = _error(errc)
-
- if err == _error.OK:
- return buf
-
- raise BufferError("Error converting to native float.")
-
- def _writetr(self, traceno, buf):
- bufp = _floatp(buf)
- errc = self._from_native(self._fmt, self.samples, bufp)
- err = _error(errc)
-
- if err != _error.OK:
- raise BufferError("Error converting from native float.")
-
- errc = self._write_trace(traceno, bufp, self._tr0, self._bsz)
- errc_conv = self._to_native(self._fmt, self.samples, bufp)
-
- err, err_conv = _error(errc), _error(errc_conv)
-
- if err != _error.OK and err_conv != SEGY_OK:
- errno = ct.get_errno()
- raise OSError(errno, "Writing trace failed, and array integrity can not be guaranteed: %s" % os.strerror(errno))
-
- if err != _error.OK:
- errno = ct.get_errno()
- raise OSError(errno, "Error writing trace %d: %s" % (traceno, os.strerror(errno)))
-
- if err_conv != _error.OK:
- raise BufferError("Could convert to native float. The array integrety can not be guaranteed.")
+ return DepthPlane(self.samples, self.sorting, readfn)
@property
def text(self):
@@ -1182,38 +550,22 @@ class file(BaseCClass):
... print(line)
...
"""
- class text:
- def __init__(inner):
- inner.size = self._textsize()
+
+ class TextHeader:
def __getitem__(inner, index):
if index > self.ext_headers:
raise IndexError("Textual header %d not in file" % index)
- buf = ct.create_string_buffer(inner.size)
- err = self._texthdr( buf )
-
- if err == 0: return buf.value
-
- errno = ct.get_errno()
- raise OSError(errno, "Could not read text header: %s" % os.strerror(errno))
+ return _segyio.read_textheader(self.xfd, index)
def __setitem__(inner, index, val):
if index > self.ext_headers:
raise IndexError("Textual header %d not in file" % index)
- buf = ct.create_string_buffer(inner.size)
- for i, x in enumerate(val[:inner.size]):
- buf[i] = x
-
- err = self._write_texthdr(index, buf)
-
- if err == 0: return
-
- errno = ct.get_errno()
- raise OSError(errno, "Could not write text header: %s" % os.strerror(errno))
+ _segyio.write_textheader(self.xfd, index, val)
- return text()
+ return TextHeader()
@property
def bin(self):
@@ -1253,69 +605,23 @@ class file(BaseCClass):
Write multiple fields in a trace::
>>> f.bin = { 3213: 5, BinField.SweepFrequencyStart: 17 }
"""
- def get_bfield(buf, field, val):
- errc = self._get_bfield(buf, int(field), ct.byref(val))
- err = _error(errc)
- if err != _error.OK:
- raise IndexError("Invalid byte offset %d" % field)
-
- return int(val.value)
-
- def set_bfield(buf, field, val):
- errc = self._set_bfield(buf, int(field), val)
- err = _error(errc)
-
- if err != _error.OK:
- raise IndexError("Invalid byte offset %d" % field)
-
- buf = (ct.c_char * self._binheader_size())()
- gt = get_bfield
- st = set_bfield
- wr = self._write_binheader
- ty = BinField
-
- err = self._binheader(buf)
- if err == 0:
- return _header.proxy(buf, get_field = gt, set_field = st, write = wr, field_type = ty)
-
- errno = ct.get_errno()
- raise OSError(errno, "Could not read binary header: %s" % os.strerror(errno))
+ return Field.binary(self)
@bin.setter
def bin(self, value):
- try:
- buf = self.bin.buf
-
- except OSError:
- # the file was probably newly created and the binary header hasn't
- # been written yet. if this is the case we want to try and write
- # it. if the file was broken, permissions were wrong etc writing
- # will fail too
- buf = (ct.c_char * self._binheader_size())()
-
- if isinstance(value, dict):
- for k, v in value.items():
- self._set_bfield(buf, int(k), v)
- else:
- buf = value.buf
-
- err = self._write_binheader(buf)
- if err == 0: return
-
- errno = ct.get_errno()
- raise OSError(errno, "Could not write text header: %s" % os.strerror(errno))
+ self.bin.update(value)
@property
def format(self):
d = {
- 1: "4-byte IBM float",
- 2: "4-byte signed integer",
- 3: "2-byte signed integer",
- 4: "4-byte fixed point with gain",
- 5: "4-byte IEEE float",
- 8: "1-byte signed char"
- }
+ 1: "4-byte IBM float",
+ 2: "4-byte signed integer",
+ 3: "2-byte signed integer",
+ 4: "4-byte fixed point with gain",
+ 5: "4-byte IEEE float",
+ 8: "1-byte signed char"
+ }
class fmt:
def __int__(inner):
@@ -1325,7 +631,7 @@ class file(BaseCClass):
if not self._fmt in d:
return "Unknown format"
- return d[ self._fmt ]
+ return d[self._fmt]
return fmt()
@@ -1333,17 +639,17 @@ class file(BaseCClass):
"""Internal."""
pass
+
class spec:
def __init__(self):
- self.iline = 189
- self.ilines = None
- self.xline = 193
- self.xlines = None
- self.offsets = 1
- self.samples = None
- self.tracecount = None
- self.ext_headers = 0
- self.format = None
- self.sorting = None
- self.t0 = 1111.0
- self.depth_plane = None
+ self.iline = 189
+ self.ilines = None
+ self.xline = 193
+ self.xlines = None
+ self.offsets = 1
+ self.samples = None
+ self.tracecount = None
+ self.ext_headers = 0
+ self.format = None
+ self.sorting = None
+ self.t0 = 1111.0
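As a quick illustration of the depth_plane mode that survives the refactoring (assuming an open SegyFile f; each plane is a 2D numpy array over the inline/crossline grid):

    >>> plane = f.depth_plane[5]        # every trace sampled at depth index 5
    >>> for plane in f.depth_plane:     # iterate over all depth indices
    ...     pass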
diff --git a/python/segyio/segysampleformat.py b/python/segyio/segysampleformat.py
index 81abc8a..b8889e0 100644
--- a/python/segyio/segysampleformat.py
+++ b/python/segyio/segysampleformat.py
@@ -1,24 +1,12 @@
-from cwrap import BaseCEnum
-
-
-class SegySampleFormat(BaseCEnum):
- TYPE_NAME = "SegySampleFormat"
-
- IBM_FLOAT_4_BYTE = None
- SIGNED_INTEGER_4_BYTE = None
- SIGNED_SHORT_2_BYTE = None
- FIXED_POINT_WITH_GAIN_4_BYTE = None
- IEEE_FLOAT_4_BYTE = None
- NOT_IN_USE_1 = None
- NOT_IN_USE_2 = None
- SIGNED_CHAR_1_BYTE = None
-
-SegySampleFormat.addEnum("IBM_FLOAT_4_BYTE", 1)
-SegySampleFormat.addEnum("SIGNED_INTEGER_4_BYTE", 2)
-SegySampleFormat.addEnum("SIGNED_SHORT_2_BYTE", 3)
-SegySampleFormat.addEnum("FIXED_POINT_WITH_GAIN_4_BYTE", 4)
-SegySampleFormat.addEnum("IEEE_FLOAT_4_BYTE", 5)
-SegySampleFormat.addEnum("NOT_IN_USE_1", 6)
-SegySampleFormat.addEnum("NOT_IN_USE_2", 7)
-SegySampleFormat.addEnum("SIGNED_CHAR_1_BYTE", 8)
-
+from segyio import Enum
+
+
+class SegySampleFormat(Enum):
+ IBM_FLOAT_4_BYTE = 1
+ SIGNED_INTEGER_4_BYTE = 2
+ SIGNED_SHORT_2_BYTE = 3
+ FIXED_POINT_WITH_GAIN_4_BYTE = 4
+ IEEE_FLOAT_4_BYTE = 5
+ NOT_IN_USE_1 = 6
+ NOT_IN_USE_2 = 7
+ SIGNED_CHAR_1_BYTE = 8
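A small sketch relating the sample format enum to the binary header Format field (assuming an open SegyFile f):

    >>> from segyio import BinField
    >>> from segyio.segysampleformat import SegySampleFormat
    >>> f.bin[BinField.Format] == SegySampleFormat.IEEE_FLOAT_4_BYTE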
diff --git a/python/segyio/tracefield.py b/python/segyio/tracefield.py
index 5223a1a..cb2b64d 100644
--- a/python/segyio/tracefield.py
+++ b/python/segyio/tracefield.py
@@ -1,189 +1,95 @@
-from cwrap import BaseCEnum
+from segyio import Enum
-class TraceField(BaseCEnum):
- TYPE_NAME = "TraceField"
-
- TRACE_SEQUENCE_LINE = None
- TRACE_SEQUENCE_FILE = None
- FieldRecord = None
- TraceNumber = None
- EnergySourcePoint = None
- CDP = None
- CDP_TRACE = None
- TraceIdentificationCode = None
- NSummedTraces = None
- NStackedTraces = None
- DataUse = None
- offset = None
- ReceiverGroupElevation = None
- SourceSurfaceElevation = None
- SourceDepth = None
- ReceiverDatumElevation = None
- SourceDatumElevation = None
- SourceWaterDepth = None
- GroupWaterDepth = None
- ElevationScalar = None
- SourceGroupScalar = None
- SourceX = None
- SourceY = None
- GroupX = None
- GroupY = None
- CoordinateUnits = None
- WeatheringVelocity = None
- SubWeatheringVelocity = None
- SourceUpholeTime = None
- GroupUpholeTime = None
- SourceStaticCorrection = None
- GroupStaticCorrection = None
- TotalStaticApplied = None
- LagTimeA = None
- LagTimeB = None
- DelayRecordingTime = None
- MuteTimeStart = None
- MuteTimeEND = None
- TRACE_SAMPLE_COUNT = None
- TRACE_SAMPLE_INTERVAL = None
- GainType = None
- InstrumentGainConstant = None
- InstrumentInitialGain = None
- Correlated = None
- SweepFrequencyStart = None
- SweepFrequencyEnd = None
- SweepLength = None
- SweepType = None
- SweepTraceTaperLengthStart = None
- SweepTraceTaperLengthEnd = None
- TaperType = None
- AliasFilterFrequency = None
- AliasFilterSlope = None
- NotchFilterFrequency = None
- NotchFilterSlope = None
- LowCutFrequency = None
- HighCutFrequency = None
- LowCutSlope = None
- HighCutSlope = None
- YearDataRecorded = None
- DayOfYear = None
- HourOfDay = None
- MinuteOfHour = None
- SecondOfMinute = None
- TimeBaseCode = None
- TraceWeightingFactor = None
- GeophoneGroupNumberRoll1 = None
- GeophoneGroupNumberFirstTraceOrigField = None
- GeophoneGroupNumberLastTraceOrigField = None
- GapSize = None
- OverTravel = None
- CDP_X = None
- CDP_Y = None
- INLINE_3D = None
- CROSSLINE_3D = None
- ShotPoint = None
- ShotPointScalar = None
- TraceValueMeasurementUnit = None
- TransductionConstantMantissa = None
- TransductionConstantPower = None
- TransductionUnit = None
- TraceIdentifier = None
- ScalarTraceHeader = None
- SourceType = None
- SourceEnergyDirectionMantissa = None
- SourceEnergyDirectionExponent = None
- SourceMeasurementMantissa = None
- SourceMeasurementExponent = None
- SourceMeasurementUnit = None
- UnassignedInt1 = None
- UnassignedInt2 = None
-
-TraceField.addEnum("TRACE_SEQUENCE_LINE", 1)
-TraceField.addEnum("TRACE_SEQUENCE_FILE", 5)
-TraceField.addEnum("FieldRecord", 9)
-TraceField.addEnum("TraceNumber", 13)
-TraceField.addEnum("EnergySourcePoint", 17)
-TraceField.addEnum("CDP", 21)
-TraceField.addEnum("CDP_TRACE", 25)
-TraceField.addEnum("TraceIdentificationCode", 29)
-TraceField.addEnum("NSummedTraces", 31)
-TraceField.addEnum("NStackedTraces", 33)
-TraceField.addEnum("DataUse", 35)
-TraceField.addEnum("offset", 37)
-TraceField.addEnum("ReceiverGroupElevation", 41)
-TraceField.addEnum("SourceSurfaceElevation", 45)
-TraceField.addEnum("SourceDepth", 49)
-TraceField.addEnum("ReceiverDatumElevation", 53)
-TraceField.addEnum("SourceDatumElevation", 57)
-TraceField.addEnum("SourceWaterDepth", 61)
-TraceField.addEnum("GroupWaterDepth", 65)
-TraceField.addEnum("ElevationScalar", 69)
-TraceField.addEnum("SourceGroupScalar", 71)
-TraceField.addEnum("SourceX", 73)
-TraceField.addEnum("SourceY", 77)
-TraceField.addEnum("GroupX", 81)
-TraceField.addEnum("GroupY", 85)
-TraceField.addEnum("CoordinateUnits", 89)
-TraceField.addEnum("WeatheringVelocity", 91)
-TraceField.addEnum("SubWeatheringVelocity", 93)
-TraceField.addEnum("SourceUpholeTime", 95)
-TraceField.addEnum("GroupUpholeTime", 97)
-TraceField.addEnum("SourceStaticCorrection", 99)
-TraceField.addEnum("GroupStaticCorrection", 101)
-TraceField.addEnum("TotalStaticApplied", 103)
-TraceField.addEnum("LagTimeA", 105)
-TraceField.addEnum("LagTimeB", 107)
-TraceField.addEnum("DelayRecordingTime", 109)
-TraceField.addEnum("MuteTimeStart", 111)
-TraceField.addEnum("MuteTimeEND", 113)
-TraceField.addEnum("TRACE_SAMPLE_COUNT", 115)
-TraceField.addEnum("TRACE_SAMPLE_INTERVAL", 117)
-TraceField.addEnum("GainType", 119)
-TraceField.addEnum("InstrumentGainConstant", 121)
-TraceField.addEnum("InstrumentInitialGain", 123)
-TraceField.addEnum("Correlated", 125)
-TraceField.addEnum("SweepFrequencyStart", 127)
-TraceField.addEnum("SweepFrequencyEnd", 129)
-TraceField.addEnum("SweepLength", 131)
-TraceField.addEnum("SweepType", 133)
-TraceField.addEnum("SweepTraceTaperLengthStart", 135)
-TraceField.addEnum("SweepTraceTaperLengthEnd", 137)
-TraceField.addEnum("TaperType", 139)
-TraceField.addEnum("AliasFilterFrequency", 141)
-TraceField.addEnum("AliasFilterSlope", 143)
-TraceField.addEnum("NotchFilterFrequency", 145)
-TraceField.addEnum("NotchFilterSlope", 147)
-TraceField.addEnum("LowCutFrequency", 149)
-TraceField.addEnum("HighCutFrequency", 151)
-TraceField.addEnum("LowCutSlope", 153)
-TraceField.addEnum("HighCutSlope", 155)
-TraceField.addEnum("YearDataRecorded", 157)
-TraceField.addEnum("DayOfYear", 159)
-TraceField.addEnum("HourOfDay", 161)
-TraceField.addEnum("MinuteOfHour", 163)
-TraceField.addEnum("SecondOfMinute", 165)
-TraceField.addEnum("TimeBaseCode", 167)
-TraceField.addEnum("TraceWeightingFactor", 169)
-TraceField.addEnum("GeophoneGroupNumberRoll1", 171)
-TraceField.addEnum("GeophoneGroupNumberFirstTraceOrigField", 173)
-TraceField.addEnum("GeophoneGroupNumberLastTraceOrigField", 175)
-TraceField.addEnum("GapSize", 177)
-TraceField.addEnum("OverTravel", 179)
-TraceField.addEnum("CDP_X", 181)
-TraceField.addEnum("CDP_Y", 185)
-TraceField.addEnum("INLINE_3D", 189)
-TraceField.addEnum("CROSSLINE_3D", 193)
-TraceField.addEnum("ShotPoint", 197)
-TraceField.addEnum("ShotPointScalar", 201)
-TraceField.addEnum("TraceValueMeasurementUnit", 203)
-TraceField.addEnum("TransductionConstantMantissa", 205)
-TraceField.addEnum("TransductionConstantPower", 209)
-TraceField.addEnum("TransductionUnit", 211)
-TraceField.addEnum("TraceIdentifier", 213)
-TraceField.addEnum("ScalarTraceHeader", 215)
-TraceField.addEnum("SourceType", 217)
-TraceField.addEnum("SourceEnergyDirectionMantissa", 219)
-TraceField.addEnum("SourceEnergyDirectionExponent", 223)
-TraceField.addEnum("SourceMeasurementMantissa", 225)
-TraceField.addEnum("SourceMeasurementExponent", 229)
-TraceField.addEnum("SourceMeasurementUnit", 231)
-TraceField.addEnum("UnassignedInt1", 233)
-TraceField.addEnum("UnassignedInt2", 237)
\ No newline at end of file
+class TraceField(Enum):
+ TRACE_SEQUENCE_LINE = 1
+ TRACE_SEQUENCE_FILE = 5
+ FieldRecord = 9
+ TraceNumber = 13
+ EnergySourcePoint = 17
+ CDP = 21
+ CDP_TRACE = 25
+ TraceIdentificationCode = 29
+ NSummedTraces = 31
+ NStackedTraces = 33
+ DataUse = 35
+ offset = 37
+ ReceiverGroupElevation = 41
+ SourceSurfaceElevation = 45
+ SourceDepth = 49
+ ReceiverDatumElevation = 53
+ SourceDatumElevation = 57
+ SourceWaterDepth = 61
+ GroupWaterDepth = 65
+ ElevationScalar = 69
+ SourceGroupScalar = 71
+ SourceX = 73
+ SourceY = 77
+ GroupX = 81
+ GroupY = 85
+ CoordinateUnits = 89
+ WeatheringVelocity = 91
+ SubWeatheringVelocity = 93
+ SourceUpholeTime = 95
+ GroupUpholeTime = 97
+ SourceStaticCorrection = 99
+ GroupStaticCorrection = 101
+ TotalStaticApplied = 103
+ LagTimeA = 105
+ LagTimeB = 107
+ DelayRecordingTime = 109
+ MuteTimeStart = 111
+ MuteTimeEND = 113
+ TRACE_SAMPLE_COUNT = 115
+ TRACE_SAMPLE_INTERVAL = 117
+ GainType = 119
+ InstrumentGainConstant = 121
+ InstrumentInitialGain = 123
+ Correlated = 125
+ SweepFrequencyStart = 127
+ SweepFrequencyEnd = 129
+ SweepLength = 131
+ SweepType = 133
+ SweepTraceTaperLengthStart = 135
+ SweepTraceTaperLengthEnd = 137
+ TaperType = 139
+ AliasFilterFrequency = 141
+ AliasFilterSlope = 143
+ NotchFilterFrequency = 145
+ NotchFilterSlope = 147
+ LowCutFrequency = 149
+ HighCutFrequency = 151
+ LowCutSlope = 153
+ HighCutSlope = 155
+ YearDataRecorded = 157
+ DayOfYear = 159
+ HourOfDay = 161
+ MinuteOfHour = 163
+ SecondOfMinute = 165
+ TimeBaseCode = 167
+ TraceWeightingFactor = 169
+ GeophoneGroupNumberRoll1 = 171
+ GeophoneGroupNumberFirstTraceOrigField = 173
+ GeophoneGroupNumberLastTraceOrigField = 175
+ GapSize = 177
+ OverTravel = 179
+ CDP_X = 181
+ CDP_Y = 185
+ INLINE_3D = 189
+ CROSSLINE_3D = 193
+ ShotPoint = 197
+ ShotPointScalar = 201
+ TraceValueMeasurementUnit = 203
+ TransductionConstantMantissa = 205
+ TransductionConstantPower = 209
+ TransductionUnit = 211
+ TraceIdentifier = 213
+ ScalarTraceHeader = 215
+ SourceType = 217
+ SourceEnergyDirectionMantissa = 219
+ SourceEnergyDirectionExponent = 223
+ SourceMeasurementMantissa = 225
+ SourceMeasurementExponent = 229
+ SourceMeasurementUnit = 231
+ UnassignedInt1 = 233
+ UnassignedInt2 = 237
\ No newline at end of file
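TraceField keeps the same byte offsets while becoming a plain Enum, so trace header access is unchanged (assuming an open, writable SegyFile f):

    >>> from segyio import TraceField
    >>> f.header[5][TraceField.INLINE_3D]
    >>> f.header[5] = { TraceField.INLINE_3D: 12, TraceField.CROSSLINE_3D: 300 }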
diff --git a/python/segyio/tracesortingformat.py b/python/segyio/tracesortingformat.py
index 3e6846b..7df07cc 100644
--- a/python/segyio/tracesortingformat.py
+++ b/python/segyio/tracesortingformat.py
@@ -1,13 +1,7 @@
-from cwrap import BaseCEnum
+from segyio import Enum
-class TraceSortingFormat(BaseCEnum):
- TYPE_NAME = "TraceSortingFormat"
-
- UNKNOWN_SORTING = None
- CROSSLINE_SORTING = None
- INLINE_SORTING = None
-
-TraceSortingFormat.addEnum("UNKNOWN_SORTING", -1)
-TraceSortingFormat.addEnum("CROSSLINE_SORTING", 0)
-TraceSortingFormat.addEnum("INLINE_SORTING", 1)
+class TraceSortingFormat(Enum):
+ UNKNOWN_SORTING = 0
+ CROSSLINE_SORTING = 1
+ INLINE_SORTING = 2
diff --git a/src/segyio/segy.c b/src/segyio/segy.c
index 5198b7f..dc13c74 100644
--- a/src/segyio/segy.c
+++ b/src/segyio/segy.c
@@ -50,8 +50,8 @@ void ebcdic2ascii( const char* ebcdic, char* ascii ) {
}
void ascii2ebcdic( const char* ascii, char* ebcdic ) {
- while( *ascii != '\0' )
- *ebcdic++ = a2e[ (unsigned char)*ascii++ ];
+ while (*ascii != '\0')
+ *ebcdic++ = a2e[(unsigned char) *ascii++];
*ebcdic = '\0';
}
@@ -415,6 +415,10 @@ int segy_set_bfield( char* binheader, int field, int val ) {
}
int segy_binheader( FILE* fp, char* buf ) {
+ if(fp == NULL) {
+ return SEGY_INVALID_ARGS;
+ }
+
const int err = fseek( fp, SEGY_TEXT_HEADER_SIZE, SEEK_SET );
if( err != 0 ) return SEGY_FSEEK_ERROR;
@@ -426,12 +430,16 @@ int segy_binheader( FILE* fp, char* buf ) {
}
int segy_write_binheader( FILE* fp, const char* buf ) {
+ if(fp == NULL) {
+ return SEGY_INVALID_ARGS;
+ }
+
const int err = fseek( fp, SEGY_TEXT_HEADER_SIZE, SEEK_SET );
if( err != 0 ) return SEGY_FSEEK_ERROR;
const size_t writec = fwrite( buf, 1, SEGY_BINARY_HEADER_SIZE, fp);
if( writec != SEGY_BINARY_HEADER_SIZE )
- return SEGY_FREAD_ERROR;
+ return SEGY_FWRITE_ERROR;
return SEGY_OK;
}
@@ -445,7 +453,7 @@ int segy_format( const char* buf ) {
unsigned int segy_samples( const char* buf ) {
int samples;
segy_get_bfield( buf, BIN_Samples, &samples );
- return samples;
+ return (unsigned int) samples;
}
unsigned int segy_trace_bsize( unsigned int samples ) {
@@ -636,6 +644,12 @@ int segy_sorting( FILE* fp,
err = segy_traceheader( fp, 0, traceheader, trace0, trace_bsize );
if( err != SEGY_OK ) return err;
+ if( il < 0 || il >= SEGY_TRACE_HEADER_SIZE )
+ return SEGY_INVALID_FIELD;
+
+ if( xl < 0 || xl >= SEGY_TRACE_HEADER_SIZE )
+ return SEGY_INVALID_FIELD;
+
/* make sure field is valid, so we don't have to check errors later */
if( field_size[ il ] == 0 || field_size[ xl ] == 0 )
return SEGY_INVALID_FIELD;
@@ -658,6 +672,7 @@ int segy_sorting( FILE* fp,
++traceno;
} while( off0 != off1 );
+ // todo: Should expect at least xline, inline or offset to change by one?
if ( il0 == il1 ) *sorting = INLINE_SORTING;
else if( xl0 == xl1 ) *sorting = CROSSLINE_SORTING;
else return SEGY_INVALID_SORTING;
@@ -1089,7 +1104,10 @@ int segy_crossline_stride( int sorting,
}
}
-int segy_textheader( FILE* fp, char* buf ) {
+int segy_read_textheader(FILE *fp, char *buf) { //todo: Missing position/index support
+ if(fp == NULL) {
+ return SEGY_FSEEK_ERROR;
+ }
rewind( fp );
const size_t read = fread( buf, 1, SEGY_TEXT_HEADER_SIZE, fp );
diff --git a/src/segyio/segy.h b/src/segyio/segy.h
index 17b3388..9de955c 100644
--- a/src/segyio/segy.h
+++ b/src/segyio/segy.h
@@ -53,7 +53,7 @@ int segy_traces( FILE*, size_t*, long trace0, unsigned int trace_bsize );
int segy_sample_indexes(FILE* fp, double* buf, double t0, size_t count);
/* text header operations */
-int segy_textheader( FILE*, char* buf );
+int segy_read_textheader(FILE *, char *buf);
unsigned int segy_textheader_size();
int segy_write_textheader( FILE*, unsigned int pos, const char* buf );
@@ -320,7 +320,7 @@ typedef enum {
SourceMeasurementUnit = 231,
UnassignedInt1 = 233,
UnassignedInt2 = 237
-
+
} SEGY_FIELD;
typedef enum {
@@ -387,6 +387,7 @@ typedef enum {
SEGY_MISSING_LINE_INDEX,
SEGY_INVALID_OFFSETS,
SEGY_TRACE_SIZE_MISMATCH,
+ SEGY_INVALID_ARGS
} SEGY_ERROR;
diff --git a/src/spec/segyspec.c b/src/spec/segyspec.c
index a22b373..b326d0e 100644
--- a/src/spec/segyspec.c
+++ b/src/spec/segyspec.c
@@ -1,4 +1,4 @@
-#include <malloc.h>
+#include <stdlib.h>
#include "segyio/segy.h"
#include "segyspec.h"
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 5c2fb68..8d91eb5 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -13,4 +13,8 @@ if(BUILD_MEX)
endif()
endif()
-add_python_test(python.segy test_segy.py)
+if(BUILD_PYTHON)
+ add_python_test(python.segy test_segy.py)
+ add_python_test(python.h.segy test_segyio_c.py)
+ add_python_test(enum.segy test_enum.py)
+endif()
\ No newline at end of file
diff --git a/tests/test_enum.py b/tests/test_enum.py
new file mode 100644
index 0000000..89939fc
--- /dev/null
+++ b/tests/test_enum.py
@@ -0,0 +1,24 @@
+from unittest.case import TestCase
+
+from segyio import Enum
+
+
+class TestEnum(Enum):
+ ZERO = 0
+ ONE = 1
+ TWO = 2
+
+
+class EnumTest(TestCase):
+
+ def test_enum(self):
+ self.assertEqual(TestEnum.ONE, 1)
+
+ one = TestEnum(TestEnum.ONE)
+ self.assertIsInstance(one, TestEnum)
+ self.assertEqual(str(one), "ONE")
+ self.assertEqual(int(one), 1)
+ self.assertEqual(one, 1)
+
+ self.assertListEqual(TestEnum.enums(), TestEnum.enums())
+ self.assertListEqual(TestEnum.enums(), [0, 1, 2])
diff --git a/tests/test_segy.c b/tests/test_segy.c
index 23d1f1f..77ed6c8 100644
--- a/tests/test_segy.c
+++ b/tests/test_segy.c
@@ -442,7 +442,7 @@ void test_text_header() {
FILE* fp = fopen( file, "r" );
char ascii[ SEGY_TEXT_HEADER_SIZE + 1 ] = { 0 };
- int err = segy_textheader( fp, ascii );
+ int err = segy_read_textheader(fp, ascii);
assertTrue( err == 0, "Could not read text header" );
assertTrue( strcmp(expected_textheader, ascii) == 0, "Text headers did not match" );
@@ -519,7 +519,7 @@ void test_file_error_codes() {
err = segy_get_field( header, DayOfYear, &field );
assertTrue( err == SEGY_OK, "Reading failed at valid byte offset." );
- err = segy_textheader( fp, NULL );
+ err = segy_read_textheader(fp, NULL);
assertTrue( err == SEGY_FSEEK_ERROR, "Could seek in invalid file." );
size_t traces;
diff --git a/tests/test_segy.py b/tests/test_segy.py
index 4693240..80cbd83 100644
--- a/tests/test_segy.py
+++ b/tests/test_segy.py
@@ -1,10 +1,102 @@
+from types import GeneratorType
+
+import itertools
import numpy as np
from unittest import TestCase
import segyio
from segyio import TraceField, BinField
import shutil
import filecmp
-import itertools
+
+from segyio._depth_plane import DepthPlane
+from segyio._field import Field
+from segyio._line import Line
+from segyio._header import Header
+from segyio._trace import Trace
+
+
+def mklines(fname):
+ spec = segyio.spec()
+ spec.format = 5
+ spec.sorting = 2
+ spec.samples = 10
+ spec.ilines = range(1, 11)
+ spec.xlines = range(1, 6)
+
+ # create a file with 10 inlines, with values of the form l.0tv where
+ # l = line no
+ # t = trace number (within line)
+ # v = trace value
+ # i.e. 2.043 is the value at inline 2's fourth trace's third value
+ with segyio.create(fname, spec) as dst:
+ ln = np.arange(start = 0,
+ stop = 0.001 * (5 * 10),
+ step = 0.001,
+ dtype = np.single).reshape(5, 10)
+
+ for il in spec.ilines:
+ ln += 1
+
+ dst.header.iline[il] = { TraceField.INLINE_3D: il }
+ dst.iline[il] = ln
+
+ for xl in spec.xlines:
+ dst.header.xline[xl] = { TraceField.CROSSLINE_3D: xl }
+
+
+def make_file(filename, samples, first_iline, last_iline, first_xline, last_xline):
+
+ spec = segyio.spec()
+ # to create a file from nothing, we need to tell segyio about the structure of
+ # the file, i.e. its inline numbers, crossline numbers, etc. You can also add
+ # more structural information, but offsets etc. have sensible defaults. This is
+ # the absolute minimal specification for an N-by-M volume
+ spec.sorting = 2
+ spec.format = 1
+ spec.samples = samples
+ spec.ilines = range(*map(int, [first_iline, last_iline]))
+ spec.xlines = range(*map(int, [first_xline, last_xline]))
+
+ with segyio.create(filename, spec) as f:
+ start = 0.0
+ step = 0.00001
+ # fill a trace with predictable values: left of the decimal point is the
+ # inline number, immediately right of it is the crossline number, and
+ # the rightmost digits are the index of the sample in that trace, meaning
+ # looking up inline i's jth crossline's kth sample should be roughly equal
+ # to i.j0k
+ trace = np.arange(start = start,
+ stop = start + step * spec.samples,
+ step = step,
+ dtype = np.float32)
+
+ # one inline is N traces concatenated. We fill in the xline number
+ line = np.concatenate([trace + (xl / 100.0) for xl in spec.xlines])
+
+ # write the line itself to the file
+ # write the inline number in all this line's headers
+ for ilno in spec.ilines:
+ f.iline[ilno] = (line + ilno)
+ f.header.iline[ilno] = { segyio.TraceField.INLINE_3D: ilno,
+ segyio.TraceField.offset: 1
+ }
+
+ # then do the same for xlines
+ for xlno in spec.xlines:
+ f.header.xline[xlno] = { segyio.TraceField.CROSSLINE_3D: xlno }
+
+
+def il_sample(s):
+ return int(s)
+
+
+def xl_sample(s):
+ return int(round((s-int(s))*100))
+
+
+def depth_sample(s):
+ return int(round((s - il_sample(s) - xl_sample(s)/100.0)*10e2,2)*100)
+
class TestSegy(TestCase):
@@ -75,89 +167,35 @@ class TestSegy(TestCase):
# last sample
self.assertAlmostEqual(5.22049, data[last_line, f.samples-1], places = 6)
- @staticmethod
- def make_file(filename, samples, first_iline, last_iline, first_xline, last_xline):
-
- spec = segyio.spec()
- # to create a file from nothing, we need to tell segyio about the structure of
- # the file, i.e. its inline numbers, crossline numbers, etc. You can also add
- # more structural information, but offsets etc. have sensible defaults. This is
- # the absolute minimal specification for a N-by-M volume
- spec.sorting = 2
- spec.format = 1
- spec.samples = samples
- spec.ilines = range(*map(int, [first_iline, last_iline]))
- spec.xlines = range(*map(int, [first_xline, last_xline]))
-
- with segyio.create(filename, spec) as f:
- start = 0.0
- step = 0.00001
- # fill a trace with predictable values: left-of-comma is the inline
- # number. Immediately right of comma is the crossline number
- # the rightmost digits is the index of the sample in that trace meaning
- # looking up an inline's i's jth crosslines' k should be roughly equal
- # to i.j0k
- trace = np.arange(start = start,
- stop = start + step * spec.samples,
- step = step,
- dtype = np.float32)
-
- # one inline is N traces concatenated. We fill in the xline number
- line = np.concatenate([trace + (xl / 100.0) for xl in spec.xlines])
-
- # write the line itself to the file
- # write the inline number in all this line's headers
- for ilno in spec.ilines:
- f.iline[ilno] = (line + ilno)
- f.header.iline[ilno] = { segyio.TraceField.INLINE_3D: ilno,
- segyio.TraceField.offset: 1
- }
-
- # then do the same for xlines
- for xlno in spec.xlines:
- f.header.xline[xlno] = { segyio.TraceField.CROSSLINE_3D: xlno }
-
- @staticmethod
- def il_sample(s):
- return int(s)
-
- @staticmethod
- def xl_sample(s):
- return int(round((s-int(s))*100))
-
- @classmethod
- def depth_sample(cls, s):
- return int(round((s - cls.il_sample(s) - cls.xl_sample(s)/100.0)*10e2,2)*100)
-
def test_make_file(self):
filename = "test.segy"
samples = 10
- self.make_file(filename, samples, 0, 2, 10, 13)
+ make_file(filename, samples, 0, 2, 10, 13)
with segyio.open(filename, "r") as f:
for xlno, xl in itertools.izip(f.xlines, f.xline):
for ilno, trace in itertools.izip(f.ilines, xl):
for sample_index, sample in itertools.izip(range(samples), trace):
- self.assertEqual(self.il_sample(sample), ilno,
- ("sample: {0}, ilno {1}".format(self.il_sample(sample), ilno)))
- self.assertEqual(self.xl_sample(sample), xlno,
+ self.assertEqual(il_sample(sample), ilno,
+ ("sample: {0}, ilno {1}".format(il_sample(sample), ilno)))
+ self.assertEqual(xl_sample(sample), xlno,
("sample: {0}, xlno {1}, sample {2}".format(
- self.xl_sample(sample), xlno, sample)))
- self.assertEqual(self.depth_sample(sample), sample_index,
+ xl_sample(sample), xlno, sample)))
+ self.assertEqual(depth_sample(sample), sample_index,
("sample: {0}, sample_index {1}, real_sample {2}".format(
- self.depth_sample(sample), sample_index, sample)))
+ depth_sample(sample), sample_index, sample)))
def test_read_all_depth_planes(self):
filename = "test.segy"
samples = 10
- self.make_file(filename, samples, 0, 2, 10, 13)
+ make_file(filename, samples, 0, 2, 10, 13)
with segyio.open(filename, "r") as f:
for i, plane in enumerate(f.depth_plane):
for ilno, xlno in itertools.product(range(len(f.ilines)), range(len(f.xlines))):
- self.assertEqual(self.depth_sample(plane[xlno, ilno]), i,
+ self.assertEqual(depth_sample(plane[xlno, ilno]), i,
"plane[{0},{1}] == {2}, should be 0".format(
- ilno, xlno, self.depth_sample(plane[xlno, ilno])))
+ ilno, xlno, depth_sample(plane[xlno, ilno])))
def test_iline_slicing(self):
with segyio.open(self.filename, "r") as f:
@@ -190,11 +228,10 @@ class TestSegy(TestCase):
xl = f.xlines
with segyio.open(self.filename, "r", segyio.TraceField.CROSSLINE_3D, segyio.TraceField.INLINE_3D) as f:
- self.assertEqual(il, f.xlines)
- self.assertEqual(xl, f.ilines)
+ self.assertListEqual(list(il), list(f.xlines))
+ self.assertListEqual(list(xl), list(f.ilines))
pass
-
def test_file_info(self):
with segyio.open(self.filename, "r") as f:
self.assertEqual(2, f.sorting)
@@ -203,23 +240,12 @@ class TestSegy(TestCase):
xlines = list(xrange(20, 25))
ilines = list(xrange(1, 6))
- self.assertEqual(xlines, f.xlines)
- self.assertEqual(ilines, f.ilines)
+ self.assertEqual(xlines, list(f.xlines))
+ self.assertEqual(ilines, list(f.ilines))
self.assertEqual(25, f.tracecount)
self.assertEqual(len(f.trace), f.tracecount)
self.assertEqual(50, f.samples)
- def native_conversion(self):
- arr1 = np.random.rand(10, dtype=np.float32)
- arr2 = np.copy(arr1)
- self.assertTrue(np.array_equal(arr1, arr2))
-
- # round-trip should not modify data
- segyio.file._from_native(1, f.samples, segyio.asfloatp(arr1))
- self.assertFalse(np.array_equal(arr1, arr2))
- segyio.file._to_native(1, f.samples, segyio.asfloatp(arr1))
- self.assertTrue(np.array_equal(arr1, arr2))
-
def test_traces_slicing(self):
with segyio.open(self.filename, "r") as f:
@@ -312,9 +338,10 @@ class TestSegy(TestCase):
# copy a header
f.header[2] = f.header[1]
f.flush()
+
# don't use this interface in production code, it's only for testing
 # i.e. don't access buf or treat it as a list
- self.assertEqual(list(f.header[2].buf), list(f.header[1].buf))
+ #self.assertEqual(list(f.header[2].buf), list(f.header[1].buf))
def test_write_binary(self):
fname = self.filename.replace( ".sgy", "-binary.sgy")
@@ -354,11 +381,11 @@ class TestSegy(TestCase):
def test_fopen_error(self):
# non-existent file
- with self.assertRaises(OSError):
+ with self.assertRaises(IOError):
segyio.open("no_dir/no_file", "r")
 # non-existent mode
- with self.assertRaises(OSError):
+ with self.assertRaises(IOError):
segyio.open(self.filename, "foo")
def test_wrong_lineno(self):
@@ -371,12 +398,12 @@ class TestSegy(TestCase):
f.xline[2]
def test_open_wrong_inline(self):
- with self.assertRaises(ValueError):
+ with self.assertRaises(IndexError):
with segyio.open(self.filename, "r", 2) as f:
pass
def test_open_wrong_crossline(self):
- with self.assertRaises(ValueError):
+ with self.assertRaises(IndexError):
with segyio.open(self.filename, "r", 189, 2) as f:
pass
@@ -443,7 +470,7 @@ class TestSegy(TestCase):
with segyio.open(dstfile, "r") as dst:
self.assertEqual(20, dst.samples)
- self.assertEqual([x + 100 for x in src.ilines], dst.ilines)
+ self.assertEqual([x + 100 for x in src.ilines], list(dst.ilines))
def test_create_from_naught(self):
fname = "test-data/mk.sgy"
@@ -455,7 +482,7 @@ class TestSegy(TestCase):
spec.xlines = range(1, 6)
with segyio.create(fname, spec) as dst:
- tr = np.arange( start = 1.000, stop = 1.151, step = 0.001, dtype = np.float32 )
+ tr = np.arange( start = 1.000, stop = 1.151, step = 0.001, dtype = np.single)
for i in xrange( len( dst.trace ) ):
dst.trace[i] = tr
@@ -478,39 +505,10 @@ class TestSegy(TestCase):
self.assertEqual(f.header[0][TraceField.offset], f.header[1][TraceField.offset])
self.assertEqual(1, f.header[1][TraceField.offset])
- @staticmethod
- def mklines(fname):
- spec = segyio.spec()
- spec.format = 5
- spec.sorting = 2
- spec.samples = 10
- spec.ilines = range(1, 11)
- spec.xlines = range(1, 6)
-
-# create a file with 10 inlines, with values on the form l.0tv where
-# l = line no
-# t = trace number (within line)
-# v = trace value
-# i.e. 2.043 is the value at inline 2's fourth trace's third value
- with segyio.create(fname, spec) as dst:
- ln = np.arange(start = 0,
- stop = 0.001 * (5 * 10),
- step = 0.001,
- dtype = np.float32).reshape(5, 10)
-
- for il in spec.ilines:
- ln += 1
-
- dst.header.iline[il] = { TraceField.INLINE_3D: il }
- dst.iline[il] = ln
-
- for xl in spec.xlines:
- dst.header.xline[xl] = { TraceField.CROSSLINE_3D: xl }
-
def test_create_write_lines(self):
fname = "test-data/mklines.sgy"
- self.mklines(fname)
+ mklines(fname)
with segyio.open(fname, "r") as f:
self.assertAlmostEqual(1, f.iline[1][0][0], places = 4)
@@ -522,7 +520,7 @@ class TestSegy(TestCase):
fname = "test-data/lines.sgy"
dstfile = fname.replace(".sgy", "-halved.sgy")
- self.mklines(fname)
+ mklines(fname)
with segyio.open(fname, "r") as src:
spec = segyio.spec()
@@ -545,9 +543,51 @@ class TestSegy(TestCase):
dst.iline = src.iline[::2]
with segyio.open(dstfile, "r") as f:
- self.assertEqual(f.ilines, spec.ilines)
- self.assertEqual(f.xlines, [1,3,6])
+ self.assertListEqual(list(f.ilines), list(spec.ilines))
+ self.assertListEqual(list(f.xlines), [1, 3, 6])
self.assertAlmostEqual(1, f.iline[1][0][0], places = 4)
self.assertAlmostEqual(3.004, f.iline[3][0][4], places = 4)
self.assertAlmostEqual(3.014, f.iline[3][1][4], places = 4)
self.assertAlmostEqual(7.023, f.iline[7][2][3], places = 4)
+
+ def test_segyio_types(self):
+ with segyio.open(self.filename, "r") as f:
+ self.assertIsInstance(f.sorting, int)
+ self.assertIsInstance(f.ext_headers, int)
+ self.assertIsInstance(f.tracecount, int)
+ self.assertIsInstance(f.samples, int)
+
+ self.assertIsInstance(f.depth_plane, DepthPlane)
+ self.assertIsInstance(f.depth_plane[1], np.ndarray)
+
+ self.assertIsInstance(f.ilines, np.ndarray)
+ self.assertIsInstance(f.iline, Line)
+ self.assertIsInstance(f.iline[1], np.ndarray)
+ self.assertIsInstance(f.iline[1:3], GeneratorType)
+ self.assertIsInstance(f.iline[1][0], np.ndarray)
+ self.assertIsInstance(f.iline[1][0:2], np.ndarray)
+ self.assertIsInstance(float(f.iline[1][0][0]), float)
+ self.assertIsInstance(f.iline[1][0][0:3], np.ndarray)
+
+ self.assertIsInstance(f.xlines, np.ndarray)
+ self.assertIsInstance(f.xline, Line)
+ self.assertIsInstance(f.xline[21], np.ndarray)
+ self.assertIsInstance(f.xline[21:23], GeneratorType)
+ self.assertIsInstance(f.xline[21][0], np.ndarray)
+ self.assertIsInstance(f.xline[21][0:2], np.ndarray)
+ self.assertIsInstance(float(f.xline[21][0][0]), float)
+ self.assertIsInstance(f.xline[21][0][0:3], np.ndarray)
+
+ self.assertIsInstance(f.header, Header)
+ self.assertIsInstance(f.header.iline, Line)
+ self.assertIsInstance(f.header.iline[1], GeneratorType)
+ self.assertIsInstance(next(f.header.iline[1]), Field)
+ self.assertIsInstance(f.header.xline, Line)
+ self.assertIsInstance(f.header.xline[21], GeneratorType)
+ self.assertIsInstance(next(f.header.xline[21]), Field)
+
+ self.assertIsInstance(f.trace, Trace)
+ self.assertIsInstance(f.trace[0], np.ndarray)
+
+ self.assertIsInstance(f.bin, Field)
+ self.assertIsInstance(f.text, object) # inner TextHeader instance
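
The module-level helpers introduced at the top of this file (il_sample, xl_sample, depth_sample) decode the synthetic trace values written by make_file back into their (inline, crossline, sample) position. A quick worked example of that round trip, using only the helpers defined in this file:

    # make_file writes sample k of crossline j on inline i as roughly
    # i + j / 100.0 + k * 0.00001, e.g. inline 1, crossline 12, sample 7:
    s = 1.12007
    assert il_sample(s) == 1      # integer part       -> inline number
    assert xl_sample(s) == 12     # first two decimals -> crossline number
    assert depth_sample(s) == 7   # remaining digits   -> sample index
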
diff --git a/tests/test_segyio_c.py b/tests/test_segyio_c.py
new file mode 100644
index 0000000..6035c58
--- /dev/null
+++ b/tests/test_segyio_c.py
@@ -0,0 +1,425 @@
+import shutil
+from unittest import TestCase
+
+import numpy
+import segyio._segyio as _segyio
+
+ACTUAL_TEXT_HEADER = "C 1 DATE: 2016-09-19 " \
+ "C 2 AN INCREASE IN AMPLITUDE EQUALS AN INCREASE IN ACOUSTIC IMPEDANCE " \
+ "C 3 Written by libsegyio (python) " \
+ "C 4 " \
+ "C 5 " \
+ "C 6 " \
+ "C 7 " \
+ "C 8 " \
+ "C 9 " \
+ "C10 " \
+ "C11 TRACE HEADER POSITION: " \
+ "C12 INLINE BYTES 189-193 | OFFSET BYTES 037-041 " \
+ "C13 CROSSLINE BYTES 193-197 | " \
+ "C14 " \
+ "C15 END EBCDIC HEADER " \
+ "C16 " \
+ "C17 " \
+ "C18 " \
+ "C19 " \
+ "C20 " \
+ "C21 " \
+ "C22 " \
+ "C23 " \
+ "C24 " \
+ "C25 " \
+ "C26 " \
+ "C27 " \
+ "C28 " \
+ "C29 " \
+ "C30 " \
+ "C31 " \
+ "C32 " \
+ "C33 " \
+ "C34 " \
+ "C35 " \
+ "C36 " \
+ "C37 " \
+ "C38 " \
+ "C39 " \
+ "C40 \x80"
+
+
+class _segyioTests(TestCase):
+ def setUp(self):
+ self.filename = "test-data/small.sgy"
+
+ def test_binary_header_size(self):
+ self.assertEqual(400, _segyio.binheader_size())
+
+ def test_textheader_size(self):
+ self.assertEqual(3201, _segyio.textheader_size())
+
+ def test_open_non_existing_file(self):
+ with self.assertRaises(IOError):
+ f = _segyio.open("non-existing", "r")
+
+ def test_close_non_existing_file(self):
+ with self.assertRaises(TypeError):
+ _segyio.close(None)
+
+ def test_open_and_close_file(self):
+ f = _segyio.open(self.filename, "r")
+ _segyio.close(f)
+
+ def test_open_flush_and_close_file(self):
+ _segyio.flush(None)
+ f = _segyio.open(self.filename, "r")
+ _segyio.flush(f)
+ _segyio.close(f)
+
+ # This does not fail for some reason.
+ # with self.assertRaises(IOError):
+ # _segyio.flush(f)
+
+ def test_read_text_header(self):
+ f = _segyio.open(self.filename, "r")
+
+ self.assertEqual(_segyio.read_textheader(f, 0), ACTUAL_TEXT_HEADER)
+
+ with self.assertRaises(Exception):
+ _segyio.read_texthdr(None, 0)
+
+ _segyio.close(f)
+
+ def test_write_text_header(self):
+ fname = self.filename.replace("small", "txt_hdr_wrt")
+ shutil.copyfile(self.filename, fname)
+ f = _segyio.open(fname, "r+")
+
+ with self.assertRaises(ValueError):
+ _segyio.write_textheader(f, 0, "")
+
+ self.assertEqual(_segyio.read_textheader(f, 0), ACTUAL_TEXT_HEADER)
+
+ _segyio.write_textheader(f, 0, "yolo" * 800)
+
+ self.assertEqual(_segyio.read_textheader(f, 0), "yolo" * 800)
+
+ _segyio.close(f)
+
+ def test_read_and_write_binary_header(self):
+ with self.assertRaises(Exception):
+ hdr = _segyio.read_binaryheader(None)
+
+ with self.assertRaises(Exception):
+ _segyio.write_binaryheader(None, None)
+
+ fname = self.filename.replace("small", "bin_hdr_wrt")
+ shutil.copyfile(self.filename, fname)
+ f = _segyio.open(fname, "r+")
+
+ binary_header = _segyio.read_binaryheader(f)
+
+ with self.assertRaises(Exception):
+ _segyio.write_binaryheader(f, "Not the correct type")
+
+ _segyio.write_binaryheader(f, binary_header)
+ _segyio.close(f)
+
+
+ def test_read_binary_header_fields(self):
+ f = _segyio.open(self.filename, "r")
+
+ binary_header = _segyio.read_binaryheader(f)
+
+ with self.assertRaises(TypeError):
+ value = _segyio.get_field("s", 0)
+
+ with self.assertRaises(IndexError):
+ value = _segyio.get_field(binary_header, -1)
+
+ self.assertEqual(_segyio.get_field(binary_header, 3225), 1)
+ self.assertEqual(_segyio.get_field(binary_header, 3221), 50)
+
+ _segyio.close(f)
+
+ def test_line_metrics(self):
+ f = _segyio.open(self.filename, "r")
+
+ binary_header = _segyio.read_binaryheader(f)
+ ilb = 189
+ xlb = 193
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+ _segyio.close(f)
+
+ sorting = metrics['sorting']
+ trace_count = metrics['trace_count']
+ inline_count = metrics['iline_count']
+ crossline_count = metrics['xline_count']
+ offset_count = metrics['offset_count']
+
+ with self.assertRaises(RuntimeError):
+ metrics = _segyio.init_line_metrics(0, trace_count, inline_count, crossline_count, offset_count)
+
+ metrics = _segyio.init_line_metrics(sorting, trace_count, inline_count, crossline_count, offset_count)
+
+ self.assertEqual(metrics['xline_length'], 5)
+ self.assertEqual(metrics['xline_stride'], 5)
+ self.assertEqual(metrics['iline_length'], 5)
+ self.assertEqual(metrics['iline_stride'], 1)
+
+ def test_metrics(self):
+ f = _segyio.open(self.filename, "r")
+ binary_header = _segyio.read_binaryheader(f)
+ ilb = 189
+ xlb = 193
+
+ with self.assertRaises(TypeError):
+ metrics = _segyio.init_metrics("?", binary_header, ilb, xlb)
+
+ with self.assertRaises(TypeError):
+ metrics = _segyio.init_metrics(f, "?", ilb, xlb)
+
+ with self.assertRaises(IndexError):
+ metrics = _segyio.init_metrics(f, binary_header, ilb + 1, xlb)
+
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+
+ self.assertEqual(metrics['iline_field'], ilb)
+ self.assertEqual(metrics['xline_field'], xlb)
+ self.assertEqual(metrics['trace0'], _segyio.textheader_size() + _segyio.binheader_size() - 1)
+ self.assertEqual(metrics['sample_count'], 50)
+ self.assertEqual(metrics['format'], 1)
+ self.assertEqual(metrics['trace_bsize'], 200)
+ self.assertEqual(metrics['sorting'], 2) # inline sorting = 2, crossline sorting = 1
+ self.assertEqual(metrics['trace_count'], 25)
+ self.assertEqual(metrics['offset_count'], 1)
+ self.assertEqual(metrics['iline_count'], 5)
+ self.assertEqual(metrics['xline_count'], 5)
+
+ _segyio.close(f)
+
+ with self.assertRaises(IOError):
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+
+ def test_line_indices(self):
+ f = _segyio.open(self.filename, "r")
+
+ binary_header = _segyio.read_binaryheader(f)
+ ilb = 189
+ xlb = 193
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+ dmy = numpy.zeros(2, dtype=numpy.uintc)
+
+ dummy_metrics = {'xline_count': 2, 'iline_count': 2}
+
+ with self.assertRaises(TypeError):
+ _segyio.init_line_indices(".", {}, dmy, dmy)
+
+ with self.assertRaises(TypeError):
+ _segyio.init_line_indices(f, "-", dmy, dmy)
+
+ # with self.assertRaises(KeyError):
+ # _segyio.init_line_indices(f, {}, dmy, dmy)
+
+ with self.assertRaises(TypeError):
+ _segyio.init_line_indices(f, dummy_metrics, 1, dmy)
+
+ with self.assertRaises(TypeError):
+ _segyio.init_line_indices(f, dummy_metrics, dmy, 2)
+
+ with self.assertRaises(TypeError):
+ _segyio.init_line_indices(f, dummy_metrics, dmy, 2)
+
+ with self.assertRaises(TypeError):
+ fdmy = numpy.zeros(1, dtype=numpy.single)
+ _segyio.init_line_indices(f, dummy_metrics, fdmy, dmy)
+
+ one = numpy.zeros(1, dtype=numpy.uintc)
+ two = numpy.zeros(2, dtype=numpy.uintc)
+ with self.assertRaises(ValueError):
+ _segyio.init_line_indices(f, dummy_metrics, one, two)
+
+ with self.assertRaises(ValueError):
+ _segyio.init_line_indices(f, dummy_metrics, two, one)
+
+ # Happy Path
+ iline_indexes = numpy.zeros(metrics['iline_count'], dtype=numpy.uintc)
+ xline_indexes = numpy.zeros(metrics['xline_count'], dtype=numpy.uintc)
+ _segyio.init_line_indices(f, metrics, iline_indexes, xline_indexes)
+
+ self.assertListEqual([1, 2, 3, 4, 5], list(iline_indexes))
+ self.assertListEqual([20, 21, 22, 23, 24], list(xline_indexes))
+
+ _segyio.close(f)
+
+ def test_fread_trace0(self):
+ f = _segyio.open(self.filename, "r")
+
+ binary_header = _segyio.read_binaryheader(f)
+ ilb = 189
+ xlb = 193
+
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+
+ sorting = metrics['sorting']
+ trace_count = metrics['trace_count']
+ inline_count = metrics['iline_count']
+ crossline_count = metrics['xline_count']
+ offset_count = metrics['offset_count']
+
+ line_metrics = _segyio.init_line_metrics(sorting, trace_count, inline_count, crossline_count, offset_count)
+
+ iline_indexes = numpy.zeros(metrics['iline_count'], dtype=numpy.uintc)
+ xline_indexes = numpy.zeros(metrics['xline_count'], dtype=numpy.uintc)
+ _segyio.init_line_indices(f, metrics, iline_indexes, xline_indexes)
+
+ with self.assertRaises(KeyError):
+ _segyio.fread_trace0(0, len(xline_indexes), line_metrics['iline_stride'], iline_indexes, "inline")
+
+ with self.assertRaises(KeyError):
+ _segyio.fread_trace0(2, len(iline_indexes), line_metrics['xline_stride'], xline_indexes, "crossline")
+
+ value = _segyio.fread_trace0(1, len(xline_indexes), line_metrics['iline_stride'], iline_indexes, "inline")
+ self.assertEqual(value, 0)
+
+ value = _segyio.fread_trace0(2, len(xline_indexes), line_metrics['iline_stride'], iline_indexes, "inline")
+ self.assertEqual(value, 5)
+
+ value = _segyio.fread_trace0(21, len(iline_indexes), line_metrics['xline_stride'], xline_indexes, "crossline")
+ self.assertEqual(value, 1)
+
+ value = _segyio.fread_trace0(22, len(iline_indexes), line_metrics['xline_stride'], xline_indexes, "crossline")
+ self.assertEqual(value, 2)
+
+ _segyio.close(f)
+
+ def test_get_and_set_field(self):
+ hdr = _segyio.empty_traceheader()
+
+ with self.assertRaises(TypeError):
+ _segyio.get_field(".", 0)
+
+ with self.assertRaises(TypeError):
+ _segyio.set_field(".", 0, 1)
+
+ with self.assertRaises(IndexError):
+ _segyio.get_field(hdr, 0)
+
+ with self.assertRaises(IndexError):
+ _segyio.set_field(hdr, 0, 1)
+
+ _segyio.set_field(hdr, 1, 127)
+ _segyio.set_field(hdr, 5, 67)
+ _segyio.set_field(hdr, 9, 19)
+
+ self.assertEqual(_segyio.get_field(hdr, 1), 127)
+ self.assertEqual(_segyio.get_field(hdr, 5), 67)
+ self.assertEqual(_segyio.get_field(hdr, 9), 19)
+
+ def test_read_and_write_traceheader(self):
+ fname = self.filename.replace("small", "bin_hdr_wrt")
+ shutil.copyfile(self.filename, fname)
+ f = _segyio.open(fname, "r+")
+ binary_header = _segyio.read_binaryheader(f)
+ ilb = 189
+ xlb = 193
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+
+ empty = _segyio.empty_traceheader()
+
+ with self.assertRaises(TypeError):
+ trace_header = _segyio.read_traceheader("+", )
+
+ with self.assertRaises(TypeError):
+ trace_header = _segyio.read_traceheader(f, 0, None)
+
+ trace_header = _segyio.read_traceheader(f, 0, _segyio.empty_traceheader(), metrics['trace0'], metrics['trace_bsize'])
+
+ self.assertEqual(_segyio.get_field(trace_header, ilb), 1)
+ self.assertEqual(_segyio.get_field(trace_header, xlb), 20)
+
+ trace_header = _segyio.read_traceheader(f, 1, _segyio.empty_traceheader(), metrics['trace0'], metrics['trace_bsize'])
+
+ self.assertEqual(_segyio.get_field(trace_header, ilb), 1)
+ self.assertEqual(_segyio.get_field(trace_header, xlb), 21)
+
+ _segyio.set_field(trace_header, ilb, 99)
+ _segyio.set_field(trace_header, xlb, 42)
+
+ _segyio.write_traceheader(f, 0, trace_header, metrics['trace0'], metrics['trace_bsize'])
+
+ trace_header = _segyio.read_traceheader(f, 0, _segyio.empty_traceheader(), metrics['trace0'], metrics['trace_bsize'])
+
+ self.assertEqual(_segyio.get_field(trace_header, ilb), 99)
+ self.assertEqual(_segyio.get_field(trace_header, xlb), 42)
+
+ _segyio.close(f)
+
+ def test_read_and_write_trace(self):
+ f = _segyio.open("test-data/trace-wrt.sgy", "w+")
+
+ buf = numpy.ones(25, dtype=numpy.single)
+ buf[11] = 3.1415
+ _segyio.write_trace(f, 0, buf, 0, 100, 1, 25)
+ buf[:] = 42.0
+ _segyio.write_trace(f, 1, buf, 0, 100, 1, 25)
+
+ _segyio.flush(f)
+
+ buf = numpy.zeros(25, dtype=numpy.single)
+
+ _segyio.read_trace(f, 0, buf, 0, 100, 1, 25)
+
+ self.assertAlmostEqual(buf[10], 1.0, places=4)
+ self.assertAlmostEqual(buf[11], 3.1415, places=4)
+
+ _segyio.read_trace(f, 1, buf, 0, 100, 1, 25)
+
+ self.assertAlmostEqual(sum(buf), 42.0 * 25, places=4)
+
+ _segyio.close(f)
+
+ def read_small(self):
+ f = _segyio.open(self.filename, "r")
+
+ binary_header = _segyio.read_binaryheader(f)
+ ilb = 189
+ xlb = 193
+
+ metrics = _segyio.init_metrics(f, binary_header, ilb, xlb)
+
+ sorting = metrics['sorting']
+ trace_count = metrics['trace_count']
+ inline_count = metrics['iline_count']
+ crossline_count = metrics['xline_count']
+ offset_count = metrics['offset_count']
+
+ line_metrics = _segyio.init_line_metrics(sorting, trace_count, inline_count, crossline_count, offset_count)
+
+ metrics.update(line_metrics)
+
+ iline_indexes = numpy.zeros(metrics['iline_count'], dtype=numpy.uintc)
+ xline_indexes = numpy.zeros(metrics['xline_count'], dtype=numpy.uintc)
+ _segyio.init_line_indices(f, metrics, iline_indexes, xline_indexes)
+
+ return f, metrics, iline_indexes, xline_indexes
+
+ def test_read_line(self):
+ f, metrics, iline_idx, xline_idx = self.read_small()
+
+ tr0 = metrics['trace0']
+ bsz = metrics['trace_bsize']
+ samples = metrics['sample_count']
+ xline_stride = metrics['xline_stride']
+ iline_stride = metrics['iline_stride']
+
+ xline_trace0 = _segyio.fread_trace0(20, len(iline_idx), xline_stride, xline_idx, "crossline")
+ iline_trace0 = _segyio.fread_trace0(1, len(xline_idx), iline_stride, iline_idx, "inline")
+
+ buf = numpy.zeros((len(iline_idx), samples), dtype=numpy.single)
+
+ _segyio.read_line(f, xline_trace0, len(iline_idx), xline_stride, buf, tr0, bsz, 1, samples)
+ self.assertAlmostEqual(sum(sum(buf)), 800.061169624, places=6)
+
+ _segyio.read_line(f, iline_trace0, len(xline_idx), iline_stride, buf, tr0, bsz, 1, samples)
+ self.assertAlmostEqual(sum(sum(buf)), 305.061146736, places=6)
+
+ _segyio.close(f)
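
For orientation, the typical call sequence into the new _segyio extension module, condensed from read_small() and test_read_line() above; this is only a sketch of the calls these tests already exercise, against the same small test cube:

    import numpy
    import segyio._segyio as _segyio

    f = _segyio.open("test-data/small.sgy", "r")
    binary_header = _segyio.read_binaryheader(f)
    metrics = _segyio.init_metrics(f, binary_header, 189, 193)

    iline_idx = numpy.zeros(metrics['iline_count'], dtype=numpy.uintc)
    xline_idx = numpy.zeros(metrics['xline_count'], dtype=numpy.uintc)
    _segyio.init_line_indices(f, metrics, iline_idx, xline_idx)

    line_metrics = _segyio.init_line_metrics(metrics['sorting'],
                                             metrics['trace_count'],
                                             metrics['iline_count'],
                                             metrics['xline_count'],
                                             metrics['offset_count'])

    # read inline 1 into a (traces-per-line, samples) buffer
    trace0 = _segyio.fread_trace0(1, len(xline_idx),
                                  line_metrics['iline_stride'],
                                  iline_idx, "inline")
    buf = numpy.zeros((len(xline_idx), metrics['sample_count']),
                      dtype=numpy.single)
    _segyio.read_line(f, trace0, len(xline_idx), line_metrics['iline_stride'],
                      buf, metrics['trace0'], metrics['trace_bsize'],
                      1, metrics['sample_count'])
    _segyio.close(f)
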
diff --git a/tests/unittest.h b/tests/unittest.h
index c0e3194..72afdcd 100644
--- a/tests/unittest.h
+++ b/tests/unittest.h
@@ -11,7 +11,7 @@
void testAssertionFailed(const char *message, const char *file, int line) {
fprintf(stderr, "Assertion failed in file: %s on line: %d\n", file, line);
if (strlen(message) > 0) {
- fprintf(stderr, message);
+ fprintf(stderr, "%s", message);
}
exit(1);
}
@@ -20,7 +20,7 @@ void testAssertionFailed(const char *message, const char *file, int line) {
void _testAssertTrue(bool value, const char *message, const char *file, int line) {
if (!value) {
- if (strlen(message) == 0) {
+ if (message && strlen(message) == 0) {
message = "The expression did not evaluate to true!";
}
testAssertionFailed(message, file, line);
--