[Python-apps-commits] r11256 - in packages/s3ql/trunk/debian (6 files)

nikratio-guest at users.alioth.debian.org
Fri Sep 5 02:08:00 UTC 2014


    Date: Friday, September 5, 2014 @ 02:07:59
  Author: nikratio-guest
Revision: 11256

* New upstream release.
* Dropped patches/cve_2014_0485.diff, integrated upstream.
* Dropped patches/fix_failsafe_test_race.diff, integrated upstream.

Modified:
  packages/s3ql/trunk/debian/changelog
  packages/s3ql/trunk/debian/patches/check_dev_fuse_perms.diff
  packages/s3ql/trunk/debian/patches/clock-granularity.diff
  packages/s3ql/trunk/debian/patches/series
Deleted:
  packages/s3ql/trunk/debian/patches/cve_2014_0485.diff
  packages/s3ql/trunk/debian/patches/fix_failsafe_test_race.diff

Modified: packages/s3ql/trunk/debian/changelog
===================================================================
--- packages/s3ql/trunk/debian/changelog	2014-09-04 07:59:08 UTC (rev 11255)
+++ packages/s3ql/trunk/debian/changelog	2014-09-05 02:07:59 UTC (rev 11256)
@@ -1,3 +1,11 @@
+s3ql (2.11.1+dfsg-1) UNRELEASED; urgency=medium
+
+  * New upstream release.
+  * Dropped patches/cve_2014_0485.diff, integrated upstream.
+  * Dropped patches/fix_failsafe_test_race.diff, integrated upstream.
+
+ -- Nikolaus Rath <Nikolaus at rath.org>  Thu, 04 Sep 2014 18:28:41 -0700
+
 s3ql (2.10.1+dfsg-4) unstable; urgency=high
 
   * SECURITY UPDATE for CVE-2014-0485.

Modified: packages/s3ql/trunk/debian/patches/check_dev_fuse_perms.diff
===================================================================
--- packages/s3ql/trunk/debian/patches/check_dev_fuse_perms.diff	2014-09-04 07:59:08 UTC (rev 11255)
+++ packages/s3ql/trunk/debian/patches/check_dev_fuse_perms.diff	2014-09-05 02:07:59 UTC (rev 11256)
@@ -11,7 +11,7 @@
 
 --- a/tests/common.py
 +++ b/tests/common.py
-@@ -182,6 +182,13 @@
+@@ -196,6 +196,13 @@
      else:
          os.close(fd)
  

Modified: packages/s3ql/trunk/debian/patches/clock-granularity.diff
===================================================================
--- packages/s3ql/trunk/debian/patches/clock-granularity.diff	2014-09-04 07:59:08 UTC (rev 11255)
+++ packages/s3ql/trunk/debian/patches/clock-granularity.diff	2014-09-05 02:07:59 UTC (rev 11256)
@@ -21,4 +21,4 @@
 +    return max(1, 10 * (stamp2 - stamp1))
  CLOCK_GRANULARITY = get_clock_granularity()
  
- @contextmanager
+ def safe_sleep(secs):

Deleted: packages/s3ql/trunk/debian/patches/cve_2014_0485.diff
===================================================================
--- packages/s3ql/trunk/debian/patches/cve_2014_0485.diff	2014-09-04 07:59:08 UTC (rev 11255)
+++ packages/s3ql/trunk/debian/patches/cve_2014_0485.diff	2014-09-05 02:07:59 UTC (rev 11256)
@@ -1,228 +0,0 @@
-Description: Don't blindly unpickle untrusted data, CVE 2014-0485
-Origin: upstream (commit 8d517337bbbf)
-Forwarded: not-needed
-Last-Update: <2014-08-25>
-Author: Nikolaus Rath <Nikolaus at rath.org>
-
-The pickle protocol allows an attacker to execute arbitrary code by
-providing an appropriately crafted pickle stream. To fix this vulnerability,
-we take several steps:
-
- 1. Only allow pickle protocol 2 (so we know the exact capabilities)
- 2. Only allow the minimum number of opcodes
- 3. Prohibit the Unpickler from accessing any globals other than codecs.encode.
-
-Accessing codecs.encode is required in order to construct bytes objects from
-a pickle protocol 2 stream (which stores them as unicode by decoding them as latin1).
-
-The resulting subset of opcodes allows an attacker to
-
-  * create Python objects constructed from dict, list, tuple, str, unicode,
-    int, float, complex, bool and None
-  * obtain a reference to the codes.encode function
-  * call any of the above objects with parameters formed by any of the
-    above objects.
-
-(cf. http://hg.python.org/cpython/file/3.4/Lib/pickletools.py).
-
-This is sufficient to decode the pickle streams generated by S3QL itself,
-but should no longer allow execution of arbitrary code. Note that of the
-above types only codecs.encode is actually a callable, and (assuming there
-are no bugs in the standard library) calling it with arbitrary arguments
-is safe. Furthermore, the above subset of the pickle protocol does not allow
-attribute access (and the getattr function is not available either), so
-obtaining access to more dangerous objects by "brachiating" from dunder
-attribute to dunder attribute is not possible.
-
-An ideal fix would of course be to switch to a less expressive storage format
-like JSON, but this would break access to any existing file system.
-
---- a/src/s3ql/backends/common.py
-+++ b/src/s3ql/backends/common.py
-@@ -12,6 +12,10 @@
- import time
- import textwrap
- import inspect
-+import codecs
-+import io
-+import pickletools
-+import pickle
- 
- log = logging.getLogger(__name__)
- 
-@@ -494,3 +498,64 @@
- 
-     def __str__(self):
-         return self.str
-+
-+
-+SAFE_UNPICKLE_OPCODES = {'BININT', 'BININT1', 'BININT2', 'LONG1', 'LONG4',
-+                         'BINSTRING', 'SHORT_BINSTRING', 'GLOBAL',
-+                         'NONE', 'NEWTRUE', 'NEWFALSE', 'BINUNICODE',
-+                         'BINFLOAT', 'EMPTY_LIST', 'APPEND', 'APPENDS',
-+                         'LIST', 'EMPTY_TUPLE', 'TUPLE', 'TUPLE1', 'TUPLE2',
-+                         'TUPLE3', 'EMPTY_DICT', 'DICT', 'SETITEM',
-+                         'SETITEMS', 'POP', 'DUP', 'MARK', 'POP_MARK',
-+                         'BINGET', 'LONG_BINGET', 'BINPUT', 'LONG_BINPUT',
-+                         'PROTO', 'STOP', 'REDUCE'}
-+
-+SAFE_UNPICKLE_GLOBAL_NAMES = { ('__builtin__', 'bytearray'),
-+                               ('__builtin__', 'set'),
-+                               ('__builtin__', 'frozenset'),
-+                               ('_codecs', 'encode') }
-+SAFE_UNPICKLE_GLOBAL_OBJS = { bytearray, set, frozenset, codecs.encode }
-+
-+class SafeUnpickler(pickle.Unpickler):
-+    def find_class(self, module, name):
-+        if (module, name) not in SAFE_UNPICKLE_GLOBAL_NAMES:
-+            raise pickle.UnpicklingError("global '%s.%s' is unsafe" %
-+                                         (module, name))
-+        ret = super().find_class(module, name)
-+        if ret not in SAFE_UNPICKLE_GLOBAL_OBJS:
-+            raise pickle.UnpicklingError("global '%s.%s' is unsafe" %
-+                                         (module, name))
-+        return ret
-+
-+
-+def safe_unpickle_fh(fh, fix_imports=True, encoding="ASCII",
-+                  errors="strict"):
-+    '''Safely unpickle untrusted data from *fh*
-+
-+    *fh* must be seekable.
-+    '''
-+
-+    if not fh.seekable():
-+        raise TypeError('*fh* must be seekable')
-+    pos = fh.tell()
-+
-+    # First make sure that we know all used opcodes
-+    for (opcode, arg, _) in pickletools.genops(fh):
-+        if opcode.proto > 2 or opcode.name not in SAFE_UNPICKLE_OPCODES:
-+            raise pickle.UnpicklingError('opcode %s is unsafe' % opcode.name)
-+
-+    fh.seek(pos)
-+
-+    # Then use a custom Unpickler to ensure that we only give access to
-+    # specific, whitelisted globals. Note that with the above opcodes, there is
-+    # no way to trigger attribute access, so "brachiating" from a white listed
-+    # object to __builtins__ is not possible.
-+    return SafeUnpickler(fh, fix_imports=fix_imports,
-+                         encoding=encoding, errors=errors).load()
-+
-+def safe_unpickle(buf, fix_imports=True, encoding="ASCII",
-+                  errors="strict"):
-+    '''Safely unpickle untrusted data in *buf*'''
-+
-+    return safe_unpickle_fh(io.BytesIO(buf), fix_imports=fix_imports,
-+                            encoding=encoding, errors=errors)
---- a/src/s3ql/backends/comprenc.py
-+++ b/src/s3ql/backends/comprenc.py
-@@ -8,7 +8,7 @@
- 
- from ..logging import logging # Ensure use of custom logger class
- from .. import BUFSIZE, PICKLE_PROTOCOL
--from .common import AbstractBackend, ChecksumError
-+from .common import AbstractBackend, ChecksumError, safe_unpickle
- from ..inherit_docstrings import (copy_ancestor_docstring, prepend_ancestor_docstring,
-                                   ABCDocstMeta)
- from Crypto.Cipher import AES
-@@ -112,7 +112,7 @@
- 
-         if not encrypted:
-             try:
--                return (None, pickle.loads(buf, encoding='latin1'))
-+                return (None, safe_unpickle(buf, encoding='latin1'))
-             except pickle.UnpicklingError:
-                 raise ChecksumError('Invalid metadata')
- 
-@@ -134,8 +134,8 @@
-                                 % (stored_key, key))
- 
-         buf = b64decode(metadata['data'])
--        return (nonce, pickle.loads(aes_cipher(meta_key).decrypt(buf),
--                                    encoding='latin1'))
-+        return (nonce, safe_unpickle(aes_cipher(meta_key).decrypt(buf),
-+                                     encoding='latin1'))
- 
-     @prepend_ancestor_docstring
-     def open_read(self, key):
---- a/src/s3ql/backends/local.py
-+++ b/src/s3ql/backends/local.py
-@@ -9,7 +9,8 @@
- from ..logging import logging # Ensure use of custom logger class
- from .. import BUFSIZE, PICKLE_PROTOCOL
- from ..inherit_docstrings import (copy_ancestor_docstring, ABCDocstMeta)
--from .common import AbstractBackend, DanglingStorageURLError, NoSuchObject, ChecksumError
-+from .common import (AbstractBackend, DanglingStorageURLError, NoSuchObject,
-+                     ChecksumError, safe_unpickle_fh)
- import _thread
- import io
- import os
-@@ -57,14 +58,11 @@
-         path = self._key_to_path(key)
-         try:
-             with open(path, 'rb') as src:
--                return pickle.load(src, encoding='latin1')
-+                return safe_unpickle_fh(src, encoding='latin1')
-         except FileNotFoundError:
-             raise NoSuchObject(key)
-         except pickle.UnpicklingError as exc:
--            if (isinstance(exc.args[0], str)
--                and exc.args[0].startswith('invalid load key')):
--                raise ChecksumError('Invalid metadata')
--            raise
-+            raise ChecksumError('Invalid metadata, pickle says: %s' % exc)
- 
-     @copy_ancestor_docstring
-     def get_size(self, key):
-@@ -79,12 +77,9 @@
-             raise NoSuchObject(key)
- 
-         try:
--            fh.metadata = pickle.load(fh, encoding='latin1')
-+            fh.metadata = safe_unpickle_fh(fh, encoding='latin1')
-         except pickle.UnpicklingError as exc:
--            if (isinstance(exc.args[0], str)
--                and exc.args[0].startswith('invalid load key')):
--                raise ChecksumError('Invalid metadata')
--            raise
-+            raise ChecksumError('Invalid metadata, pickle says: %s' % exc)
-         return fh
- 
-     @copy_ancestor_docstring
-@@ -191,9 +186,9 @@
- 
-             if metadata is not None:
-                 try:
--                    pickle.load(src, encoding='latin1')
--                except pickle.UnpicklingError:
--                    raise ChecksumError('Invalid metadata')
-+                    safe_unpickle_fh(src, encoding='latin1')
-+                except pickle.UnpicklingError as exc:
-+                    raise ChecksumError('Invalid metadata, pickle says: %s' % exc)
-                 pickle.dump(metadata, dest, PICKLE_PROTOCOL)
-             shutil.copyfileobj(src, dest, BUFSIZE)
-         except:
---- a/src/s3ql/backends/s3c.py
-+++ b/src/s3ql/backends/s3c.py
-@@ -9,7 +9,7 @@
- from ..logging import logging, QuietError # Ensure use of custom logger class
- from .. import PICKLE_PROTOCOL, BUFSIZE
- from .common import (AbstractBackend, NoSuchObject, retry, AuthorizationError,
--    AuthenticationError, DanglingStorageURLError, retry_generator)
-+    AuthenticationError, DanglingStorageURLError, retry_generator, safe_unpickle)
- from ..inherit_docstrings import (copy_ancestor_docstring, prepend_ancestor_docstring,
-                                   ABCDocstMeta)
- from io import BytesIO
-@@ -689,7 +689,10 @@
-                 log.warning('MD5 mismatch in metadata for %s', key)
-                 raise BadDigestError('BadDigest',
-                                      'Meta MD5 for %s does not match' % key)
--            return pickle.loads(b64decode(buf), encoding='latin1')
-+            try:
-+                return safe_unpickle(b64decode(buf), encoding='latin1')
-+            except pickle.UnpicklingError as exc:
-+                raise ChecksumError('Corrupted metadata, pickle says: %s' % exc)
-         elif format_ == 'raw': # No MD5 available
-             return meta
-         else:

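For reference, the fix carried by the deleted patch above (now integrated upstream) boils down to two independent checks: scan the pickle stream's opcodes with pickletools before loading anything, and restrict Unpickler.find_class() to a short whitelist of globals. The standalone Python sketch below illustrates the same technique; the names RestrictedUnpickler, restricted_loads and SAFE_OPCODES are made up for the example and are not the s3ql API.

import codecs
import io
import pickle
import pickletools

# Opcode whitelist taken from the patch above (protocol <= 2 only).
SAFE_OPCODES = {'BININT', 'BININT1', 'BININT2', 'LONG1', 'LONG4',
                'BINSTRING', 'SHORT_BINSTRING', 'GLOBAL',
                'NONE', 'NEWTRUE', 'NEWFALSE', 'BINUNICODE',
                'BINFLOAT', 'EMPTY_LIST', 'APPEND', 'APPENDS',
                'LIST', 'EMPTY_TUPLE', 'TUPLE', 'TUPLE1', 'TUPLE2',
                'TUPLE3', 'EMPTY_DICT', 'DICT', 'SETITEM',
                'SETITEMS', 'POP', 'DUP', 'MARK', 'POP_MARK',
                'BINGET', 'LONG_BINGET', 'BINPUT', 'LONG_BINPUT',
                'PROTO', 'STOP', 'REDUCE'}

# The only global a stream may reference: codecs.encode is needed to rebuild
# bytes objects, which protocol 2 stores as latin1-decoded strings.
ALLOWED_GLOBALS = {('_codecs', 'encode'): codecs.encode}

class RestrictedUnpickler(pickle.Unpickler):
    def find_class(self, module, name):
        try:
            return ALLOWED_GLOBALS[(module, name)]
        except KeyError:
            raise pickle.UnpicklingError("global '%s.%s' is not whitelisted"
                                         % (module, name))

def restricted_loads(buf):
    # Pass 1: reject unknown opcodes. genops() only parses the stream,
    # it never instantiates anything.
    for opcode, _, _ in pickletools.genops(io.BytesIO(buf)):
        if opcode.proto > 2 or opcode.name not in SAFE_OPCODES:
            raise pickle.UnpicklingError('opcode %s is unsafe' % opcode.name)
    # Pass 2: load with the restricted find_class().
    return RestrictedUnpickler(io.BytesIO(buf)).load()

if __name__ == '__main__':
    good = pickle.dumps({'name': 'obj_1', 'size': 42, 'raw': b'\x00\x01'},
                        protocol=2)
    print(restricted_loads(good))

    # Classic os.system() payload; rejected before anything is executed.
    evil = b"cos\nsystem\n(S'echo pwned'\ntR."
    try:
        restricted_loads(evil)
    except pickle.UnpicklingError as exc:
        print('rejected:', exc)

The second pass is what keeps the GLOBAL opcode harmless: it stays in the whitelist, but only codecs.encode can ever be resolved through it.
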
Deleted: packages/s3ql/trunk/debian/patches/fix_failsafe_test_race.diff
===================================================================
--- packages/s3ql/trunk/debian/patches/fix_failsafe_test_race.diff	2014-09-04 07:59:08 UTC (rev 11255)
+++ packages/s3ql/trunk/debian/patches/fix_failsafe_test_race.diff	2014-09-05 02:07:59 UTC (rev 11256)
@@ -1,36 +0,0 @@
-Description: Fix race condition in unit test
-Origin: upstream (commit  9a8c0eb)
-Forwarded: no
-Last-Update: <2014-08-20>
-Author: Nikolaus Rath <Nikolaus at rath.org>
-
-NewerMetadataTest: take into account that metadata upload is async,
-so fs won't switch to failsafe right away.
-
---- a/tests/t5_failsafe.py
-+++ b/tests/t5_failsafe.py
-@@ -17,6 +17,7 @@
- import s3ql.ctrl
- import pytest
- import errno
-+import time
- from common import get_remote_test_info, NoTestSection
- from s3ql.backends import gs
- from argparse import Namespace
-@@ -125,10 +126,13 @@
-         # Try to upload metadata
-         s3ql.ctrl.main(['upload-meta', self.mnt_dir])
- 
--        # Try to write
-+        # Try to write. We repeat a few times, since the metadata upload
-+        # happens asynchronously.
-         with pytest.raises(PermissionError):
--            with open(fname + 'barz', 'w') as fh:
--                fh.write('foobar')
-+            for _ in range(10):
-+                with open(fname + 'barz', 'w') as fh:
-+                    fh.write('foobar')
-+                time.sleep(1)
- 
-         self.umount()
- 

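The deleted fix_failsafe_test_race.diff addressed a generic testing problem: the assertion has to wait for an asynchronous state change (the metadata upload) before the expected PermissionError can appear. A small helper in the same spirit might look like the sketch below; retry_until_raises and its defaults are illustrative and not part of the s3ql test suite.

import time

def retry_until_raises(exc_type, fn, attempts=10, delay=1.0):
    # Call fn() repeatedly until it raises exc_type, giving an asynchronous
    # state change time to take effect. Fails if fn() keeps succeeding,
    # mirroring the intent of pytest.raises() around the retry loop above.
    for _ in range(attempts):
        try:
            fn()
        except exc_type:
            return
        time.sleep(delay)
    raise AssertionError('%s was never raised' % exc_type.__name__)

In the test above, fn would be the attempt to write fname + 'barz' on the mounted file system.
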
Modified: packages/s3ql/trunk/debian/patches/series
===================================================================
--- packages/s3ql/trunk/debian/patches/series	2014-09-04 07:59:08 UTC (rev 11255)
+++ packages/s3ql/trunk/debian/patches/series	2014-09-05 02:07:59 UTC (rev 11256)
@@ -1,5 +1,3 @@
-cve_2014_0485.diff
-fix_failsafe_test_race.diff
 proc_mount.diff
 clock-granularity.diff
 check_dev_fuse_perms.diff
