[SCM] morituri/master: Imported Upstream version 0.2.0

js at users.alioth.debian.org js at users.alioth.debian.org
Sun Oct 19 20:09:55 UTC 2014


The following commit has been merged in the master branch:
commit f17c0172f8d84771182ba4cdbaba34fb88f7e6a0
Author: Jonas Smedegaard <dr at jones.dk>
Date:   Fri Jan 25 00:36:47 2013 +0100

    Imported Upstream version 0.2.0

diff --git a/ChangeLog b/ChangeLog
index 0477aed..f98473d 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,272 @@
+2013-01-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/config.py:
+	* morituri/rip/cd.py:
+	* morituri/rip/drive.py:
+	  Handle missing config better.
+	  Fixes #111.
+
+2013-01-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/config.py:
+	* morituri/rip/cd.py:
+	* morituri/test/test_common_config.py:
+	  strip model/release too.
+
+2012-12-23  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	* morituri/rip/cd.py:
+	  feature: add %x for extension to track/disc template.
+
+2012-12-22  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	  feature: add %r/%R for release type to track/disc template.
+	* morituri/rip/image.py:
+	  feature: add rip image rename to rename files based on metadata.
+
+2012-12-22  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/musicbrainzngs.py:
+	* morituri/rip/debug.py:
+	  Also store releaseType for disc metadata.
+
+2012-12-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	  add release id to output.
+	* morituri/rip/image.py:
+	  add --release-id to rip image retag.  Fixes #96.
+
+2012-12-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/common.py:
+	  Strip bitrate tags too for comparison of dicts.
+	  Add a way to show us the different keys between dicts.
+	* morituri/common/encode.py:
+	  Debug different keys.
+
+2012-12-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/encode.py:
+	  Collect all tags by replacing from newer tag messages.
+	  Gets more than bitrate from flacs now.
+
+2012-12-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/debug.py:
+	  Add rip debug tag to debug reading tags.
+
+2012-12-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/image.py:
+	  Use getRealPath when retagging an image, fixes bug where it
+	  doesn't find the files relative to the cuepath.
+
+2012-12-06  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	* morituri/rip/cd.py:
+	* morituri/rip/image.py:
+	  Add stdout to program.  Use it for getting musicbrainz info.
+
+2012-12-04  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/program/cdparanoia.py:
+	* morituri/test/test_program_cdparanoia.py:
+	  Add a task that can analyze the drive for whether it defeats
+	  the audio cache.
+	* morituri/result/result.py:
+	* morituri/rip/cd.py:
+	  Make it possible to store whether a drive defeats audio cache
+	  in the result.
+	* morituri/rip/drive.py:
+	  Add rip drive analyze command to analyze caching of a drive,
+	  and store it in the config.
+
+2012-12-04  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/task.py:
+	  Failed and done can stay unimplemented.
+	  If we read stdout or stderr, reschedule immediately to process
+	  all output before considering the possibility the program stopped.
+
+2012-12-04  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/config.py:
+	  Add methods to get/set defeating of audio cache.
+	  Make sure that we set read offset even if section is already there.
+
+2012-12-03  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/task.py:
+	* morituri/program/cdrdao.py:
+	  Factor out a PopenTask base class.
+
+2012-12-03  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	  Store rip result after verifying AccurateRip so those results
+	  are stored too.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/cd.py:
+	* morituri/result/result.py:
+	  Store versions and encoding profile info in the rip result.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/test/test_common_gstreamer.py (added):
+	* morituri/common/gstreamer.py:
+	* morituri/test/Makefile.am:
+	  add functions to get versions of gstreamer, gst-python,
+	  and element factory plugins.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/debug.py:
+	  add rip debug resultcache log command to generate a log
+	  based on a cached result.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/common.py:
+	  Remove migrated classes.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/cache.py:
+	  Disable the version-based persistence deleting.
+	  Allow not creating a fresh ripresult when getting one.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/cache.py:
+	* morituri/rip/debug.py:
+	* morituri/test/test_common_cache.py:
+	  Add rip debug resultcache list to list cached results.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/cache.py (added):
+	* morituri/test/cache (added):
+	* morituri/test/cache/result (added):
+	* morituri/test/cache/result/fe105a11.pickle (added):
+	* morituri/test/test_common_cache.py (added):
+	* morituri/common/Makefile.am:
+	* morituri/common/program.py:
+	* morituri/test/Makefile.am:
+	  Extract ResultCache object into separate file.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/drive.py:
+	  rip drive list now shows configured read offset if applicable.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/cd.py:
+	  Use configured read offset for drive if possible.
+	  Fixes #76.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/rip/main.py:
+	  Add the config object to the root command.
+	* morituri/rip/offset.py:
+	  Save the drive's read offset when we find it.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri.spec.in:
+	* morituri/common/config.py:
+	  Use XDG if we can import xdg.
+	  Fix writing the config.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/drive.py:
+	* morituri/rip/drive.py:
+	  Extract getDeviceInfo function.
+
+2012-12-02  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/config.py (added):
+	* morituri/test/test_common_config.py (added):
+	* morituri/common/Makefile.am:
+	* morituri/test/Makefile.am:
+	  First stab at adding a configuration file to store
+	  drive read offsets.
+
+2012-11-28  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	* morituri/rip/cd.py:
+	  Reset rip duration when we continue a rip but have to rerip
+	  a track.
+
+2012-11-27  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/common/program.py:
+	* morituri/program/cdparanoia.py:
+	* morituri/result/result.py:
+	  Track ripping speed for test and encode per-track.
+
+2012-11-25  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	patch by: mustbenice
+
+	* morituri/program/cdparanoia.py:
+	* morituri/result/result.py:
+	* morituri/rip/cd.py:
+	* morituri/test/test_program_cdparanoia.py:
+	  Get cdparanoia version.
+	  Store both cdparanoia and cdrdao versions on rip result.
+
+2012-11-25  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/program/cdparanoia.py:
+	  Make sure we calculate fractional speed.
+
+2012-11-25  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	patch by: mustbenice
+
+	* morituri/common/program.py:
+	* morituri/program/cdparanoia.py:
+	* morituri/result/logger.py:
+	* morituri/result/result.py:
+	  Calculate speed of reading track.
+	  Output Test and Copy speed per track in log.
+
+2012-11-25  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri.spec.in:
+	  Add plugins directory.
+
+2012-11-25  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* morituri/result/logger.py:
+	* morituri/result/result.py:
+	* morituri/rip/cd.py:
+	* morituri/rip/main.py:
+	  Expose loggers as pluggable.
+	  Add --logger option to rip cd rip to specify logger.
+
+2012-11-25  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* configure.ac:
+	* morituri/configure/installed.py.in:
+	* morituri/configure/uninstalled.py.in:
+	  Create configure.configure.pluginsdir
+
+2012-11-23  Thomas Vander Stichele  <thomas at apestaart dot org>
+
+	* configure.ac:
+	  back to development.
+
 === release 0.1.3 ===
 
 2012-11-23  Thomas Vander Stichele  <thomas at apestaart dot org>
diff --git a/Makefile.in b/Makefile.in
index 8ecc181..903fd8e 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -165,6 +165,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/NEWS b/NEWS
index 7ebd019..216a932 100644
--- a/NEWS
+++ b/NEWS
@@ -1,4 +1,30 @@
-This is morituri 0.1.3, "cranes"
+This is morituri 0.2.0, "ears"
+
+Coverage in 0.2.0: 67 %   (1890 / 2807), 95 python tests
+
+Features added in 0.2.0:
+
+- added plugins system for logger
+- added rip cd rip --logger to specify logger
+- added reading speed, cdparanoia and cdrdao version to logger
+- added rip drive analyze to detect whether we can defeat audio cache behaviour
+- store drive offsets and cache defeating in config file
+- rip drive list shows configured offset and audio cache defeating
+- added rip image retag --release-id to specify the release id to tag with
+- added %r/%R for release type to use in track/disc template
+- added %x for extension to release template
+
+Bugs fixed in 0.2.0:
+
+-  89: Fails to rip track with \ in its name
+- 105: Backslash in track names causes "Cannot find file" during rip
+- 108: Unable to find offset / rip
+- 109: KeyError when running "rip offset find"
+- 111: Python traceback when config has no read offset for CD
+-  76: morituri should allow for a configuration file
+-  96: rip image retag: allow specification of release ID
+- 107: Backslash in track name confuses AR step
+- 112: add MusicBrainz lookup URL to generated logfile
 
 Coverage in 0.1.3: 60 %   (1716 / 2825), 85 python tests
 
diff --git a/README b/README
index 6fa697e..6946f0e 100644
--- a/README
+++ b/README
@@ -11,13 +11,14 @@ FEATURES
 --------
 * support for MusicBrainz for metadata lookup
 * support for AccurateRip verification
-* detects sample read offset of drives
+* detects sample read offset and ability to defeat cache of drives
 * performs test and copy rip
 * detects and rips Hidden Track One Audio
 * templates for file and directory naming
 * support for lossless encoding and lossy encoding or re-encoding of images
 * tagging using GStreamer, including embedding MusicBrainz id's
 * retagging of images
+* plugins for logging
 * for now, only a command line client (rip) is shipped
 
 REQUIREMENTS
@@ -39,7 +40,7 @@ use morituri installed or uninstalled.
     (For example, $HOME/dev/ext or $HOME/prefix/src)
   - source: download tarball, unpack, and change to its directory
   - checkout:
-    svn co https://thomas.apestaart.org/morituri/svn/trunk morituri
+    git clone git://github.com/thomasvs/morituri.git
     cd morituri
     ./autogen.sh
 
@@ -51,8 +52,8 @@ use morituri installed or uninstalled.
   - installing:
     make install
   - running uninstalled:
-    ln -sf `pwd`/misc/morituri-uninstalled $HOME/bin/morituri-trunk
-    morituri-trunk
+    ln -sf `pwd`/misc/morituri-uninstalled $HOME/bin/morituri-git
+    morituri-git
     (this drops you in a shell where everything is set up to use morituri)
 
 RUNNING MORITURI
@@ -83,6 +84,8 @@ The simplest way to get started making accurate rips is:
   rip offset find
 - wait for it to complete; this might take a while
 - optionally, confirm this offset with two more discs
+- analyze the drive's caching behaviour
+  rip drive analyze
 - rip the disc by running
   rip cd rip --offset (the number you got before)
 
diff --git a/RELEASE b/RELEASE
index 3b7a41a..be74f73 100644
--- a/RELEASE
+++ b/RELEASE
@@ -1,41 +1,41 @@
 morituri is a CD ripper aiming for accuracy over speed.
 Its features are modeled to compare with Exact Audio Copy on Windows.
 
-This is morituri 0.1.3 "cranes".
+This is morituri 0.2.0 "ears".
 
 This is intended as a release for daring and curious people who've had enough
 of the fact that Windows has a more accurate CD ripper than Linux.
 
+Coverage in 0.2.0: 67 %   (1890 / 2807), 95 python tests
 
-Coverage in 0.1.3: 60 %   (1716 / 2825), 85 python tests
+Features added in 0.2.0:
 
-Features added in 0.1.3:
+- added plugins system for logger
+- added rip cd rip --logger to specify logger
+- added reading speed, cdparanoia and cdrdao version to logger
+- added rip drive analyze to detect whether we can defeat audio cache behaviour
+- store drive offsets and cache defeating in config file
+- rip drive list shows configured offset and audio cache defeating
+- added rip image retag --release-id to specify the release id to tag with
+- added %r/%R for release type to use in track/disc template
+- added %x for extension to release template
 
-- shorten really long file names if needed
-- support multi-disc ripping
-- add %y for release year in templates
-- added rip cd rip --release-id option to select the exact release
-- allow track and disc templates to create files in different directories
-- work out relative paths from cue/m3u files to audio files
+Bugs fixed in 0.2.0:
 
-Bugs fixed in 0.1.3:
+-  89: Fails to rip track with \ in its name
+- 105: Backslash in track names causes "Cannot find file" during rip
+- 108: Unable to find offset / rip
+- 109: KeyError when running "rip offset find"
+- 111: Python traceback when config has no read offset for CD
+-  76: morituri should allow for a configuration file
+-  96: rip image retag: allow specification of release ID
+- 107: Backslash in track name confuses AR step
+- 112: add MusicBrainz lookup URL to generated logfile
 
--  77: Unable to find solution to UTF-8 problem
--  93: Unable to choose if there are more than one matching CD
--  67: unable to rip multi-cd-sets correctly
--  73: rip image breaks with "query failed"
--  78: Could not create encoded file
--  84: Error when checksumming extremely short tracks
--  91: --release-id does not work for Pink Floyd - The Wall (Experience Edition) (Disc 1)
--  94: mp3vbr uses quality=0 instead of vbr-quality=0
--  95: Discs with multiple media not correctly identified.
--  99: rip offset find fails with "UnboundLocalError: local variable 'archecksum' referenced before assignment"
-- 102: Unable to run without -d option
--  98: Year of release in templates
-
-morituri 0.1.3 is brought to you by:
+morituri 0.2.0 is brought to you by:
 
 Loïc Minier
 Ross Burton
 Christophe Fergeau
 Thomas Vander Stichele
+mustbenice
diff --git a/TODO b/TODO
index e527b03..6198f76 100644
--- a/TODO
+++ b/TODO
@@ -1,4 +1,9 @@
 TODO:
+- add drive analysis mode
+  - use cdparanoia -A from 10.2 on for caching behaviour
+- store drive features in a database
+- try http://www.ime.usp.br/~pjssilva/secure-cdparanoia.py and see if it
+  is better at handling some bad cd's
 - .cue file:
   - add version to morituri comment
   - add DATE and CATALOG and PERFORMER and TITLE to top of cue
@@ -52,3 +57,10 @@ TODO:
   let user continue by choosing one
 - artist-credit-phrase fabricated by musicbrainzngs only looks at name, not at artist-credit->name (see e.g. Gorky)
 - getting cache results should depend on same drive/offset
+- do some character mangling so trail of dead is not in a hidden dir
+- fix %r for normal case release name
+- decide whether output-dir should be part of the relative filenames of things;
+  right now it is; maybe split in to base and output ?
+- rip task should abort on task 4 if checksums don't match
+- retry cdrdao a few times when it had to load the tray
+- when it detects the target dir is already there, but the files would be different names, complain or customize the name with further info (see GLB - mockingbirds singles)
diff --git a/bin/Makefile.in b/bin/Makefile.in
index 14677ab..7145e11 100644
--- a/bin/Makefile.in
+++ b/bin/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/configure b/configure
index be27de1..ba93e9b 100755
--- a/configure
+++ b/configure
@@ -1,6 +1,6 @@
 #! /bin/sh
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for morituri 0.1.3.
+# Generated by GNU Autoconf 2.68 for morituri 0.2.0.
 #
 # Report bugs to <http://thomas.apestaart.org/morituri/trac/newticket>.
 #
@@ -560,8 +560,8 @@ MAKEFLAGS=
 # Identity of this package.
 PACKAGE_NAME='morituri'
 PACKAGE_TARNAME='morituri'
-PACKAGE_VERSION='0.1.3'
-PACKAGE_STRING='morituri 0.1.3'
+PACKAGE_VERSION='0.2.0'
+PACKAGE_STRING='morituri 0.2.0'
 PACKAGE_BUGREPORT='http://thomas.apestaart.org/morituri/trac/newticket'
 PACKAGE_URL=''
 
@@ -573,6 +573,7 @@ PYCHECKER
 HAVE_EPYDOC_FALSE
 HAVE_EPYDOC_TRUE
 EPYDOC
+PLUGINSDIR
 PYTHONLIBDIR
 pkgpyexecdir
 pyexecdir
@@ -1203,7 +1204,7 @@ if test "$ac_init_help" = "long"; then
   # Omit some internal or obsolete options to make the list less imposing.
   # This message is too long to be a string in the A/UX 3.1 sh.
   cat <<_ACEOF
-\`configure' configures morituri 0.1.3 to adapt to many kinds of systems.
+\`configure' configures morituri 0.2.0 to adapt to many kinds of systems.
 
 Usage: $0 [OPTION]... [VAR=VALUE]...
 
@@ -1269,7 +1270,7 @@ fi
 
 if test -n "$ac_init_help"; then
   case $ac_init_help in
-     short | recursive ) echo "Configuration of morituri 0.1.3:";;
+     short | recursive ) echo "Configuration of morituri 0.2.0:";;
    esac
   cat <<\_ACEOF
 
@@ -1343,7 +1344,7 @@ fi
 test -n "$ac_init_help" && exit $ac_status
 if $ac_init_version; then
   cat <<\_ACEOF
-morituri configure 0.1.3
+morituri configure 0.2.0
 generated by GNU Autoconf 2.68
 
 Copyright (C) 2010 Free Software Foundation, Inc.
@@ -1360,7 +1361,7 @@ cat >config.log <<_ACEOF
 This file contains any messages produced by compilers while
 running configure, to aid debugging if configure makes a mistake.
 
-It was created by morituri $as_me 0.1.3, which was
+It was created by morituri $as_me 0.2.0, which was
 generated by GNU Autoconf 2.68.  Invocation command line was
 
   $ $0 $@
@@ -2176,7 +2177,7 @@ fi
 
 # Define the identity of the package.
  PACKAGE='morituri'
- VERSION='0.1.3'
+ VERSION='0.2.0'
 
 
 cat >>confdefs.h <<_ACEOF
@@ -2218,9 +2219,9 @@ am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'
 
 
 
-  PACKAGE_VERSION_MAJOR=$(echo 0.1.3 | cut -d'.' -f1)
-  PACKAGE_VERSION_MINOR=$(echo 0.1.3 | cut -d'.' -f2)
-  PACKAGE_VERSION_MICRO=$(echo 0.1.3 | cut -d'.' -f3)
+  PACKAGE_VERSION_MAJOR=$(echo 0.2.0 | cut -d'.' -f1)
+  PACKAGE_VERSION_MINOR=$(echo 0.2.0 | cut -d'.' -f2)
+  PACKAGE_VERSION_MICRO=$(echo 0.2.0 | cut -d'.' -f3)
 
 
 
@@ -2231,7 +2232,7 @@ am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking nano version" >&5
 $as_echo_n "checking nano version... " >&6; }
 
-  NANO=$(echo 0.1.3 | cut -d'.' -f4)
+  NANO=$(echo 0.2.0 | cut -d'.' -f4)
 
   if test x"$NANO" = x || test "x$NANO" = "x0" ; then
     { $as_echo "$as_me:${as_lineno-$LINENO}: result: 0 (release)" >&5
@@ -2439,6 +2440,38 @@ PYVER=`$PYTHON -c "import sys ; print sys.version[:3]"`
 $as_echo "$as_me: Installing python code in $PYTHONLIBDIR" >&6;}
 
 
+
+  EXP_VAR=PLUGINSDIR
+  FROM_VAR="\${libdir}/morituri/plugins"
+
+
+    prefix_save=$prefix
+  exec_prefix_save=$exec_prefix
+
+    if test "x$prefix" = "xNONE"; then
+    prefix="$ac_default_prefix"
+  fi
+    if test "x$exec_prefix" = "xNONE"; then
+    exec_prefix=$prefix
+  fi
+
+  full_var="$FROM_VAR"
+    while true; do
+        new_full_var="`eval echo $full_var`"
+    if test "x$new_full_var" = "x$full_var"; then break; fi
+    full_var=$new_full_var
+  done
+
+    full_var=$new_full_var
+  PLUGINSDIR="$full_var"
+
+
+    prefix=$prefix_save
+  exec_prefix=$exec_prefix_save
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: Setting plugins directory to $PLUGINSDIR" >&5
+$as_echo "$as_me: Setting plugins directory to $PLUGINSDIR" >&6;}
+
 # Extract the first word of "epydoc", so it can be a program name with args.
 set dummy epydoc; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
@@ -3105,7 +3138,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
 # report actual input values of CONFIG_FILES etc. instead of their
 # values after options handling.
 ac_log="
-This file was extended by morituri $as_me 0.1.3, which was
+This file was extended by morituri $as_me 0.2.0, which was
 generated by GNU Autoconf 2.68.  Invocation command line was
 
   CONFIG_FILES    = $CONFIG_FILES
@@ -3158,7 +3191,7 @@ _ACEOF
 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
 ac_cs_version="\\
-morituri config.status 0.1.3
+morituri config.status 0.2.0
 configured by $0, generated by GNU Autoconf 2.68,
   with options \\"\$ac_cs_config\\"
 
diff --git a/configure.ac b/configure.ac
index 219f52e..07e605a 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1,7 +1,7 @@
 dnl initialize autoconf
 dnl when going to/from release please remove/add the nano (fourth number)
 dnl releases only do Wall, trunk and prerelease does Werror too
-AC_INIT(morituri, 0.1.3,
+AC_INIT(morituri, 0.2.0,
    http://thomas.apestaart.org/morituri/trac/newticket,
    morituri)
 
@@ -35,6 +35,9 @@ AS_AC_EXPAND(PYTHONLIBDIR, "\${exec_prefix}/lib/python$PYVER/site-packages")
 AC_MSG_NOTICE(Installing python code in $PYTHONLIBDIR)
 AC_SUBST(PYTHONLIBDIR)
 
+AS_AC_EXPAND(PLUGINSDIR, "\${libdir}/morituri/plugins")
+AC_MSG_NOTICE(Setting plugins directory to $PLUGINSDIR)
+
 dnl check for epydoc
 AC_CHECK_PROG(EPYDOC, epydoc, yes, no)
 AM_CONDITIONAL(HAVE_EPYDOC, test "x$EPYDOC" = "xyes")
diff --git a/doc/Makefile.in b/doc/Makefile.in
index 2b29674..c38bf02 100644
--- a/doc/Makefile.in
+++ b/doc/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/doc/morituri.ics b/doc/morituri.ics
index 554fafc..8ec17a6 100644
--- a/doc/morituri.ics
+++ b/doc/morituri.ics
@@ -38,5 +38,14 @@ DTSTART;VALUE=DATE:20121123
 DTEND;VALUE=DATE:20121123
 END:VEVENT
 
+BEGIN:VEVENT
+SUMMARY:Morituri 0.2.0 'ears' released
+UID:2013-01-20-morituri-0.2.0 at moap
+CLASS:PUBLIC
+PRIORITY:3
+DTSTART;VALUE=DATE:20130120
+DTEND;VALUE=DATE:20130120
+END:VEVENT
+
 
 END:VCALENDAR
diff --git a/doc/morituri.rss2 b/doc/morituri.rss2
index fe13bb2..8df239a 100644
--- a/doc/morituri.rss2
+++ b/doc/morituri.rss2
@@ -5,6 +5,24 @@
     <link>http://thomas.apestaart.org/morituri/trac/</link>
     <language>en</language>
     <item>
+      <title>Morituri 0.2.0 'ears' released</title>
+      <guid isPermaLink="false">release-morituri-0.2.0</guid>
+      <link>http://thomas.apestaart.org/morituri/trac/</link>
+      <pubDate>Sun, 20 Jan 2013 00:00:00 +0000</pubDate>
+      <description>
+- added plugins system for logger
+- added rip cd rip --logger to specify logger
+- added reading speed, cdparanoia and cdrdao version to logger
+- added rip drive analyze to detect whether we can defeat audio cache behaviour
+- store drive offsets and cache defeating in config file
+- rip drive list shows configured offset and audio cache defeating
+- added rip image retag --release-id to specify the release id to tag with
+- added %r/%R for release type to use in track/disc template
+- added %x for extension to release template
+For more information, visit
+<A HREF="http://thomas.apestaart.org/morituri/trac/">the project homepage</A>
+      </description>
+    </item><item>
       <title>Morituri 0.1.3 'cranes' released</title>
       <guid isPermaLink="false">release-morituri-0.1.3</guid>
       <link>http://thomas.apestaart.org/morituri/trac/</link>
diff --git a/doc/rip.1 b/doc/rip.1
index ab86b05..3da2a3a 100644
--- a/doc/rip.1
+++ b/doc/rip.1
@@ -73,20 +73,25 @@ Usage: rip cd rip
 
 Rips a CD.
 
-Tracks are named according to the track template, filling in the
-variables and expanding the file extension.  Variables are:
+ Tracks are named according to the track template, filling in the
+variables and adding the file extension.  Variables exclusive to the
+track template are:
  - %t: track number
  - %a: track artist
  - %n: track title
  - %s: track sort name
 
 Disc files (.cue, .log, .m3u) are named according to the disc
-template, filling in the variables and expanding the file extension.
-Variables are:
+template, filling in the variables and adding the file extension.
+Variables for both disc and track template are:
  - %A: album artist
  - %S: album sort name
  - %d: disc title
  - %y: release year
+ - %r: release type, lowercase
+ - %R: Release type, normal case
+ - %x: audio extension
+
 
 Paths to track files referenced in .cue and .m3u files will be made
 relative to the directory of the disc files.
@@ -98,20 +103,30 @@ Implemented by: morituri.rip.cd.Rip
 
 Options:
   -h, --help            show this help message and exit
+  -L LOGGER, --logger=LOGGER
+                        logger to use (default 'morituri', choose from
+                        'morituri')
   -o OFFSET, --offset=OFFSET
-                        sample read offset (defaults to 0)
+                        sample read offset (defaults to configured
+                        value, or 0)
   -O OUTPUT_DIRECTORY, --output-directory=OUTPUT_DIRECTORY
-                        output directory (defaults to absolute path to
-                        current directory)
+                        output directory; will be included in file
+                        paths in result files (defaults to absolute
+                        path to current directory; set to empty if you
+                        want paths to be relative instead)
+  -W WORKING_DIRECTORY, --working-directory=WORKING_DIRECTORY
+                        working directory; morituri will change to
+                        this directory and files will be created
+                        relative to it when not absolute
   -T TOC_PICKLE, --toc-pickle=TOC_PICKLE
                         pickle to use for reading and writing the TOC
   --track-template=TRACK_TEMPLATE
-                        template for track file naming (default %A -
-                        %d/%t. %a - %n)
+                        template for track file naming (default %r/%A
+                        - %d/%t. %a - %n)
   --disc-template=DISC_TEMPLATE
-                        template for disc file naming (default %A -
+                        template for disc file naming (default %r/%A -
                         %d/%A - %d)
-  -R RELEASE, --release-id=RELEASE
+  -R RELEASE_ID, --release-id=RELEASE_ID
                         MusicBrainz release id to match to (if there
                         are multiple)
   --profile=PROFILE     profile for encoding (default 'flac', choices
@@ -131,6 +146,8 @@ Commands:
   checksum        run a checksum task
   encode          run an encode task
   musicbrainzngs  examine MusicBrainz NGS info
+  resultcache     debug result cache
+  tag             run a tag reading task
 
 Implemented by: morituri.rip.debug.Debug
 
@@ -163,16 +180,75 @@ Options:
 
 
 .SH rip debug musicbrainzngs
-Usage: rip debug musicbrainzngs 
-
 examine MusicBrainz NGS info
 
+Usage: rip debug musicbrainzngs [MusicBrainz disc id]
+
+Look up a MusicBrainz disc id and output information.
+
+Example disc id: KnpGsLhvH.lPrNc1PBL21lb9Bg4-
+
 Implemented by: morituri.rip.debug.MusicBrainzNGS
 
 Options:
   -h, --help  show this help message and exit
 
 
+.SH rip debug resultcache
+debug result cache
+
+Usage: rip debug resultcache [command]
+
+debug result cache
+
+Aliases: rc
+
+Commands:
+  list  list cached results
+  log   write a log file for the cached result
+
+Implemented by: morituri.rip.debug.ResultCache
+
+Options:
+  -h, --help  show this help message and exit
+
+
+.SH rip debug resultcache list
+Usage: rip debug resultcache list 
+
+list cached results
+
+Implemented by: morituri.rip.debug.RCList
+
+Options:
+  -h, --help  show this help message and exit
+
+
+.SH rip debug resultcache log
+Usage: rip debug resultcache log 
+
+write a log file for the cached result
+
+Implemented by: morituri.rip.debug.RCLog
+
+Options:
+  -h, --help            show this help message and exit
+  -L LOGGER, --logger=LOGGER
+                        logger to use (default 'morituri', choose from
+                        'morituri')
+
+
+.SH rip debug tag
+Usage: rip debug tag 
+
+run a tag reading task
+
+Implemented by: morituri.rip.debug.Tag
+
+Options:
+  -h, --help  show this help message and exit
+
+
 .SH rip drive
 handle drives
 
@@ -181,7 +257,8 @@ Usage: rip drive [command]
 handle drives
 
 Commands:
-  list  list drives
+  analyze  analyze caching behaviour of drive
+  list     list drives
 
 Implemented by: morituri.rip.drive.Drive
 
@@ -189,6 +266,19 @@ Options:
   -h, --help  show this help message and exit
 
 
+.SH rip drive analyze
+Usage: rip drive analyze 
+
+analyze caching behaviour of drive
+
+Implemented by: morituri.rip.drive.Analyze
+
+Options:
+  -h, --help            show this help message and exit
+  -d DEVICE, --device=DEVICE
+                        CD-DA device
+
+
 .SH rip drive list
 Usage: rip drive list 
 
@@ -245,7 +335,10 @@ retag image files
 Implemented by: morituri.rip.image.Retag
 
 Options:
-  -h, --help  show this help message and exit
+  -h, --help            show this help message and exit
+  -R RELEASE_ID, --release-id=RELEASE_ID
+                        MusicBrainz release id to match to (if there
+                        are multiple)
 
 
 .SH rip image verify
diff --git a/etc/Makefile.in b/etc/Makefile.in
index cb1059d..0aad8d8 100644
--- a/etc/Makefile.in
+++ b/etc/Makefile.in
@@ -144,6 +144,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/etc/bash_completion.d/Makefile.in b/etc/bash_completion.d/Makefile.in
index d5b4662..6ae4173 100644
--- a/etc/bash_completion.d/Makefile.in
+++ b/etc/bash_completion.d/Makefile.in
@@ -134,6 +134,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/m4/Makefile.in b/m4/Makefile.in
index 6e0de00..2df9af3 100644
--- a/m4/Makefile.in
+++ b/m4/Makefile.in
@@ -104,6 +104,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/misc/Makefile.in b/misc/Makefile.in
index aa8d804..0b51eb5 100644
--- a/misc/Makefile.in
+++ b/misc/Makefile.in
@@ -104,6 +104,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/morituri.doap b/morituri.doap
index 89b16b3..6e935a9 100644
--- a/morituri.doap
+++ b/morituri.doap
@@ -28,10 +28,10 @@ Morituri is a CD ripper aiming for maximum quality.
  <download-page rdf:resource="http://thomas.apestaart.org/projects/morituri/" />
 
  <repository>
-   <SVNRepository>
-     <location rdf:resource="http://thomas.apestaart.org/morituri/svn/trunk/" />
-     <browse rdf:resource="http://thomas.apestaart.org/morituri/trac/browser/trunk" />
-   </SVNRepository>
+   <GitRepository>
+     <location rdf:resource="git://github.com/thomasvs/morituri.git" />
+     <browse rdf:resource="http://github.com/thomasvs/morituri" />
+   </GitRepository>
  </repository>
 
  <maintainer>
@@ -42,6 +42,29 @@ Morituri is a CD ripper aiming for maximum quality.
 
  <release>
   <Version>
+   <revision>0.2.0</revision>
+   <branch>master</branch>
+   <name>ears</name>
+   <created>2013-01-20</created>
+   <file-release rdf:resource="http://thomas.apestaart.org/download/morituri/morituri-0.2.0.tar.bz2" />
+   <file-release rdf:resource="http://thomas.apestaart.org/download/morituri/morituri-0.2.0-1.noarch.rpm" />
+   <dc:description>
+- added plugins system for logger
+- added rip cd rip --logger to specify logger
+- added reading speed, cdparanoia and cdrdao version to logger
+- added rip drive analyze to detect whether we can defeat audio cache behaviour
+- store drive offsets and cache defeating in config file
+- rip drive list shows configured offset and audio cache defeating
+- added rip image retag --release-id to specify the release id to tag with
+- added %r/%R for release type to use in track/disc template
+- added %x for extension to track/disc template
+   </dc:description>
+  </Version>
+ </release>
+
+
+ <release>
+  <Version>
    <revision>0.1.3</revision>
    <branch>trunk</branch>
    <name>cranes</name>
diff --git a/morituri.spec b/morituri.spec
index 84dbb32..44e908b 100644
--- a/morituri.spec
+++ b/morituri.spec
@@ -2,7 +2,7 @@
 %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
 
 Name:           morituri
-Version:        0.1.3
+Version:        0.2.0
 Release:        1%{?dist}
 Summary:        CD ripper
 Source:         %{name}-%{version}.tar.bz2
@@ -15,6 +15,7 @@ BuildArch:      noarch
 
 BuildRequires:  pygobject2
 BuildRequires:  gstreamer-python
+BuildRequires:  python-setuptools
 
 Requires:       cdparanoia
 Requires:       cdrdao > 1.2.3-0.rc2.2
@@ -23,6 +24,7 @@ Requires:       gstreamer-python
 Requires:       python-musicbrainz2
 Requires:       python-CDDB
 Requires:       pycdio
+Requires:       pyxdg
 
 # we use parse_version in code
 Requires:       python-setuptools
@@ -41,6 +43,7 @@ Morituri is a CD ripper.
 rm -rf $RPM_BUILD_ROOT
 make DESTDIR=$RPM_BUILD_ROOT install
 
+mkdir -p $RPM_BUILD_ROOT%{_libdir}/morituri/plugins
 %clean
 rm -rf $RPM_BUILD_ROOT
 
@@ -48,11 +51,18 @@ rm -rf $RPM_BUILD_ROOT
 %defattr(-,root,root)
 %doc README morituri.doap NEWS RELEASE ChangeLog
 %{_bindir}/rip
+%{_libdir}/morituri/plugins
 %{_mandir}/man1/rip.1*
 %{python_sitelib}/morituri
 %{_sysconfdir}/bash_completion.d/
 
 %changelog
+* Sun Jan 20 2013 Thomas Vander Stichele <thomas at apestaart dot org>
+- Added python-setuptools now that we use it for plugins
+
+* Sun Dec 02 2012 Thomas Vander Stichele <thomas at apestaart dot org>
+- Require pyxdg for XDG config file.
+
 * Sun Jan 09 2011 Thomas Vander Stichele <thomas at apestaart dot org>
 - Fix URL
 
diff --git a/morituri.spec.in b/morituri.spec.in
index 45132a5..07a1823 100644
--- a/morituri.spec.in
+++ b/morituri.spec.in
@@ -15,6 +15,7 @@ BuildArch:      noarch
 
 BuildRequires:  pygobject2
 BuildRequires:  gstreamer-python
+BuildRequires:  python-setuptools
 
 Requires:       cdparanoia
 Requires:       cdrdao > 1.2.3-0.rc2.2
@@ -23,6 +24,7 @@ Requires:       gstreamer-python
 Requires:       python-musicbrainz2
 Requires:       python-CDDB
 Requires:       pycdio
+Requires:       pyxdg
 
 # we use parse_version in code
 Requires:       python-setuptools
@@ -41,6 +43,7 @@ Morituri is a CD ripper.
 rm -rf $RPM_BUILD_ROOT
 make DESTDIR=$RPM_BUILD_ROOT install
 
+mkdir -p $RPM_BUILD_ROOT%{_libdir}/morituri/plugins
 %clean
 rm -rf $RPM_BUILD_ROOT
 
@@ -48,11 +51,18 @@ rm -rf $RPM_BUILD_ROOT
 %defattr(-,root,root)
 %doc README morituri.doap NEWS RELEASE ChangeLog
 %{_bindir}/rip
+%{_libdir}/morituri/plugins
 %{_mandir}/man1/rip.1*
 %{python_sitelib}/morituri
 %{_sysconfdir}/bash_completion.d/
 
 %changelog
+* Sun Jan 20 2013 Thomas Vander Stichele <thomas at apestaart dot org>
+- Added python-setuptools now that we use it for plugins
+
+* Sun Dec 02 2012 Thomas Vander Stichele <thomas at apestaart dot org>
+- Require pyxdg for XDG config file.
+
 * Sun Jan 09 2011 Thomas Vander Stichele <thomas at apestaart dot org>
 - Fix URL
 
diff --git a/morituri/Makefile.in b/morituri/Makefile.in
index beb8fe4..224e56d 100644
--- a/morituri/Makefile.in
+++ b/morituri/Makefile.in
@@ -175,6 +175,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/morituri/common/Makefile.am b/morituri/common/Makefile.am
index b211fd1..1413edf 100644
--- a/morituri/common/Makefile.am
+++ b/morituri/common/Makefile.am
@@ -6,7 +6,10 @@ morituri_PYTHON = \
 	__init__.py \
 	accurip.py \
 	checksum.py \
+	cache.py \
 	common.py \
+	config.py \
+	directory.py \
 	drive.py \
 	encode.py \
 	gstreamer.py \
diff --git a/morituri/common/Makefile.in b/morituri/common/Makefile.in
index 85ab15b..8beea89 100644
--- a/morituri/common/Makefile.in
+++ b/morituri/common/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
@@ -195,7 +196,10 @@ morituri_PYTHON = \
 	__init__.py \
 	accurip.py \
 	checksum.py \
+	cache.py \
 	common.py \
+	config.py \
+	directory.py \
 	drive.py \
 	encode.py \
 	gstreamer.py \
diff --git a/morituri/common/cache.py b/morituri/common/cache.py
new file mode 100644
index 0000000..2fb8044
--- /dev/null
+++ b/morituri/common/cache.py
@@ -0,0 +1,189 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_cache -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+# Morituri - for those about to RIP
+
+# Copyright (C) 2009 Thomas Vander Stichele
+
+# This file is part of morituri.
+#
+# morituri is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# morituri is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with morituri.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import os.path
+import glob
+import tempfile
+import shutil
+
+from morituri.result import result
+from morituri.extern.log import log
+
+
+class Persister(object):
+    """
+    I wrap an optional pickle to persist an object to disk.
+
+    Instantiate me with a path to automatically unpickle the object.
+    Call persist to store the object to disk; it will get stored if it
+    changed from the on-disk object.
+
+    @ivar object: the persistent object
+    """
+
+    def __init__(self, path=None, default=None):
+        """
+        If path is not given, the object will not be persisted.
+        This allows code to transparently deal with both persisted and
+        non-persisted objects, since the persist method will just end up
+        doing nothing.
+        """
+        self._path = path
+        self.object = None
+
+        self._unpickle(default)
+
+    def persist(self, obj=None):
+        """
+        Persist the given object, if we have a persistence path and the
+        object changed.
+
+        If object is not given, re-persist our object, always.
+        If object is given, only persist if it was changed.
+        """
+        # don't pickle if it's already ok
+        if obj and obj == self.object:
+            return
+
+        # store the object on ourselves if not None
+        if obj is not None:
+            self.object = obj
+
+        # don't pickle if there is no path
+        if not self._path:
+            return
+
+        # default to pickling our object again
+        if obj is None:
+            obj = self.object
+
+        # pickle
+        self.object = obj
+        (fd, path) = tempfile.mkstemp(suffix='.morituri.pickle')
+        handle = os.fdopen(fd, 'wb')
+        import pickle
+        pickle.dump(obj, handle, 2)
+        handle.close()
+        # do an atomic move
+        shutil.move(path, self._path)
+
+    def _unpickle(self, default=None):
+        self.object = default
+
+        if not self._path:
+            return None
+
+        if not os.path.exists(self._path):
+            return None
+
+        handle = open(self._path)
+        import pickle
+
+        try:
+            self.object = pickle.load(handle)
+        except:
+            # can fail for various reasons; in that case, pretend we didn't
+            # load it
+            pass
+
+    def delete(self):
+        self.object = None
+        os.unlink(self._path)
+
+
+class PersistedCache(object):
+    """
+    I wrap a directory of persisted objects.
+    """
+
+    path = None
+
+    def __init__(self, path):
+        self.path = path
+        try:
+            os.makedirs(self.path)
+        except OSError, e:
+            if e.errno != 17: # FIXME
+                raise
+
+    def _getPath(self, key):
+        return os.path.join(self.path, '%s.pickle' % key)
+
+    def get(self, key):
+        """
+        Returns the persister for the given key.
+        """
+        persister = Persister(self._getPath(key))
+        # FIXME: don't delete old objects atm
+        # if persister.object:
+        #     if hasattr(persister.object, 'instanceVersion'):
+        #         o = persister.object
+        #         if o.instanceVersion < o.__class__.classVersion:
+        #             persister.delete()
+
+        return persister
+
+
+class ResultCache(log.Loggable):
+
+    def __init__(self, path=None):
+        if not path:
+            path = self._getResultCachePath()
+
+        self._path = path
+        self._pcache = PersistedCache(self._path)
+
+    def _getResultCachePath(self):
+        path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache',
+            'result')
+        return path
+
+    def getRipResult(self, cddbdiscid, create=True):
+        """
+        Retrieve the persistable RipResult either from our cache (from a
+        previous, possibly aborted rip), or return a new one.
+
+        @rtype: L{Persister} for L{result.RipResult}
+        """
+        presult = self._pcache.get(cddbdiscid)
+
+        if not presult.object:
+            self.debug('result for cddbdiscid %r not in cache', cddbdiscid)
+            if not create:
+                self.debug('returning None')
+                return None
+
+            self.debug('creating result')
+            presult.object = result.RipResult()
+            presult.persist(presult.object)
+        else:
+            self.debug('result for cddbdiscid %r found in cache, reusing',
+                cddbdiscid)
+
+        return presult
+
+    def getIds(self):
+        paths = glob.glob(os.path.join(self._path, '*.pickle'))
+
+        return [os.path.splitext(os.path.basename(path))[0] for path in paths]
+        
diff --git a/morituri/common/checksum.py b/morituri/common/checksum.py
index 542e795..68bf524 100644
--- a/morituri/common/checksum.py
+++ b/morituri/common/checksum.py
@@ -48,30 +48,40 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
     # this object needs a main loop to stop
     description = 'Calculating checksum'
 
-    def __init__(self, path, frameStart=0, frameLength=-1):
+    def __init__(self, path, sampleStart=0, sampleLength=-1):
         """
-        A frame is considered a set of samples for each channel;
-        ie 16 bit stereo is 4 bytes per frame.
-        If frameLength < 0 it is treated as 'unknown' and calculated.
+        A sample is considered a set of values, one for each channel;
+        ie 16 bit stereo is 4 bytes per sample.
+        If sampleLength < 0 it is treated as 'unknown' and calculated.
 
         @type  path:       unicode
-        @type  frameStart: int
-        @param frameStart: the frame to start at
+        @type  sampleStart: int
+        @param sampleStart: the sample to start at
         """
+
+        # sampleLength can be e.g. -588 when it is -1 * SAMPLES_PER_FRAME
+
         assert type(path) is unicode, "%r is not unicode" % path
 
         self.logName = "ChecksumTask 0x%x" % id(self)
 
         # use repr/%r because path can be unicode
-        self.debug('Creating checksum task on %r from %d to %d',
-            path, frameStart, frameLength)
+        if sampleLength < 0:
+            self.debug(
+                'Creating checksum task on %r from sample %d until the end',
+                path, sampleStart)
+        else:
+            self.debug(
+                'Creating checksum task on %r from sample %d for %d samples',
+                path, sampleStart, sampleLength)
+
         if not os.path.exists(path):
             raise IndexError('%r does not exist' % path)
 
         self._path = path
-        self._frameStart = frameStart
-        self._frameLength = frameLength
-        self._frameEnd = None
+        self._sampleStart = sampleStart
+        self._sampleLength = sampleLength
+        self._sampleEnd = None
         self._checksum = 0
         self._bytes = 0 # number of bytes received
         self._first = None
@@ -94,7 +104,7 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
     def paused(self):
         sink = self.pipeline.get_by_name('sink')
 
-        if self._frameLength < 0:
+        if self._sampleLength < 0:
             self.debug('query duration')
             try:
                 length, qformat = sink.query_duration(gst.FORMAT_DEFAULT)
@@ -106,12 +116,15 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
             if qformat == gst.FORMAT_BYTES:
                 self.debug('query returned in BYTES format')
                 length /= 4
-            self.debug('total length: %r', length)
-            self._frameLength = length - self._frameStart
-            self.debug('audio frame length is %r', self._frameLength)
+            self.debug('total sample length of file: %r', length)
+            self._sampleLength = length - self._sampleStart
+            self.debug('sampleLength is queried as %d samples',
+                self._sampleLength)
         else:
-            self.debug('frameLength known, is %d' % self._frameLength)
-        self._frameEnd = self._frameStart + self._frameLength - 1
+            self.debug('sampleLength is known, and is %d samples' %
+                self._sampleLength)
+        self._sampleEnd = self._sampleStart + self._sampleLength - 1
+        self.debug('sampleEnd is sample %d' % self._sampleEnd)
 
         self.debug('event')
 
@@ -119,16 +132,18 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
         # the segment end only is respected since -good 0.10.14.1
         event = gst.event_new_seek(1.0, gst.FORMAT_DEFAULT,
             gst.SEEK_FLAG_FLUSH,
-            gst.SEEK_TYPE_SET, self._frameStart,
-            gst.SEEK_TYPE_SET, self._frameEnd + 1) # half-inclusive interval
+            gst.SEEK_TYPE_SET, self._sampleStart,
+            gst.SEEK_TYPE_SET, self._sampleEnd + 1) # half-inclusive interval
         self.debug('CRCing %r from sector %d to sector %d' % (
             self._path,
-            self._frameStart / common.SAMPLES_PER_FRAME,
-            (self._frameEnd + 1) / common.SAMPLES_PER_FRAME))
-        # FIXME: sending it with frameEnd set screws up the seek, we don't get
+            self._sampleStart / common.SAMPLES_PER_FRAME,
+            (self._sampleEnd + 1) / common.SAMPLES_PER_FRAME))
+        # FIXME: sending it with sampleEnd set screws up the seek, we don't get
         # everything for flac; fixed in recent -good
         result = sink.send_event(event)
         self.debug('event sent, result %r', result)
+        if not result:
+            self.error('Failed to select samples with GStreamer seek event')
         sink.connect('new-buffer', self._new_buffer_cb)
         sink.connect('eos', self._eos_cb)
 
@@ -150,20 +165,26 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
         self.debug('stopped')
         if not self._last:
             # see http://bugzilla.gnome.org/show_bug.cgi?id=578612
-            self.debug('not a single buffer gotten, setting exception EmptyError')
+            self.debug(
+                'not a single buffer gotten, setting exception EmptyError')
             self.setException(common.EmptyError('not a single buffer gotten'))
+            return
         else:
             self._checksum = self._checksum % 2 ** 32
-            self.debug("last offset %r", self._last.offset)
+            self.debug("last buffer's sample offset %r", self._last.offset)
+            self.debug("last buffer's sample size %r", len(self._last) / 4)
             last = self._last.offset + len(self._last) / 4 - 1
-            self.debug("last sample: %r", last)
-            self.debug("frame end: %r", self._frameEnd)
-            self.debug("frame length: %r", self._frameLength)
+            self.debug("last sample offset in buffer: %r", last)
+            self.debug("requested sample end: %r", self._sampleEnd)
+            self.debug("requested sample length: %r", self._sampleLength)
             self.debug("checksum: %08X", self._checksum)
             self.debug("bytes: %d", self._bytes)
-            if self._frameEnd != last:
-                print 'ERROR: did not get all frames, %d missing' % (
-                    self._frameEnd - last)
+            if self._sampleEnd != last:
+                msg = 'did not get all samples, %d of %d missing' % (
+                    self._sampleEnd - last, self._sampleEnd)
+                self.warning(msg)
+                self.setException(common.MissingFrames(msg))
+                return
 
         self.checksum = self._checksum
 
@@ -183,7 +204,7 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
             buf.offset, buf.size))
         if self._first is None:
             self._first = buf.offset
-            self.debug('first sample is %r', self._first)
+            self.debug('first sample is sample offset %r', self._first)
         self._last = buf
 
         assert len(buf) % 4 == 0, "buffer is not a multiple of 4 bytes"
@@ -200,9 +221,9 @@ class ChecksumTask(log.Loggable, gstreamer.GstPipelineTask):
             self._bytes += len(buf)
 
             # update progress
-            frame = self._first + self._bytes / 4
-            framesDone = frame - self._frameStart
-            progress = float(framesDone) / float((self._frameLength))
+            sample = self._first + self._bytes / 4
+            samplesDone = sample - self._sampleStart
+            progress = float(samplesDone) / float((self._sampleLength))
             # marshall to the main thread
             self.schedule(0, self.setProgress, progress)
 
@@ -233,9 +254,9 @@ class AccurateRipChecksumTask(ChecksumTask):
 
     description = 'Calculating AccurateRip checksum'
 
-    def __init__(self, path, trackNumber, trackCount, frameStart=0,
-            frameLength=-1):
-        ChecksumTask.__init__(self, path, frameStart, frameLength)
+    def __init__(self, path, trackNumber, trackCount, sampleStart=0,
+            sampleLength=-1):
+        ChecksumTask.__init__(self, path, sampleStart, sampleLength)
         self._trackNumber = trackNumber
         self._trackCount = trackCount
         self._discFrameCounter = 0 # 1-based
@@ -262,7 +283,7 @@ class AccurateRipChecksumTask(ChecksumTask):
 
         # on last track, skip last 5 CD frames
         if self._trackNumber == self._trackCount:
-            discFrameLength = self._frameLength / common.SAMPLES_PER_FRAME
+            discFrameLength = self._sampleLength / common.SAMPLES_PER_FRAME
             if self._discFrameCounter > discFrameLength - 5:
                 self.debug('skipping frame %d', self._discFrameCounter)
                 return checksum
diff --git a/morituri/common/common.py b/morituri/common/common.py
index a1fa781..a647ba0 100644
--- a/morituri/common/common.py
+++ b/morituri/common/common.py
@@ -20,19 +20,19 @@
 # You should have received a copy of the GNU General Public License
 # along with morituri.  If not, see <http://www.gnu.org/licenses/>.
 
+
 import os
 import os.path
 import math
-import tempfile
-import shutil
+
 
 from morituri.extern.log import log
 
+FRAMES_PER_SECOND = 75
 
 SAMPLES_PER_FRAME = 588
 WORDS_PER_FRAME = SAMPLES_PER_FRAME * 2
 BYTES_PER_FRAME = SAMPLES_PER_FRAME * 4
-FRAMES_PER_SECOND = 75
 
 
 def msfToFrames(msf):
@@ -119,122 +119,10 @@ def formatTime(seconds, fractional=3):
     return " ".join(chunks)
 
 
-class Persister(object):
-    """
-    I wrap an optional pickle to persist an object to disk.
-
-    Instantiate me with a path to automatically unpickle the object.
-    Call persist to store the object to disk; it will get stored if it
-    changed from the on-disk object.
-
-    @ivar object: the persistent object
-    """
-
-    def __init__(self, path=None, default=None):
-        """
-        If path is not given, the object will not be persisted.
-        This allows code to transparently deal with both persisted and
-        non-persisted objects, since the persist method will just end up
-        doing nothing.
-        """
-        self._path = path
-        self.object = None
-
-        self._unpickle(default)
-
-    def persist(self, obj=None):
-        """
-        Persist the given object, if we have a persistence path and the
-        object changed.
-
-        If object is not given, re-persist our object, always.
-        If object is given, only persist if it was changed.
-        """
-        # don't pickle if it's already ok
-        if obj and obj == self.object:
-            return
-
-        # store the object on ourselves if not None
-        if obj is not None:
-            self.object = obj
-
-        # don't pickle if there is no path
-        if not self._path:
-            return
-
-        # default to pickling our object again
-        if obj is None:
-            obj = self.object
-
-        # pickle
-        self.object = obj
-        (fd, path) = tempfile.mkstemp(suffix='.morituri.pickle')
-        handle = os.fdopen(fd, 'wb')
-        import pickle
-        pickle.dump(obj, handle, 2)
-        handle.close()
-        # do an atomic move
-        shutil.move(path, self._path)
-
-    def _unpickle(self, default=None):
-        self.object = default
-
-        if not self._path:
-            return None
-
-        if not os.path.exists(self._path):
-            return None
-
-        handle = open(self._path)
-        import pickle
-
-        try:
-            self.object = pickle.load(handle)
-        except:
-            # can fail for various reasons; in that case, pretend we didn't
-            # load it
-            pass
-
-    def delete(self):
-        self.object = None
-        os.unlink(self._path)
-
-
-class PersistedCache(object):
-    """
-    I wrap a directory of persisted objects.
-    """
-
-    path = None
-
-    def __init__(self, path):
-        self.path = path
-        try:
-            os.makedirs(self.path)
-        except OSError, e:
-            if e.errno != 17: # FIXME
-                raise
-
-    def _getPath(self, key):
-        return os.path.join(self.path, '%s.pickle' % key)
-
-    def get(self, key):
-        """
-        Returns the persister for the given key.
-        """
-        persister = Persister(self._getPath(key))
-        if persister.object:
-            if hasattr(persister.object, 'instanceVersion'):
-                o = persister.object
-                if o.instanceVersion < o.__class__.classVersion:
-                    persister.delete()
-
-        return persister
-
-
 def tagListToDict(tl):
     """
-    Removes audio-codec and video-codec since we never set them ourselves.
+    Converts gst.TagList to dict.
+    Also strips it of tags that are not writable.
     """
     import gst
 
@@ -243,7 +131,13 @@ def tagListToDict(tl):
         if key == gst.TAG_DATE:
             date = tl[key]
             d[key] = "%4d-%2d-%2d" % (date.year, date.month, date.day)
-        elif key in [gst.TAG_AUDIO_CODEC, gst.TAG_VIDEO_CODEC]:
+        elif key in [
+            gst.TAG_AUDIO_CODEC,
+            gst.TAG_VIDEO_CODEC,
+            gst.TAG_MINIMUM_BITRATE,
+            gst.TAG_BITRATE,
+            gst.TAG_MAXIMUM_BITRATE,
+            ]:
             pass
         else:
             d[key] = tl[key]
@@ -257,6 +151,14 @@ def tagListEquals(tl1, tl2):
     return d1 == d2
 
 
+def tagListDifference(tl1, tl2):
+    d1 = tagListToDict(tl1)
+    d2 = tagListToDict(tl2)
+    return set(d1.keys()) - set(d2.keys())
+
+    return d1 == d2
+
+
 class MissingDependencyException(Exception):
     dependency = None
 
@@ -268,6 +170,12 @@ class MissingDependencyException(Exception):
 class EmptyError(Exception):
     pass
 
+class MissingFrames(Exception):
+    """
+    Fewer frames decoded than expected.
+    """
+    pass
+
 
 def shrinkPath(path):
     """
@@ -300,11 +208,17 @@ def shrinkPath(path):
     path = os.path.join(*parts)
     return path
 
+
 def getRealPath(refPath, filePath):
     """
-    Translate a .cue or .toc's FILE to an existing path.
+    Translate a .cue or .toc's FILE argument to an existing path.
+    Does Windows path translation.
+    Will look for the given file name, but with .flac and .wav as extensions.
+
+    @param refPath:  path to the file from which the track is referenced;
+                     for example, path to the .cue file in the same directory
+    @type  refPath:  unicode
 
-    @type  refPath: unicode
     @type  filePath: unicode
     """
     assert type(filePath) is unicode, "%r is not unicode" % filePath
@@ -312,24 +226,34 @@ def getRealPath(refPath, filePath):
     if os.path.exists(filePath):
         return filePath
 
-    # .cue FILE statements have Windows-style path separators, so convert
+    candidatePaths = []
+
+    # .cue FILE statements can have Windows-style path separators, so convert
+    # them as one possible candidate
+    # on the other hand, the file may indeed contain a backslash in the name
+    # on linux
+    # FIXME: I guess we might do all possible combinations of splitting or
+    #        keeping the slash, but let's just assume it's either Windows
+    #        or linux
+    # See https://thomas.apestaart.org/morituri/trac/ticket/107
     parts = filePath.split('\\')
     if parts[0] == '':
         parts[0] = os.path.sep
     tpath = os.path.join(*parts)
-    candidatePaths = []
-
-    if tpath == os.path.abspath(tpath):
-        candidatePaths.append(tpath)
-    else:
-        # if the path is relative:
-        # - check relatively to the cue file
-        # - check only the filename part relative to the cue file
-        candidatePaths.append(os.path.join(
-            os.path.dirname(refPath), tpath))
-        candidatePaths.append(os.path.join(
-            os.path.dirname(refPath), os.path.basename(tpath)))
 
+    for path in [filePath, tpath]:
+        if path == os.path.abspath(path):
+            candidatePaths.append(path)
+        else:
+            # if the path is relative:
+            # - check relatively to the cue file
+            # - check only the filename part relative to the cue file
+            candidatePaths.append(os.path.join(
+                os.path.dirname(refPath), path))
+            candidatePaths.append(os.path.join(
+                os.path.dirname(refPath), os.path.basename(path)))
+
+    # Now look for .wav and .flac files, as .flac files are often named .wav
     for candidate in candidatePaths:
         noext, _ = os.path.splitext(candidate)
         for ext in ['wav', 'flac']:
@@ -339,6 +263,7 @@ def getRealPath(refPath, filePath):
 
     raise KeyError("Cannot find file for %r" % filePath)
 
+
 def getRelativePath(targetPath, collectionPath):
     """
     Get a relative path from the directory of collectionPath to
diff --git a/morituri/common/config.py b/morituri/common/config.py
new file mode 100644
index 0000000..125f87f
--- /dev/null
+++ b/morituri/common/config.py
@@ -0,0 +1,145 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_config -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+# Morituri - for those about to RIP
+
+# Copyright (C) 2009 Thomas Vander Stichele
+
+# This file is part of morituri.
+#
+# morituri is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# morituri is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with morituri.  If not, see <http://www.gnu.org/licenses/>.
+
+import os.path
+import shutil
+import urllib
+import codecs
+import tempfile
+import ConfigParser
+
+from morituri.common import directory, log
+
+
class Config(log.Loggable):
    """
    I hold per-drive configuration (read offset, cache-defeating ability),
    persisted in an ini-style file with one C{drive:...} section per drive.
    """

    def __init__(self, path=None):
        """
        @param path: path of the configuration file to use; defaults to
                     the XDG-based path from L{getDefaultPath}.
        """
        if not path:
            path = self.getDefaultPath()

        self._path = path

        self._parser = ConfigParser.SafeConfigParser()

        self.open()

    def getDefaultPath(self):
        return directory.Directory().getConfig()

    def open(self):
        # Open the file with the correct encoding; a missing file simply
        # yields an empty configuration
        if os.path.exists(self._path):
            with codecs.open(self._path, 'r', encoding='utf-8') as f:
                self._parser.readfp(f)

        self.info('Loaded %d sections from config file' %
            len(self._parser.sections()))

    def setReadOffset(self, vendor, model, release, offset):
        """
        Set a read offset for the given drive.

        Strips the given strings of leading and trailing whitespace.
        """
        section = self._findOrCreateDriveSection(vendor, model, release)
        self._parser.set(section, 'read_offset', str(offset))
        self.write()

    def getReadOffset(self, vendor, model, release):
        """
        Get a read offset for the given drive.

        @raise KeyError: if no drive section or no read_offset is
                         configured for this drive.
        """
        section = self._findDriveSection(vendor, model, release)

        try:
            return int(self._parser.get(section, 'read_offset'))
        except ConfigParser.NoOptionError:
            raise KeyError("Could not find read_offset for %s/%s/%s" % (
                vendor, model, release))

    def setDefeatsCache(self, vendor, model, release, defeat):
        """
        Set whether the drive defeats the cache.

        Strips the given strings of leading and trailing whitespace.
        """
        section = self._findOrCreateDriveSection(vendor, model, release)
        self._parser.set(section, 'defeats_cache', str(defeat))
        self.write()

    def getDefeatsCache(self, vendor, model, release):
        """
        Get whether the given drive defeats the audio cache.

        @raise KeyError: if no drive section or no defeats_cache is
                         configured for this drive.
        """
        section = self._findDriveSection(vendor, model, release)

        try:
            # FIXED: bool() on the stored string was always True for any
            # non-empty value, including 'False'; getboolean parses the
            # 'True'/'False' strings written by setDefeatsCache correctly
            return self._parser.getboolean(section, 'defeats_cache')
        except ConfigParser.NoOptionError:
            raise KeyError("Could not find defeats_cache for %s/%s/%s" % (
                vendor, model, release))

    def write(self):
        # write to a temporary file first, then move it over the real
        # config, so a crash mid-write cannot truncate the existing file
        fd, path = tempfile.mkstemp(suffix=u'.moriturirc')
        handle = os.fdopen(fd, 'w')
        self._parser.write(handle)
        handle.close()
        shutil.move(path, self._path)

    def _findDriveSection(self, vendor, model, release):
        """
        Return the name of the section matching the given drive info,
        comparing whitespace-stripped values.

        @raise KeyError: if no matching section exists.
        """
        # FIXED: the previous code assigned through locals(), which has no
        # effect inside a CPython function; strip the values explicitly
        wanted = {
            'vendor': vendor.strip(),
            'model': model.strip(),
            'release': release.strip(),
        }

        for name in self._parser.sections():
            if not name.startswith('drive:'):
                continue

            self.debug('Looking at section %r' % name)
            matched = True
            for key in ['vendor', 'model', 'release']:
                conf = self._parser.get(name, key)
                self.debug("%s: '%s' versus '%s'" % (
                    key, wanted[key], conf))
                if wanted[key] != conf:
                    matched = False
                    break

            if matched:
                return name

        raise KeyError("Could not find configuration section for %s/%s/%s" % (
                vendor, model, release))

    def _findOrCreateDriveSection(self, vendor, model, release):
        """
        Return the section name for the given drive, creating and
        persisting a new section if none exists yet.
        """
        try:
            section = self._findDriveSection(vendor, model, release)
        except KeyError:
            section = 'drive:' + urllib.quote('%s:%s:%s' % (
                vendor, model, release))
            self._parser.add_section(section)
            # store the stripped identification so lookups match
            for key, value in [('vendor', vendor), ('model', model),
                    ('release', release)]:
                self._parser.set(section, key, value.strip())

        self.write()

        return self._findDriveSection(vendor, model, release)
+
diff --git a/morituri/common/directory.py b/morituri/common/directory.py
new file mode 100644
index 0000000..1ad4765
--- /dev/null
+++ b/morituri/common/directory.py
@@ -0,0 +1,54 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_directory -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+# Morituri - for those about to RIP
+
+# Copyright (C) 2013 Thomas Vander Stichele
+
+# This file is part of morituri.
+#
+# morituri is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# morituri is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with morituri.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+
+from morituri.common import log
+
+
class Directory(log.Loggable):
    """
    I locate morituri's configuration file and cache directory, using the
    XDG base directory spec when pyxdg is available and falling back to
    dotfiles under the home directory otherwise.
    """

    def getConfig(self):
        """
        Return the path of the configuration file to use.
        """
        try:
            from xdg import BaseDirectory
            base = BaseDirectory.save_config_path('morituri')
            path = os.path.join(base, 'morituri.conf')
            self.info('Using XDG, configuration file is %s' % path)
        except ImportError:
            # pyxdg not installed; use a classic dotfile in $HOME
            path = os.path.expanduser('~/.moriturirc')
            self.info('Not using XDG, configuration file is %s' % path)
        return path

    def getCache(self):
        """
        Return the path of the cache directory, creating it if needed.
        """
        try:
            from xdg import BaseDirectory
            # save_cache_path creates the directory for us
            path = BaseDirectory.save_cache_path('morituri')
            self.info('Using XDG, cache directory is %s' % path)
        except ImportError:
            path = os.path.expanduser('~/.morituri/cache')
            if not os.path.exists(path):
                os.makedirs(path)
            self.info('Not using XDG, cache directory is %s' % path)
        return path
+
+
diff --git a/morituri/common/drive.py b/morituri/common/drive.py
index 95c9452..80c3de6 100644
--- a/morituri/common/drive.py
+++ b/morituri/common/drive.py
@@ -58,3 +58,15 @@ def _getAllDevicePathsStatic():
             ret.append(c)
 
     return ret
+
+
def getDeviceInfo(path):
    """
    Get drive identification for the device at the given path.

    @param path: the device path (for example, /dev/cdrom)

    @rtype: tuple of (vendor, model, release), or None if the pycdio
            module is not installed
    """
    try:
        import cdio
    except ImportError:
        return None

    device = cdio.Device(path)
    # NOTE(review): the first value of get_hwinfo() (a success flag) is
    # ignored here; vendor/model/release may not be meaningful when it is
    # false -- confirm against pycdio before relying on them
    ok, vendor, model, release = device.get_hwinfo()

    return (vendor, model, release)
diff --git a/morituri/common/encode.py b/morituri/common/encode.py
index f00fc1b..dba0eb1 100644
--- a/morituri/common/encode.py
+++ b/morituri/common/encode.py
@@ -297,7 +297,7 @@ class EncodeTask(ctask.GstPipelineTask):
             # self.peak = 0.0
 
 
-class TagReadTask(gstreamer.GstPipelineTask):
+class TagReadTask(ctask.GstPipelineTask):
     """
     I am a task that reads tags.
 
@@ -331,10 +331,15 @@ class TagReadTask(gstreamer.GstPipelineTask):
 
     def bus_tag_cb(self, bus, message):
         taglist = message.parse_tag()
-        self.taglist = taglist
+        self.debug('tag_cb, %d tags' % len(taglist.keys()))
+        if not self.taglist:
+            self.taglist = taglist
+        else:
+            import gst
+            self.taglist = self.taglist.merge(taglist, gst.TAG_MERGE_REPLACE)
 
 
-class TagWriteTask(task.Task):
+class TagWriteTask(ctask.LoggableTask):
     """
     I am a task that retags an encoded file.
     """
@@ -359,6 +364,8 @@ class TagWriteTask(task.Task):
         # here to avoid import gst eating our options
         import gst
 
+        # FIXME: this hardcodes flac; we should be using the correct
+        #        tag element instead
         self._pipeline = gst.parse_launch('''
             filesrc location="%s" !
             flactag name=tagger !
@@ -411,7 +418,7 @@ class TagWriteTask(task.Task):
         task.Task.stop(self)
 
 
-class SafeRetagTask(task.MultiSeparateTask):
+class SafeRetagTask(ctask.LoggableMultiSeparateTask):
     """
     I am a task that retags an encoded file safely in place.
     First of all, if the new tags are the same as the old ones, it doesn't
@@ -486,6 +493,9 @@ class SafeRetagTask(task.MultiSeparateTask):
                 else:
                     self.debug('failed to update tags, only have %r',
                         common.tagListToDict(self.tasks[4].taglist))
+                    self.debug('difference: %r',
+                        common.tagListDifference(self.tasks[4].taglist,
+                            self._taglist))
                     os.unlink(self._tmppath)
                     e = TypeError("Tags not written")
                     self.setAndRaiseException(e)
diff --git a/morituri/common/gstreamer.py b/morituri/common/gstreamer.py
index 2f0b1f5..f1a2a56 100644
--- a/morituri/common/gstreamer.py
+++ b/morituri/common/gstreamer.py
@@ -20,6 +20,9 @@
 # You should have received a copy of the GNU General Public License
 # along with morituri.  If not, see <http://www.gnu.org/licenses/>.
 
+import re
+import commands
+
 from morituri.common import log
 
 # workaround for issue #64
@@ -42,9 +45,41 @@ def removeAudioParsers():
         log.debug('gstreamer', 'Found audioparsers plugin from %s %s',
             plugin.get_source(), plugin.get_version())
 
-        # was fixed after 0.10.30 and before 0.10.31
-        if plugin.get_source() == 'gst-plugins-good' \
-            and plugin.get_version() > '0.10.30.1':
-            return
+        # the query bug was fixed after 0.10.30 and before 0.10.31
+        # the seek bug is still there though
+        # if plugin.get_source() == 'gst-plugins-good' \
+        #   and plugin.get_version() > '0.10.30.1':
+        #    return
 
         registry.remove_plugin(plugin)
+
def gstreamerVersion():
    """
    Return the version of the GStreamer library as a dotted string.
    """
    import gst
    return _versionify(gst.version())
+
def gstPythonVersion():
    """
    Return the version of the gst-python bindings as a dotted string.
    """
    import gst
    return _versionify(gst.pygst_version)
+
+_VERSION_RE = re.compile(
+    "Version:\s*(?P<version>[\d.]+)")
+
def elementFactoryVersion(name):
    """
    Return the version of the plugin providing the given element factory,
    as a string, or None if it could not be determined.

    @param name: name of the element factory (for example, 'flacenc')
    """
    # surprisingly, there is no python way to get from an element factory
    # to its plugin and its version directly; you can only compare
    # with required versions
    # Let's use gst-inspect-0.10 and wave hands and assume it points to the
    # same version that python uses
    output = commands.getoutput('gst-inspect-0.10 %s | grep Version' % name)
    m = _VERSION_RE.search(output)
    if not m:
        return None
    return m.group('version')
+
+
+def _versionify(tup):
+    l = list(tup)
+    if len(l) == 4 and l[3] == 0:
+        l = l[:3]
+    v = [str(n) for n in l]
+    return ".".join(v)
diff --git a/morituri/common/musicbrainzngs.py b/morituri/common/musicbrainzngs.py
index 83cda8c..8637c40 100644
--- a/morituri/common/musicbrainzngs.py
+++ b/morituri/common/musicbrainzngs.py
@@ -69,9 +69,11 @@ class DiscMetadata(object):
     release = None
 
     releaseTitle = None
+    releaseType = None
 
     mbid = None
     mbidArtist = None
+    url = None
 
     def __init__(self):
         self.tracks = []
@@ -88,7 +90,7 @@ def _record(record, which, name, what):
         log.info('musicbrainzngs', 'Wrote %s %s to %s', which, name, filename)
 
 
-def _getMetadata(release, discid):
+def _getMetadata(releaseShort, release, discid):
     """
     @type  release: C{dict}
     @param release: a release dict as returned in the value for key release
@@ -106,6 +108,7 @@ def _getMetadata(release, discid):
 
     metadata = DiscMetadata()
 
+    metadata.releaseType = releaseShort.get('release-group', {}).get('type')
     credit = release['artist-credit']
 
     artist = credit[0]['artist']
@@ -204,7 +207,8 @@ def _getMetadata(release, discid):
     return metadata
 
 
-# see http://bugs.musicbrainz.org/browser/python-musicbrainz2/trunk/examples/ripper.py
+# see http://bugs.musicbrainz.org/browser/python-musicbrainz2/trunk/examples/
+#     ripper.py
 
 
 def musicbrainz(discid, record=False):
@@ -244,9 +248,11 @@ def musicbrainz(discid, record=False):
 
     # Display the returned results to the user.
 
+    import json
     for release in result['disc']['release-list']:
-        log.debug('program', 'result %r: artist %r, title %r' % (
-            release, release['artist-credit-phrase'], release['title']))
+        formatted = json.dumps(release, sort_keys=False, indent=4)
+        log.debug('program', 'result %s: artist %r, title %r' % (
+            formatted, release['artist-credit-phrase'], release['title']))
 
         # to get titles of recordings, we need to query the release with
         # artist-credits
@@ -254,9 +260,11 @@ def musicbrainz(discid, record=False):
         res = musicbrainz.get_release_by_id(release['id'],
             includes=["artists", "artist-credits", "recordings", "discids"])
         _record(record, 'release', release['id'], res)
-        release = res['release']
+        releaseDetail = res['release']
+        formatted = json.dumps(releaseDetail, sort_keys=False, indent=4)
+        log.debug('program', 'release %s' % formatted)
 
-        md = _getMetadata(release, discid)
+        md = _getMetadata(release, releaseDetail, discid)
         if md:
             log.debug('program', 'duration %r', md.duration)
             ret.append(md)
diff --git a/morituri/common/program.py b/morituri/common/program.py
index eb9c0a1..69d7c80 100644
--- a/morituri/common/program.py
+++ b/morituri/common/program.py
@@ -25,10 +25,10 @@ Common functionality and class for all programs using morituri.
 """
 
 import os
+import sys
 import time
 
-from morituri.common import common, log, musicbrainzngs
-from morituri.result import result
+from morituri.common import common, log, musicbrainzngs, cache
 from morituri.program import cdrdao, cdparanoia
 from morituri.image import image
 
@@ -54,21 +54,25 @@ class Program(log.Loggable):
     outdir = None
     result = None
 
-    def __init__(self, record=False):
+    _stdout = None
+
+    def __init__(self, record=False, stdout=sys.stdout):
         """
         @param record: whether to record results of API calls for playback.
         """
         self._record = record
+        self._cache = cache.ResultCache()
+        self._stdout = stdout
 
     def _getTableCachePath(self):
         path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache',
             'table')
         return path
 
-    def _getResultCachePath(self):
-        path = os.path.join(os.path.expanduser('~'), '.morituri', 'cache',
-            'result')
-        return path
+    def setWorkingDirectory(self, workingDirectory):
+        if workingDirectory:
+            self.info('Changing to working directory %s' % workingDirectory)
+            os.chdir(workingDirectory)
 
     def loadDevice(self, device):
         """
@@ -104,7 +108,7 @@ class Program(log.Loggable):
         """
         path = self._getTableCachePath()
 
-        pcache = common.PersistedCache(path)
+        pcache = cache.PersistedCache(path)
         ptable = pcache.get(cddbdiscid)
 
         if not ptable.object:
@@ -129,29 +133,15 @@ class Program(log.Loggable):
         """
         assert self.result is None
 
-        path = self._getResultCachePath()
-
-        pcache = common.PersistedCache(path)
-        presult = pcache.get(cddbdiscid)
-
-        if not presult.object:
-            self.debug('result for cddbdiscid %r not in cache, creating',
-                cddbdiscid)
-            presult.object = result.RipResult()
-            presult.persist(self.result)
-        else:
-            self.debug('result for cddbdiscid %r found in cache, reusing',
-                cddbdiscid)
-
-        self.result = presult.object
-        self._presult = presult
+        self._presult = self._cache.getRipResult(cddbdiscid)
+        self.result = self._presult.object
 
         return self.result
 
     def saveRipResult(self):
         self._presult.persist()
 
-    def getPath(self, outdir, template, mbdiscid, i):
+    def getPath(self, outdir, template, mbdiscid, i, profile=None):
         """
         Based on the template, get a complete path for the given track,
         minus extension.
@@ -163,13 +153,14 @@ class Program(log.Loggable):
         @type  template: unicode
         @param i:        track number (0 for HTOA, or for disc)
         @type  i:        int
+        @type  profile:  L{morituri.common.encode.Profile}
 
         @rtype: unicode
         """
         assert type(outdir) is unicode, "%r is not unicode" % outdir
         assert type(template) is unicode, "%r is not unicode" % template
 
-        # the template is similar to grip, except for %s/%S
+        # the template is similar to grip, except for %s/%S/%r/%R
         # see #gripswitches
 
         # returns without extension
@@ -180,7 +171,10 @@ class Program(log.Loggable):
 
         # default values
         v['A'] = 'Unknown Artist'
-        v['d'] = mbdiscid
+        v['d'] = mbdiscid # fallback for title
+        v['r'] = 'unknown'
+        v['R'] = 'Unknown'
+        v['x'] = profile and profile.extension or 'unknown'
 
         v['a'] = v['A']
         if i == 0:
@@ -195,10 +189,14 @@ class Program(log.Loggable):
             v['A'] = filterForPath(self.metadata.artist)
             v['S'] = filterForPath(self.metadata.sortName)
             v['d'] = filterForPath(self.metadata.title)
+            if self.metadata.releaseType:
+                v['R'] = self.metadata.releaseType
+                v['r'] = self.metadata.releaseType.lower()
             if i > 0:
                 try:
                     v['a'] = filterForPath(self.metadata.tracks[i - 1].artist)
-                    v['s'] = filterForPath(self.metadata.tracks[i - 1].sortName)
+                    v['s'] = filterForPath(
+                        self.metadata.tracks[i - 1].sortName)
                     v['n'] = filterForPath(self.metadata.tracks[i - 1].title)
                 except IndexError, e:
                     print 'ERROR: no track %d found, %r' % (i, e)
@@ -229,8 +227,8 @@ class Program(log.Loggable):
 
     def getMusicBrainz(self, ittoc, mbdiscid, release=None):
         # look up disc on musicbrainz
-        print 'Disc duration: %s' % common.formatTime(
-            ittoc.duration() / 1000.0)
+        self._stdout.write('Disc duration: %s\n' % common.formatTime(
+            ittoc.duration() / 1000.0))
         self.debug('MusicBrainz submit url: %r',
             ittoc.getMusicBrainzSubmitURL())
         ret = None
@@ -245,26 +243,30 @@ class Program(log.Loggable):
             except musicbrainzngs.NotFoundException, e:
                 break
             except musicbrainzngs.MusicBrainzException, e:
-                print "Warning:", e
+                self._stdout.write("Warning: %r\n" % (e, ))
                 time.sleep(5)
                 continue
 
         if not metadatas:
             if e:
-                print "Error:", e
-            print 'Continuing without metadata'
+                self._stdout.write("Error: %r\n" % (e, ))
+            self._stdout.write('Continuing without metadata\n')
 
         if metadatas:
-            print
-            print 'Matching releases:'
             deltas = {}
-            for metadata in metadatas:
 
-                print 'Artist  : %s' % metadata.artist.encode('utf-8')
-                print 'Title   : %s' % metadata.title.encode('utf-8')
-                print 'Duration: %s' % common.formatTime(
-                    metadata.duration / 1000.0)
-                print 'URL     : %s' % metadata.url
+            self._stdout.write('\nMatching releases:\n')
+
+            for metadata in metadatas:
+                self._stdout.write('Artist  : %s\n' %
+                    metadata.artist.encode('utf-8'))
+                self._stdout.write('Title   : %s\n' %
+                    metadata.title.encode('utf-8'))
+                self._stdout.write('Duration: %s\n' %
+                    common.formatTime(metadata.duration / 1000.0))
+                self._stdout.write('URL     : %s\n' % metadata.url)
+                self._stdout.write('Release : %s\n' % metadata.mbid)
+                self._stdout.write('Type    : %s\n' % metadata.releaseType)
 
                 delta = abs(metadata.duration - ittoc.duration())
                 if not delta in deltas:
@@ -276,12 +278,16 @@ class Program(log.Loggable):
                 self.debug('Asked for release %r, only kept %r',
                     release, metadatas)
                 if len(metadatas) == 1:
-                    print
-                    print 'Picked requested release id %s' % release
-                    print 'Artist : %s' % metadatas[0].artist.encode('utf-8')
-                    print 'Title :  %s' % metadatas[0].title.encode('utf-8')
+                    self._stdout.write('\n')
+                    self._stdout.write('Picked requested release id %s\n' %
+                        release)
+                    self._stdout.write('Artist : %s\n' %
+                        metadatas[0].artist.encode('utf-8'))
+                    self._stdout.write('Title :  %s\n' %
+                        metadatas[0].title.encode('utf-8'))
                 elif not metadatas:
-                    print 'Requested release id %s but none match' % release
+                    self._stdout.write(
+                        'Requested release id %s but none match' % release)
                     return
             else:
                 # Select the release that most closely matches the duration.
@@ -304,19 +310,23 @@ class Program(log.Loggable):
                                 releaseTitle, i, metadata.releaseTitle))
 
                 if (not release and len(deltas.keys()) > 1):
-                    print
-                    print 'Picked closest match in duration.'
-                    print 'Others may be wrong in musicbrainz, please correct.'
-                    print 'Artist : %s' % artist.encode('utf-8')
-                    print 'Title :  %s' % metadatas[0].title.encode('utf-8')
+                    self._stdout.write('\n')
+                    self._stdout.write('Picked closest match in duration.\n')
+                    self._stdout.write('Others may be wrong in musicbrainz, '
+                        'please correct.\n')
+                    self._stdout.write('Artist : %s\n' %
+                        artist.encode('utf-8'))
+                    self._stdout.write('Title :  %s\n' %
+                        metadatas[0].title.encode('utf-8'))
 
             # Select one of the returned releases. We just pick the first one.
             ret = metadatas[0]
         else:
-            print 'Submit this disc to MusicBrainz at the above URL.'
+            self._stdout.write(
+                'Submit this disc to MusicBrainz at the above URL.\n')
             ret = None
 
-        print
+        self._stdout.write('\n')
         return ret
 
     def getTagList(self, number):
@@ -359,7 +369,8 @@ class Program(log.Loggable):
 
         ret = gst.TagList()
 
-        # gst-python 0.10.15.1 does not handle unicode -> utf8 string conversion
+        # gst-python 0.10.15.1 does not handle unicode -> utf8 string
+        # conversion
         # see http://bugzilla.gnome.org/show_bug.cgi?id=584445
         if self.metadata and self.metadata.various:
             ret["album-artist"] = albumArtist.encode('utf-8')
@@ -380,8 +391,8 @@ class Program(log.Loggable):
             # 0.10.15.1
             # FIXME: The dates are strings and must have the format 'YYYY',
             # 'YYYY-MM' or 'YYYY-MM-DD'.
-            # GstDate expects a full date, so default to Jan and 1st if MM and DD
-            # are missing
+            # GstDate expects a full date, so default to
+            # Jan and 1st if MM and DD are missing
             date = self.metadata.release
             if date:
                 log.debug('metadata',
@@ -470,10 +481,20 @@ class Program(log.Loggable):
 
         runner.run(t)
 
+        self.debug('ripped track')
+        self.debug('test speed %.3f/%.3f seconds' % (
+            t.testspeed, t.testduration))
+        self.debug('copy speed %.3f/%.3f seconds' % (
+            t.copyspeed, t.copyduration))
         trackResult.testcrc = t.testchecksum
         trackResult.copycrc = t.copychecksum
         trackResult.peak = t.peak
         trackResult.quality = t.quality
+        trackResult.testspeed = t.testspeed
+        trackResult.copyspeed = t.copyspeed
+        # we want rerips to add cumulatively to the time
+        trackResult.testduration += t.testduration
+        trackResult.copyduration += t.copyduration
 
         if trackResult.filename != t.path:
             trackResult.filename = t.path
diff --git a/morituri/common/task.py b/morituri/common/task.py
index f7744e7..a40acc2 100644
--- a/morituri/common/task.py
+++ b/morituri/common/task.py
@@ -1,6 +1,11 @@
 # -*- Mode: Python -*-
 # vi:si:et:sw=4:sts=4:ts=4
 
+import os
+import signal
+import subprocess
+
+from morituri.extern import asyncsub
 from morituri.extern.log import log
 from morituri.extern.task import task, gstreamer
 
@@ -11,5 +16,137 @@ class SyncRunner(log.Loggable, task.SyncRunner):
     pass
 
 
class LoggableTask(log.Loggable, task.Task):
    """I am a task that also logs through the project's logging mixin."""
    pass
+
class LoggableMultiSeparateTask(log.Loggable, task.MultiSeparateTask):
    """I am a multi-separate task that also logs through the logging mixin."""
    pass
+
 class GstPipelineTask(log.Loggable, gstreamer.GstPipelineTask):
     pass
+
+
class PopenTask(log.Loggable, task.Task):
    """
    I am a task that runs a command using Popen.
    """

    logCategory = 'PopenTask'
    # how many bytes to buffer when reading from the child's pipes
    bufsize = 1024
    # the command to run; set by subclasses before start()
    command = None
    # working directory for the child process, or None to inherit ours
    cwd = None

    def start(self, runner):
        # Spawn the child process and schedule the first read poll.
        task.Task.start(self, runner)

        try:
            self._popen = asyncsub.Popen(self.command,
                bufsize=self.bufsize,
                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE, close_fds=True, cwd=self.cwd)
        except OSError, e:
            import errno
            # ENOENT means the executable itself was not found; give the
            # subclass a chance to report it, then re-raise
            if e.errno == errno.ENOENT:
                self.commandMissing()

            raise

        self.debug('Started %r with pid %d', self.command,
            self._popen.pid)

        self.schedule(1.0, self._read, runner)

    def _read(self, runner):
        # Poll the child's stdout/stderr, dispatch any output to the
        # readbytes* callbacks, and reschedule until the child exits.
        try:
            read = False

            # NOTE(review): recv()/recv_err() come from
            # morituri.extern.asyncsub -- presumably non-blocking reads
            # that return an empty value when no data is available;
            # confirm against asyncsub
            ret = self._popen.recv()

            if ret:
                self.log("read from stdout: %s", ret)
                self.readbytesout(ret)
                read = True

            ret = self._popen.recv_err()

            if ret:
                self.log("read from stderr: %s", ret)
                self.readbyteserr(ret)
                read = True

            # if we read anything, we might have more to read, so
            # reschedule immediately
            if read and self.runner:
                self.schedule(0.0, self._read, runner)
                return

            # if we didn't read anything, give the command more time to
            # produce output
            if self._popen.poll() is None and self.runner:
                # not finished yet
                self.schedule(1.0, self._read, runner)
                return

            self._done()
        except Exception, e:
            self.debug('exception during _read()')
            self.debug(log.getExceptionMessage(e))
            self.setException(e)
            self.stop()

    def _done(self):
            # The child has exited: report its status, mark the task
            # complete, and invoke the done()/failed() callback.
            assert self._popen.returncode is not None, "No returncode"

            if self._popen.returncode >= 0:
                self.debug('Return code was %d', self._popen.returncode)
            else:
                # a negative returncode means the child died from a signal
                self.debug('Terminated with signal %d',
                    -self._popen.returncode)

            self.setProgress(1.0)

            if self._popen.returncode != 0:
                self.failed()
            else:
                self.done()

            self.stop()
            return

    def abort(self):
        # Ask the child to terminate; _read will observe the exit on its
        # next poll and finish the task
        self.debug('Aborting, sending SIGTERM to %d', self._popen.pid)
        os.kill(self._popen.pid, signal.SIGTERM)
        # self.stop()

    def readbytesout(self, bytes):
        """
        Called when bytes have been read from stdout.
        """
        pass

    def readbyteserr(self, bytes):
        """
        Called when bytes have been read from stderr.
        """
        pass

    def done(self):
        """
        Called when the command completed successfully.
        """
        pass

    def failed(self):
        """
        Called when the command failed.
        """
        pass


    def commandMissing(self):
        """
        Called when the command is missing.
        """
        pass
+
+
diff --git a/morituri/configure/Makefile.in b/morituri/configure/Makefile.in
index a6af6f1..efd9094 100644
--- a/morituri/configure/Makefile.in
+++ b/morituri/configure/Makefile.in
@@ -136,6 +136,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/morituri/configure/configure.py b/morituri/configure/configure.py
index 7538e60..f55525a 100644
--- a/morituri/configure/configure.py
+++ b/morituri/configure/configure.py
@@ -22,7 +22,7 @@ import os
 # where am I on the disk ?
 __thisdir = os.path.dirname(os.path.abspath(__file__))
 
-revision = "$Revision: 435 $"
+revision = "$Revision$"
 
 if os.path.exists(os.path.join(__thisdir, 'uninstalled.py')):
     from morituri.configure import uninstalled
diff --git a/morituri/configure/installed.py b/morituri/configure/installed.py
index af7a5e0..9349bac 100644
--- a/morituri/configure/installed.py
+++ b/morituri/configure/installed.py
@@ -5,5 +5,6 @@
 def get():
     return {
         'isinstalled': True,
-        'version': '0.1.3',
+        'pluginsdir': '/usr/local/lib/morituri/plugins',
+        'version': '0.2.0',
     }
diff --git a/morituri/configure/installed.py.in b/morituri/configure/installed.py.in
index 8a78ae0..c2c536a 100644
--- a/morituri/configure/installed.py.in
+++ b/morituri/configure/installed.py.in
@@ -5,5 +5,6 @@
 def get():
     return {
         'isinstalled': True,
+        'pluginsdir': '@PLUGINSDIR@',
         'version': '@VERSION@',
     }
diff --git a/morituri/configure/uninstalled.py b/morituri/configure/uninstalled.py
index 339b9f1..c88959a 100644
--- a/morituri/configure/uninstalled.py
+++ b/morituri/configure/uninstalled.py
@@ -1,9 +1,16 @@
 # -*- Mode: Python -*-
 # vi:si:et:sw=4:sts=4:ts=4
 
+import os
+
+
+__thisdir = os.path.dirname(os.path.abspath(__file__))
+
 
 def get():
     return {
         'isinstalled': False,
-        'version': '0.1.3',
+        'pluginsdir': os.path.abspath(os.path.join(
+            __thisdir, '..', '..', 'plugins')),
+        'version': '0.2.0',
     }
diff --git a/morituri/configure/uninstalled.py.in b/morituri/configure/uninstalled.py.in
index 4078f80..7e9aea3 100644
--- a/morituri/configure/uninstalled.py.in
+++ b/morituri/configure/uninstalled.py.in
@@ -1,9 +1,16 @@
 # -*- Mode: Python -*-
 # vi:si:et:sw=4:sts=4:ts=4
 
+import os
+
+
+__thisdir = os.path.dirname(os.path.abspath(__file__))
+
 
 def get():
     return {
         'isinstalled': False,
+        'pluginsdir': os.path.abspath(os.path.join(
+            __thisdir, '..', '..', 'plugins')),
         'version': '@VERSION@',
     }
diff --git a/morituri/extern/Makefile.am b/morituri/extern/Makefile.am
index 350933e..f17bc5e 100644
--- a/morituri/extern/Makefile.am
+++ b/morituri/extern/Makefile.am
@@ -34,20 +34,6 @@ musicbrainzngs_PYTHON = \
 	musicbrainzngs/mbxml.py
 
 
-EXTRA_DIST = log/test_log.py python-command/scripts/help2man
+EXTRA_DIST = python-command/scripts/help2man
 
 musicbrainzngs/musicbrainz.py: all
-
-python-musicbrainz-ngs:
-	git clone git://github.com/thomasvs/python-musicbrainz-ngs.git
-
-python-musicbrainz-ngs-check:
-	if test ! -e $(top_srcdir)/morituri/extern/musicbrainzngs/musicbrainz.py; then echo cloning python-musicbrainz-ngs; make python-musicbrainz-ngs; fi
-
-python-command:
-	git clone git://github.com/thomasvs/python-command.git
-
-python-command-check:
-	if test ! -e $(top_srcdir)/morituri/extern/command/command.py; then echo cloning python-command; make python-command; fi
-
-all: python-musicbrainz-ngs-check python-command-check
diff --git a/morituri/extern/Makefile.in b/morituri/extern/Makefile.in
index e8c0e55..7badc4a 100644
--- a/morituri/extern/Makefile.in
+++ b/morituri/extern/Makefile.in
@@ -138,6 +138,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
@@ -221,7 +222,7 @@ musicbrainzngs_PYTHON = \
 	musicbrainzngs/musicbrainz.py \
 	musicbrainzngs/mbxml.py
 
-EXTRA_DIST = log/test_log.py python-command/scripts/help2man
+EXTRA_DIST = python-command/scripts/help2man
 all: all-am
 
 .SUFFIXES:
@@ -610,20 +611,6 @@ uninstall-am: uninstall-commandPYTHON uninstall-logPYTHON \
 
 musicbrainzngs/musicbrainz.py: all
 
-python-musicbrainz-ngs:
-	git clone git://github.com/thomasvs/python-musicbrainz-ngs.git
-
-python-musicbrainz-ngs-check:
-	if test ! -e $(top_srcdir)/morituri/extern/musicbrainzngs/musicbrainz.py; then echo cloning python-musicbrainz-ngs; make python-musicbrainz-ngs; fi
-
-python-command:
-	git clone git://github.com/thomasvs/python-command.git
-
-python-command-check:
-	if test ! -e $(top_srcdir)/morituri/extern/command/command.py; then echo cloning python-command; make python-command; fi
-
-all: python-musicbrainz-ngs-check python-command-check
-
 # Tell versions [3.59,3.63) of GNU make to not export all variables.
 # Otherwise a system limit (for SysV at least) may be exceeded.
 .NOEXPORT:
diff --git a/morituri/extern/command/command.py b/morituri/extern/command/command.py
index 747f598..fc30fed 100644
--- a/morituri/extern/command/command.py
+++ b/morituri/extern/command/command.py
@@ -349,17 +349,19 @@ class Command(object):
             except CommandOk, e:
                 self.debug('done with exception, raised %r', e)
                 ret = e.status
-                self.stdout.write(e.output + '\n')
+                if e.output is not None:
+                    self.stdout.write(e.output + '\n')
             except CommandExited, e:
                 self.debug('done with exception, raised %r', e)
                 ret = e.status
-                self.stderr.write(e.output + '\n')
+                if e.output is not None:
+                    self.stderr.write(e.output + '\n')
             except NotImplementedError:
                 self.debug('done with NotImplementedError')
                 self.parser.print_usage(file=self.stderr)
                 self.stderr.write(
                     "Use --help to get a list of commands.\n")
-                return 1
+                ret = 1
 
 
             # if everything's fine, we return 0
@@ -483,7 +485,7 @@ class Command(object):
 
 class CommandExited(Exception):
 
-    def __init__(self, status, output):
+    def __init__(self, status, output=None):
         self.args = (status, output)
         self.status = status
         self.output = output
@@ -491,13 +493,13 @@ class CommandExited(Exception):
 
 class CommandOk(CommandExited):
 
-    def __init__(self, output):
+    def __init__(self, output=None):
         CommandExited.__init__(self, 0, output)
 
 
 class CommandError(CommandExited):
 
-    def __init__(self, output):
+    def __init__(self, output=None):
         CommandExited.__init__(self, 3, output)
 
 
diff --git a/morituri/extern/log/log.py b/morituri/extern/log/log.py
index 7fc3333..1f8411e 100644
--- a/morituri/extern/log/log.py
+++ b/morituri/extern/log/log.py
@@ -882,7 +882,13 @@ def getFailureMessage(failure):
     if len(failure.frames) == 0:
         return "failure %(exc)s: %(msg)s" % locals()
 
-    (func, filename, line, some, other) = failure.frames[-1]
+    # when using inlineCallbacks, a traceback coming from unwindGenerator
+    # is actually provoked one frame down
+    for frame in failure.frames[::-1]:
+        (func, filename, line, some, other) = frame
+        if func not in ['unwindGenerator', ]:
+            break
+
     filename = scrubFilename(filename)
     return "failure %(exc)s at %(filename)s:%(line)s: %(func)s(): %(msg)s" \
         % locals()
@@ -993,11 +999,11 @@ class TwistedLogObserver(Loggable):
 
 
 def logLevelToStdLevel(level):
-    t = {ERROR : logging.CRITICAL,
-         WARN : logging.ERROR,
-         INFO : logging.WARNING,
-         DEBUG : logging.INFO,
-         LOG : logging.DEBUG}
+    t = {ERROR: logging.CRITICAL,
+         WARN: logging.ERROR,
+         INFO: logging.WARNING,
+         DEBUG: logging.INFO,
+         LOG: logging.DEBUG}
     return t.get(level, logging.NOTSET)
 
 
diff --git a/morituri/extern/log/test_log.py b/morituri/extern/log/test_log.py
deleted file mode 100644
index 4893f19..0000000
--- a/morituri/extern/log/test_log.py
+++ /dev/null
@@ -1,406 +0,0 @@
-# -*- Mode: Python; test-case-name: test_log -*-
-# vi:si:et:sw=4:sts=4:ts=4
-#
-# Flumotion - a streaming media server
-# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
-# All rights reserved.
-
-# This file may be distributed and/or modified under the terms of
-# the GNU General Public License version 2 as published by
-# the Free Software Foundation.
-# This file is distributed without any warranty; without even the implied
-# warranty of merchantability or fitness for a particular purpose.
-# See "LICENSE.GPL" in the source distribution for more information.
-
-# Licensees having purchased or holding a valid Flumotion Advanced
-# Streaming Server license may use this file in accordance with the
-# Flumotion Advanced Streaming Server Commercial License Agreement.
-# See "LICENSE.Flumotion" in the source distribution for more information.
-
-# Headers in this file shall remain intact.
-
-import logging
-
-from twisted.trial import unittest
-
-import log
-
-__version__ = "$Rev: 8678 $"
-
-
-class LogTester(log.Loggable):
-    logCategory = 'testlog'
-
-
-class LogFunctionTester(log.Loggable):
-
-    def logFunction(self, format, *args):
-        return (("override " + format), ) + args[1:]
-
-
-class TestLog(unittest.TestCase):
-
-    def setUp(self):
-        self.category = self.level = self.message = None
-        self.tester = LogTester()
-        # we want to remove the default handler so it doesn't show up stuff
-        log.reset()
-
-    # just test for parsing semi- or non-valid FLU_DEBUG variables
-
-    def testSetDebug(self):
-        log.setDebug(":5")
-        log.setDebug("*")
-        log.setDebug("5")
-
-    def testGetLevelName(self):
-        self.assertRaises(AssertionError, log.getLevelName, -1)
-        self.assertRaises(AssertionError, log.getLevelName, 6)
-
-    def testGetLevelInt(self):
-        self.assertRaises(AssertionError, log.getLevelInt, "NOLEVEL")
-
-    def testGetFormattedLevelName(self):
-        self.assertRaises(AssertionError, log.getFormattedLevelName, -1)
-        # FIXME: we're poking at internals here, but without calling this
-        # no format levels are set at all.
-        log._preformatLevels("ENVVAR")
-        self.failUnless("LOG" in log.getFormattedLevelName(log.LOG))
-
-    def testGetFileLine(self):
-        # test a function object
-        (filename, line) = log.getFileLine(where=self.testGetFileLine)
-        self.failUnless(filename.endswith('test_log.py'))
-        self.assertEquals(line, 70)
-
-        # test a lambda
-        f = lambda x: x + 1
-        (filename, line) = log.getFileLine(where=f)
-        self.failUnless(filename.endswith('test_log.py'))
-        self.assertEquals(line, 77)
-
-        # test an eval
-        f = eval("lambda x: x + 1")
-        (filename, line) = log.getFileLine(where=f)
-        self.assertEquals(filename, '<string>')
-        self.assertEquals(line, 1)
-
-    # test for adding a log handler
-
-    def testEllipsize(self):
-        self.assertEquals(log.ellipsize("*" * 1000),
-            "'" + "*" * 59 + ' ... ' + "*" * 14 + "'")
-
-    def handler(self, level, object, category, file, line, message):
-        self.level = level
-        self.object = object
-        self.category = category
-        self.file = file
-        self.line = line
-        self.message = message
-
-    def testLimitInvisible(self):
-        log.setDebug("testlog:3")
-        log.addLimitedLogHandler(self.handler)
-
-        # log 2 we shouldn't get
-        self.tester.log("not visible")
-        assert not self.category
-        assert not self.level
-        assert not self.message
-
-        self.tester.debug("not visible")
-        assert not self.category
-        assert not self.level
-        assert not self.message
-
-    def testLimitedVisible(self):
-        log.setDebug("testlog:3")
-        log.addLimitedLogHandler(self.handler)
-
-        # log 3 we should get
-        self.tester.info("visible")
-        assert self.category == 'testlog'
-        assert self.level == log.INFO
-        assert self.message == 'visible'
-
-        self.tester.warning("also visible")
-        assert self.category == 'testlog'
-        assert self.level == log.WARN
-        assert self.message == 'also visible'
-
-    def testFormatStrings(self):
-        log.setDebug("testlog:3")
-        log.addLimitedLogHandler(self.handler)
-
-        self.tester.info("%d %s", 42, 'the answer')
-        assert self.category == 'testlog'
-        assert self.level == log.INFO
-        assert self.message == '42 the answer'
-
-    def testLimitedError(self):
-        log.setDebug("testlog:3")
-        log.addLimitedLogHandler(self.handler)
-
-        self.assertRaises(SystemExit, self.tester.error, "error")
-        assert self.category == 'testlog'
-        assert self.level == log.ERROR
-        assert self.message == 'error'
-
-    def testLogHandlerLimitedLevels(self):
-        log.setDebug("testlog:3")
-        log.addLimitedLogHandler(self.handler)
-
-        # now try debug and log again too
-        log.setDebug("testlog:5")
-
-        self.tester.debug("debug")
-        assert self.category == 'testlog'
-        assert self.level == log.DEBUG
-        assert self.message == 'debug'
-
-        self.tester.log("log")
-        assert self.category == 'testlog'
-        assert self.level == log.LOG
-        assert self.message == 'log'
-
-    # test that we get all log messages
-
-    def testLogHandler(self):
-        log.setDebug("testlog:3")
-        log.addLogHandler(self.handler)
-
-        self.tester.log("visible")
-        assert self.message == 'visible'
-
-        self.tester.warning("also visible")
-        assert self.message == 'also visible'
-
-    def testAddLogHandlerRaises(self):
-        self.assertRaises(TypeError, log.addLogHandler, 1)
-
-    def testAdaptStandardLogging(self):
-        # create a standard logger
-        logger = logging.getLogger('standard.logger')
-
-        # set the debug level for the "test" category
-        log.setDebug("test:3")
-        log.addLimitedLogHandler(self.handler)
-
-        logger.warning('invisible')
-        # should not get anything, because the std module has not been adapted
-        assert not self.category
-        assert not self.level
-        assert not self.message
-
-        log.adaptStandardLogging('standard.logger', 'test', 'test_log')
-        self.assertEquals(len(logger.handlers), 1)
-
-        # trying to adapt it again should be a noop
-        log.adaptStandardLogging('standard.logger', 'test', 'test_log')
-        self.assertEquals(len(logger.handlers), 1)
-
-        logger.info('invisible')
-        # should not get anything, because INFO translates to Flu debug 4
-        assert not self.category
-        assert not self.level
-        assert not self.message
-
-        logger.warning('visible')
-        # WARNING translates to INFO, see log.stdLevelToFluLevel
-        assert self.category == 'test', self.category
-        assert self.level == log.INFO
-        assert self.message == 'visible'
-
-        self.message = self.level = self.category = None
-
-        # lower the debug level
-        log.setDebug("test:2")
-        logger.warning('visible')
-        # should not get anything now
-        assert not self.category
-        assert not self.level
-        assert not self.message
-
-
-class TestOwnLogHandler(unittest.TestCase):
-
-    def setUp(self):
-        self.category = self.level = self.message = None
-        self.tester = LogFunctionTester()
-        log.reset()
-
-    def handler(self, level, object, category, file, line, message):
-        self.level = level
-        self.object = object
-        self.category = category
-        self.file = file
-        self.line = line
-        self.message = message
-
-    # test if our own log handler correctly mangles the message
-
-    def testOwnLogHandlerLimited(self):
-        log.setDebug("testlog:3")
-        log.addLogHandler(self.handler)
-
-        self.tester.log("visible")
-        assert self.message == 'override visible'
-
-    def testLogHandlerAssertion(self):
-        self.assertRaises(TypeError, log.addLimitedLogHandler, None)
-
-
-class TestGetExceptionMessage(unittest.TestCase):
-
-    def setUp(self):
-        log.reset()
-
-    def func3(self):
-        self.func2()
-
-    def func2(self):
-        self.func1()
-
-    def func1(self):
-        raise TypeError("I am in func1")
-
-    def testLevel2(self):
-        try:
-            self.func2()
-            self.fail()
-        except TypeError, e:
-            self.verifyException(e)
-
-    def testLevel3(self):
-        try:
-            self.func3()
-            self.fail()
-        except TypeError, e:
-            self.verifyException(e)
-
-    def verifyException(self, e):
-        message = log.getExceptionMessage(e)
-        self.failUnless("func1()" in message)
-        self.failUnless("test_log.py" in message)
-        self.failUnless("TypeError" in message)
-
-
-class TestLogSettings(unittest.TestCase):
-
-    def testSet(self):
-        old = log.getLogSettings()
-        log.setDebug('*:5')
-        self.assertNotEquals(old, log.getLogSettings())
-
-        log.setLogSettings(old)
-        self.assertEquals(old, log.getLogSettings())
-
-
-class TestWriteMark(unittest.TestCase):
-
-    def handler(self, level, object, category, file, line, message):
-        self.level = level
-        self.object = object
-        self.category = category
-        self.file = file
-        self.line = line
-        self.message = message
-
-    def testWriteMarkInDebug(self):
-        loggable = log.Loggable()
-        log.setDebug("4")
-        log.addLogHandler(self.handler)
-        marker = 'test'
-        loggable.writeMarker(marker, log.DEBUG)
-        self.assertEquals(self.message, marker)
-
-    def testWriteMarkInWarn(self):
-        loggable = log.Loggable()
-        log.setDebug("2")
-        log.addLogHandler(self.handler)
-        marker = 'test'
-        loggable.writeMarker(marker, log.WARN)
-        self.assertEquals(self.message, marker)
-
-    def testWriteMarkInInfo(self):
-        loggable = log.Loggable()
-        log.setDebug("3")
-        log.addLogHandler(self.handler)
-        marker = 'test'
-        loggable.writeMarker(marker, log.INFO)
-        self.assertEquals(self.message, marker)
-
-    def testWriteMarkInLog(self):
-        loggable = log.Loggable()
-        log.setDebug("5")
-        log.addLogHandler(self.handler)
-        marker = 'test'
-        loggable.writeMarker(marker, log.LOG)
-        self.assertEquals(self.message, marker)
-
-    def testWriteMarkInError(self):
-        loggable = log.Loggable()
-        log.setDebug("4")
-        log.addLogHandler(self.handler)
-        marker = 'test'
-        self.assertRaises(SystemExit, loggable.writeMarker, marker, log.ERROR)
-        self.assertEquals(self.message, marker)
-
-
-class TestLogNames(unittest.TestCase):
-
-    def testGetLevelNames(self):
-        self.assertEquals(['ERROR', 'WARN', 'INFO', 'DEBUG', 'LOG'],
-                          log.getLevelNames())
-
-    def testGetLevelCode(self):
-        self.assertEquals(1, log.getLevelInt('ERROR'))
-        self.assertEquals(2, log.getLevelInt('WARN'))
-        self.assertEquals(3, log.getLevelInt('INFO'))
-        self.assertEquals(4, log.getLevelInt('DEBUG'))
-        self.assertEquals(5, log.getLevelInt('LOG'))
-
-    def testGetLevelName(self):
-        self.assertEquals('ERROR', log.getLevelName(1))
-        self.assertEquals('WARN', log.getLevelName(2))
-        self.assertEquals('INFO', log.getLevelName(3))
-        self.assertEquals('DEBUG', log.getLevelName(4))
-        self.assertEquals('LOG', log.getLevelName(5))
-
-
-class TestLogUnicode(unittest.TestCase):
-
-    def setUp(self):
-        self.tester = LogTester()
-        # add stderrHandler to fully test unicode handling
-        log.addLogHandler(log.stderrHandler)
-
-    def testUnicode(self):
-        # Test with a unicode input
-        self.tester.log(u'\xf3')
-
-    def testUnicodeWithArgs(self):
-        self.tester.log('abc: %s', u'\xf3')
-
-    def testNonASCIIByteString(self):
-        # Test with a non-ASCII bytestring
-        self.tester.log('\xc3\xa4')
-
-    def testNonASCIIByteStringWithArgs(self):
-        self.tester.log('abc: %s', '\xc3\xa4')
-
-    def testNonASCIIByteStringPlusUnicode(self):
-        # This should fail since were trying to combine
-        # a non-ascii string with a unicode string
-        self.assertRaises(UnicodeDecodeError,
-                          self.tester.log,
-                          'abc\xf3n%s:',
-                          u'a')
-
-    def testASCIIFormatUnicodeArgs(self):
-        self.tester.log('abc: %s', u'\xc3\xa4')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/morituri/image/Makefile.in b/morituri/image/Makefile.in
index 77b4748..6c55fac 100644
--- a/morituri/image/Makefile.in
+++ b/morituri/image/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/morituri/image/image.py b/morituri/image/image.py
index 788b888..5d16f3a 100644
--- a/morituri/image/image.py
+++ b/morituri/image/image.py
@@ -120,7 +120,11 @@ class AccurateRipChecksumTask(log.Loggable, task.MultiSeparateTask):
         for trackIndex, track in enumerate(cue.table.tracks):
             index = track.indexes[1]
             length = cue.getTrackLength(track)
-            self.debug('track %d has length %d' % (trackIndex + 1, length))
+            if length < 0:
+                self.debug('track %d has unknown length' % (trackIndex + 1, ))
+            else:
+                self.debug('track %d is %d samples long' % (
+                    trackIndex + 1, length))
 
             path = image.getRealPath(index.path)
 
@@ -129,8 +133,8 @@ class AccurateRipChecksumTask(log.Loggable, task.MultiSeparateTask):
 
             checksumTask = checksum.AccurateRipChecksumTask(path,
                 trackNumber=trackIndex + 1, trackCount=len(cue.table.tracks),
-                frameStart=index.relative * common.SAMPLES_PER_FRAME,
-                frameLength=length * common.SAMPLES_PER_FRAME)
+                sampleStart=index.relative * common.SAMPLES_PER_FRAME,
+                sampleLength=length * common.SAMPLES_PER_FRAME)
             self.addTask(checksumTask)
 
     def stop(self):
@@ -140,9 +144,9 @@ class AccurateRipChecksumTask(log.Loggable, task.MultiSeparateTask):
 
 class AudioLengthTask(log.Loggable, gstreamer.GstPipelineTask):
     """
-    I calculate the length of a track in audio frames.
+    I calculate the length of a track in audio samples.
 
-    @ivar  length: length of the decoded audio file, in audio frames.
+    @ivar  length: length of the decoded audio file, in audio samples.
     """
     logCategory = 'AudioLengthTask'
     description = 'Getting length of audio track'
diff --git a/morituri/image/toc.py b/morituri/image/toc.py
index 2ca68a5..55f902d 100644
--- a/morituri/image/toc.py
+++ b/morituri/image/toc.py
@@ -110,7 +110,7 @@ class TocFile(object, log.Loggable):
         trackNumber = 0
         indexNumber = 0
         absoluteOffset = 0 # running absolute offset of where each track starts
-        relativeOffset = 0 # running relative offset, relative to counter source
+        relativeOffset = 0 # running relative offset, relative to counter src
         currentLength = 0 # accrued during TRACK record parsing, current track
         totalLength = 0 # accrued during TRACK record parsing, total disc
         pregapLength = 0 # length of the pre-gap, current track
@@ -163,7 +163,8 @@ class TocFile(object, log.Loggable):
                     # FIXME: why not set absolute offsets too ?
                     currentTrack.index(1, path=currentFile.path,
                         absolute=absoluteOffset + pregapLength,
-                        relative=relativeOffset + pregapLength, counter=counter)
+                        relative=relativeOffset + pregapLength,
+                        counter=counter)
                     self.debug('track %d, added index %r',
                         currentTrack.number, currentTrack.getIndex(1))
 
@@ -223,7 +224,8 @@ class TocFile(object, log.Loggable):
                 if not currentFile or filePath != currentFile.path:
                     counter += 1
                     relativeOffset = 0
-                    self.debug('track %d, switched to new FILE, increased counter to %d',
+                    self.debug('track %d, switched to new FILE, '
+                               'increased counter to %d',
                         trackNumber, counter)
                 currentFile = File(filePath, start, length)
                 #absoluteOffset += common.msfToFrames(start)
@@ -240,7 +242,8 @@ class TocFile(object, log.Loggable):
                 if not currentFile or filePath != currentFile.path:
                     counter += 1
                     relativeOffset = 0
-                    self.debug('track %d, switched to new FILE, increased counter to %d',
+                    self.debug('track %d, switched to new FILE, '
+                        'increased counter to %d',
                         trackNumber, counter)
                 # FIXME: assume that a MODE2_FORM_MIX track always starts at 0
                 currentFile = File(filePath, 0, length)
diff --git a/morituri/program/Makefile.in b/morituri/program/Makefile.in
index 64e9295..53c9b43 100644
--- a/morituri/program/Makefile.in
+++ b/morituri/program/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/morituri/program/cdparanoia.py b/morituri/program/cdparanoia.py
index 0bfe298..c86ff02 100644
--- a/morituri/program/cdparanoia.py
+++ b/morituri/program/cdparanoia.py
@@ -22,6 +22,7 @@
 
 import os
 import errno
+import time
 import re
 import stat
 import shutil
@@ -29,6 +30,7 @@ import subprocess
 import tempfile
 
 from morituri.common import log, common
+from morituri.common import task as ctask
 
 from morituri.extern import asyncsub
 from morituri.extern.task import task
@@ -203,6 +205,8 @@ class ReadTrackTask(log.Loggable, task.Task):
 
     description = "Reading track"
     quality = None # set at end of reading
+    speed = None
+    duration = None # in seconds
 
     _MAXERROR = 100 # number of errors detected by parser
 
@@ -237,6 +241,7 @@ class ReadTrackTask(log.Loggable, task.Task):
         self._offset = offset
         self._parser = ProgressParser(start, stop)
         self._device = device
+        self._start_time = None
 
         self._buffer = "" # accumulate characters
         self._errors = []
@@ -288,6 +293,7 @@ class ReadTrackTask(log.Loggable, task.Task):
 
             raise
 
+        self._start_time = time.time()
         self.schedule(1.0, self._read, runner)
 
     def _read(self, runner):
@@ -339,6 +345,7 @@ class ReadTrackTask(log.Loggable, task.Task):
         self._done()
 
     def _done(self):
+        end_time = time.time()
         self.setProgress(1.0)
 
         # check if the length matches
@@ -368,6 +375,8 @@ class ReadTrackTask(log.Loggable, task.Task):
                 self.exception = ReturnCodeError(self._popen.returncode)
 
         self.quality = self._parser.getTrackQuality()
+        self.duration = end_time - self._start_time
+        self.speed = (offsetLength / 75.0) / self.duration
 
         self.stop()
         return
@@ -376,6 +385,7 @@ class ReadTrackTask(log.Loggable, task.Task):
 class ReadVerifyTrackTask(log.Loggable, task.MultiSeparateTask):
     """
     I am a task that reads and verifies a track using cdparanoia.
+    I also encode the track.
 
     The path where the file is stored can be changed if necessary, for
     example if the file name is too long.
@@ -384,6 +394,12 @@ class ReadVerifyTrackTask(log.Loggable, task.MultiSeparateTask):
     @ivar checksum:     the checksum of the track; set if they match.
     @ivar testchecksum: the test checksum of the track.
     @ivar copychecksum: the copy checksum of the track.
+    @ivar testspeed:    the test speed of the track, as a multiple of
+                        track duration.
+    @ivar copyspeed:    the copy speed of the track, as a multiple of
+                        track duration.
+    @ivar testduration: the test duration of the track, in seconds.
+    @ivar copyduration: the copy duration of the track, in seconds.
     @ivar peak:         the peak level of the track
     """
 
@@ -392,6 +408,10 @@ class ReadVerifyTrackTask(log.Loggable, task.MultiSeparateTask):
     copychecksum = None
     peak = None
     quality = None
+    testspeed = None
+    copyspeed = None
+    testduration = None
+    copyduration = None
 
     _tmpwavpath = None
     _tmppath = None
@@ -471,6 +491,10 @@ class ReadVerifyTrackTask(log.Loggable, task.MultiSeparateTask):
                     self.tasks[2].quality)
                 self.peak = self.tasks[4].peak
                 self.debug('peak: %r', self.peak)
+                self.testspeed = self.tasks[0].speed
+                self.copyspeed = self.tasks[2].speed
+                self.testduration = self.tasks[0].duration
+                self.copyduration = self.tasks[2].duration
 
                 self.testchecksum = c1 = self.tasks[1].checksum
                 self.copychecksum = c2 = self.tasks[3].checksum
@@ -502,7 +526,8 @@ class ReadVerifyTrackTask(log.Loggable, task.MultiSeparateTask):
                             self.path = common.shrinkPath(self.path)
                             shutil.move(self._tmppath, self.path)
                     except Exception, e:
-                        self.debug('Exception while moving to final path %r: %r',
+                        self.debug('Exception while moving to final path %r: '
+                            '%r',
                             self.path, log.getExceptionMessage(e))
                         self.exception = e
                 else:
@@ -513,3 +538,70 @@ class ReadVerifyTrackTask(log.Loggable, task.MultiSeparateTask):
             print 'WARNING: unhandled exception %r' % (e, )
 
         task.MultiSeparateTask.stop(self)
+
+_VERSION_RE = re.compile(
+    "^cdparanoia (?P<version>.+) release (?P<release>.+) \(.*\)")
+
+
+def getCdParanoiaVersion():
+    version = "(Unknown)"
+
+    try:
+        p = asyncsub.Popen(["cdparanoia", "-V"],
+                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE, close_fds=True)
+        version = asyncsub.recv_some(p, e=0, stderr=1)
+        vre = _VERSION_RE.search(version)
+        if vre and len(vre.groups()) == 2:
+            version = "%s %s" % (
+                vre.groupdict().get('version'),
+                vre.groupdict().get('release'))
+    except OSError, e:
+        import errno
+        if e.errno == errno.ENOENT:
+            raise common.MissingDependencyException('cdparanoia')
+        raise
+
+    return version
+
+
+_OK_RE = re.compile(r'Drive tests OK with Paranoia.')
+
+
+class AnalyzeTask(ctask.PopenTask):
+
+    logCategory = 'AnalyzeTask'
+    description = 'Analyzing drive caching behaviour'
+
+    defeatsCache = None
+
+    cwd = None
+
+    _output = []
+
+    def __init__(self, device=None):
+        # cdparanoia -A *always* writes cdparanoia.log
+        self.cwd = tempfile.mkdtemp(suffix='.morituri.cache')
+        self.command = ['cdparanoia', '-A']
+        if device:
+            self.command += ['-d', device]
+        
+    def commandMissing(self):
+        raise common.MissingDependencyException('cdparanoia')
+
+    def readbyteserr(self, bytes):
+        self._output.append(bytes)
+
+    def done(self):
+        if self.cwd:
+            shutil.rmtree(self.cwd)
+        output = "".join(self._output)
+        m = _OK_RE.search(output)
+        if m:
+            self.defeatsCache = True
+        else:
+            self.defeatsCache = False
+
+    def failed(self):
+        if self.cwd:
+            shutil.rmtree(self.cwd)
diff --git a/morituri/program/cdrdao.py b/morituri/program/cdrdao.py
index 0b1fb7b..15f90bf 100644
--- a/morituri/program/cdrdao.py
+++ b/morituri/program/cdrdao.py
@@ -23,14 +23,12 @@
 
 import re
 import os
-import signal
-import subprocess
 import tempfile
 
 from morituri.common import log, common
 from morituri.image import toc, table
+from morituri.common import task as ctask
 
-from morituri.extern import asyncsub
 from morituri.extern.task import task
 
 
@@ -244,7 +242,7 @@ class OutputParser(object, log.Loggable):
 # FIXME: handle errors
 
 
-class CDRDAOTask(task.Task):
+class CDRDAOTask(ctask.PopenTask):
     """
     I am a task base class that runs CDRDAO.
     """
@@ -258,97 +256,20 @@ class CDRDAOTask(task.Task):
         self.debug('creating CDRDAOTask')
 
     def start(self, runner):
-        task.Task.start(self, runner)
+        self.debug('Starting cdrdao with options %r', self.options)
+        self.command = ['cdrdao', ] + self.options
 
-        bufsize = 1024
-        try:
-            self._popen = asyncsub.Popen(["cdrdao", ] + self.options,
-                bufsize=bufsize,
-                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE, close_fds=True)
-        except OSError, e:
-            import errno
-            if e.errno == errno.ENOENT:
-                raise common.MissingDependencyException('cdrdao')
+        ctask.PopenTask.start(self, runner)
 
-            raise
+    def commandMissing(self):
+        raise common.MissingDependencyException('cdrdao')
 
-        self.debug('Started cdrdao with pid %d and options %r',
-            self._popen.pid, self.options)
-        self.debug('command: cdrdao %s', ' '.join(self.options))
 
-        self.schedule(1.0, self._read, runner)
-
-    def _read(self, runner):
-        try:
-            ret = self._popen.recv()
-
-            if ret:
-                self.log("read from stdout: %s", ret)
-                self.readbytesout(ret)
-
-            ret = self._popen.recv_err()
-
-            if ret:
-                self.log("read from stderr: %s", ret)
-                self.readbyteserr(ret)
-
-            if self._popen.poll() is None and self.runner:
-                # not finished yet
-                self.schedule(1.0, self._read, runner)
-                return
-
-            self._done()
-        except Exception, e:
-            self.debug('exception during _read()')
-            self.debug(log.getExceptionMessage(e))
-            self.setException(e)
-            self.stop()
-
-    def _done(self):
-            assert self._popen.returncode is not None, "No returncode"
-
-            if self._popen.returncode >= 0:
-                self.debug('Return code was %d', self._popen.returncode)
-            else:
-                self.debug('Terminated with signal %d',
-                    -self._popen.returncode)
-
-            self.setProgress(1.0)
-
-            if self._popen.returncode != 0:
-                if self.errors:
-                    raise DeviceOpenException("\n".join(self.errors))
-                else:
-                    raise ProgramFailedException(self._popen.returncode)
-            else:
-                self.done()
-
-            self.stop()
-            return
-
-    def abort(self):
-        self.debug('Aborting, sending SIGTERM to %d', self._popen.pid)
-        os.kill(self._popen.pid, signal.SIGTERM)
-        # self.stop()
-
-    def readbytesout(self, bytes):
-        """
-        Called when bytes have been read from stdout.
-        """
-        pass
-
-    def readbyteserr(self, bytes):
-        """
-        Called when bytes have been read from stderr.
-        """
-        pass
-
-    def done(self):
-        """
-        Called when cdrdao completed successfully.
-        """
-        raise NotImplementedError
+    def failed(self):
+        if self.errors:
+            raise DeviceOpenException("\n".join(self.errors))
+        else:
+            raise ProgramFailedException(self._popen.returncode)
 
 
 class DiscInfoTask(CDRDAOTask):
@@ -369,6 +290,7 @@ class DiscInfoTask(CDRDAOTask):
         @param device:  the device to rip from
         @type  device:  str
         """
+        self.debug('creating DiscInfoTask')
         CDRDAOTask.__init__(self)
 
         self.options = ['disk-info', ]
diff --git a/morituri/result/Makefile.in b/morituri/result/Makefile.in
index 013ee8c..760e9e1 100644
--- a/morituri/result/Makefile.in
+++ b/morituri/result/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
diff --git a/morituri/result/logger.py b/morituri/result/logger.py
index 7ae1871..cb03845 100644
--- a/morituri/result/logger.py
+++ b/morituri/result/logger.py
@@ -24,11 +24,15 @@ import time
 
 from morituri.common import common
 from morituri.configure import configure
+from morituri.result import result
 
 
-class MorituriLogger(object):
+class MorituriLogger(result.Logger):
 
     def log(self, ripResult, epoch=time.time()):
+        """
+        @type  ripResult: L{morituri.result.result.RipResult}
+        """
         lines = self.logRip(ripResult, epoch=epoch)
         return '\n'.join(lines)
 
@@ -52,6 +56,11 @@ class MorituriLogger(object):
         lines.append("Album: %s - %s" % (ripResult.artist, ripResult.title))
         lines.append("")
 
+        lines.append("CDDB disc id:           %s" % ripResult. table.getCDDBDiscId())
+        lines.append("MusicBrainz disc id:    %s" % ripResult. table.getMusicBrainzDiscId())
+        lines.append("MusicBrainz lookup URL: %s" % ripResult. table.getMusicBrainzSubmitURL())
+        lines.append("")
+
         # drive
         lines.append(
             "Drive: vendor %s, model %s" % (
@@ -105,6 +114,13 @@ class MorituriLogger(object):
             lines.append('')
 
         lines.append('  Peak level %.1f %%' % (trackResult.peak * 100.0))
+        if trackResult.copyspeed:
+            lines.append('  Extraction Speed (Copy) %.4f X' % (
+                trackResult.copyspeed))
+        if trackResult.testspeed:
+            lines.append('  Extraction Speed (Test) %.4f X' % (
+                trackResult.testspeed))
+
         if trackResult.copycrc is not None:
             lines.append('  Copy CRC %08X' % trackResult.copycrc)
         if trackResult.testcrc is not None:
@@ -116,6 +132,7 @@ class MorituriLogger(object):
         else:
             lines.append("  WARNING: no CRC check done")
 
+
         if trackResult.accurip:
             lines.append('  Accurately ripped (confidence %d) [%08X]' % (
                 trackResult.ARDBConfidence, trackResult.ARCRC))
diff --git a/morituri/result/result.py b/morituri/result/result.py
index 1d0f0eb..7d25707 100644
--- a/morituri/result/result.py
+++ b/morituri/result/result.py
@@ -20,10 +20,9 @@
 # You should have received a copy of the GNU General Public License
 # along with morituri.  If not, see <http://www.gnu.org/licenses/>.
 
+import pkg_resources
 import time
 
-from morituri.result import logger
-
 
 class TrackResult:
     """
@@ -61,6 +60,10 @@ class TrackResult:
 
     peak = 0.0
     quality = 0.0
+    testspeed = 0.0
+    copyspeed = 0.0
+    testduration = 0.0
+    copyduration = 0.0
     testcrc = None
     copycrc = None
     accurip = False # whether it's in the database
@@ -69,7 +72,7 @@ class TrackResult:
     ARDBConfidence = None
     ARDBMaxConfidence = None
 
-    classVersion = 2
+    classVersion = 3
 
 
 class RipResult:
@@ -80,6 +83,13 @@ class RipResult:
     @ivar offset: sample read offset
     @ivar table:  the full index table
     @type table:  L{morituri.image.table.Table}
+
+    @ivar vendor:  vendor of the CD drive
+    @ivar model:   model of the CD drive
+    @ivar release: release of the CD drive
+
+    @ivar cdrdaoVersion:     version of cdrdao used for the rip
+    @ivar cdparanoiaVersion: version of cdparanoia used for the rip
     """
 
     offset = 0
@@ -89,6 +99,20 @@ class RipResult:
 
     vendor = None
     model = None
+    release = None
+
+    cdrdaoVersion = None
+    cdparanoiaVersion = None
+    cdparanoiaDefeatsCache = None
+
+    gstreamerVersion = None
+    gstPythonVersion = None
+    encoderVersion = None
+
+    profileName = None
+    profilePipeline = None
+
+    classVersion = 3
 
     def __init__(self):
         self.tracks = []
@@ -117,6 +141,7 @@ class Logger(object):
         Create a log from the given ripresult.
 
         @param epoch:     when the log file gets generated
+        @type  epoch:     float
         @type  ripResult: L{RipResult}
 
         @rtype: str
@@ -124,5 +149,28 @@ class Logger(object):
         raise NotImplementedError
 
 
-def getLogger():
-    return logger.MorituriLogger()
+# A setuptools-like entry point
+
+
+class EntryPoint(object):
+    name = 'morituri'
+
+    def load(self):
+        from morituri.result import logger
+        return logger.MorituriLogger
+
+
+def getLoggers():
+    """
+    Get all logger plugins with entry point 'morituri.logger'.
+
+    @rtype: dict of C{str} -> C{Logger}
+    """
+    d = {}
+
+    pluggables = list(pkg_resources.iter_entry_points("morituri.logger"))
+    for entrypoint in [EntryPoint(), ] + pluggables:
+        plugin_class = entrypoint.load()
+        d[entrypoint.name] = plugin_class
+
+    return d
diff --git a/morituri/rip/Makefile.am b/morituri/rip/Makefile.am
index 4beacde..f7d32f6 100644
--- a/morituri/rip/Makefile.am
+++ b/morituri/rip/Makefile.am
@@ -6,6 +6,7 @@ morituri_PYTHON = \
 	__init__.py \
 	accurip.py \
 	cd.py \
+	common.py \
 	debug.py \
 	drive.py \
 	image.py \
diff --git a/morituri/rip/Makefile.in b/morituri/rip/Makefile.in
index e02157b..e98e419 100644
--- a/morituri/rip/Makefile.in
+++ b/morituri/rip/Makefile.in
@@ -135,6 +135,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
@@ -195,6 +196,7 @@ morituri_PYTHON = \
 	__init__.py \
 	accurip.py \
 	cd.py \
+	common.py \
 	debug.py \
 	drive.py \
 	image.py \
diff --git a/morituri/rip/cd.py b/morituri/rip/cd.py
index abb2732..ebe8283 100644
--- a/morituri/rip/cd.py
+++ b/morituri/rip/cd.py
@@ -26,16 +26,14 @@ import math
 import gobject
 gobject.threads_init()
 
-from morituri.common import logcommand, common, accurip
-from morituri.common import drive, program
+from morituri.common import logcommand, common, accurip, gstreamer
+from morituri.common import drive, program, cache
 from morituri.result import result
-from morituri.program import cdrdao
+from morituri.program import cdrdao, cdparanoia
+from morituri.rip import common as rcommon
 
-from morituri.extern.command import command
 from morituri.extern.task import task
 
-DEFAULT_TRACK_TEMPLATE = u'%A - %d/%t. %a - %n'
-DEFAULT_DISC_TEMPLATE = u'%A - %d/%A - %d'
 
 MAX_TRIES = 5
 
@@ -43,58 +41,51 @@ MAX_TRIES = 5
 class Rip(logcommand.LogCommand):
     summary = "rip CD"
 
+    # see morituri.common.program.Program.getPath for expansion
     description = """
 Rips a CD.
 
-Tracks are named according to the track template, filling in the variables
-and expanding the file extension.  Variables are:
- - %t: track number
- - %a: track artist
- - %n: track title
- - %s: track sort name
-
-Disc files (.cue, .log, .m3u) are named according to the disc template,
-filling in the variables and expanding the file extension. Variables are:
- - %A: album artist
- - %S: album sort name
- - %d: disc title
- - %y: release year
+%s
 
 Paths to track files referenced in .cue and .m3u files will be made
 relative to the directory of the disc files.
 
 All files will be created relative to the given output directory.
 Log files will log the path to tracks relative to this directory.
-"""
+""" % rcommon.TEMPLATE_DESCRIPTION
 
     def addOptions(self):
+        loggers = result.getLoggers().keys()
+
+        self.parser.add_option('-L', '--logger',
+            action="store", dest="logger",
+            default='morituri',
+            help="logger to use "
+                "(default '%default', choose from '" +
+                    "', '".join(loggers) + "')")
         # FIXME: get from config
-        default = 0
         self.parser.add_option('-o', '--offset',
             action="store", dest="offset",
-            help="sample read offset (defaults to %d)" % default,
-            default=default)
+            help="sample read offset (defaults to configured value, or 0)")
         self.parser.add_option('-O', '--output-directory',
             action="store", dest="output_directory",
-            help="output directory "
-                "(defaults to absolute path to current directory) "
-        )
+            help="output directory; will be included in file paths in result "
+                "files "
+                "(defaults to absolute path to current directory; set to "
+                "empty if you want paths to be relative instead) ")
+        self.parser.add_option('-W', '--working-directory',
+            action="store", dest="working_directory",
+            help="working directory; morituri will change to this directory "
+                "and files will be created relative to it when not absolute ")
         # FIXME: have a cache of these pickles somewhere
         self.parser.add_option('-T', '--toc-pickle',
             action="store", dest="toc_pickle",
-            help="pickle to use for reading and writing the TOC",
-            default=default)
-        # FIXME: get from config
-        self.parser.add_option('', '--track-template',
-            action="store", dest="track_template",
-            help="template for track file naming (default %default)",
-            default=DEFAULT_TRACK_TEMPLATE)
-        self.parser.add_option('', '--disc-template',
-            action="store", dest="disc_template",
-            help="template for disc file naming (default %default)",
-            default=DEFAULT_DISC_TEMPLATE)
+            help="pickle to use for reading and writing the TOC")
+
+        rcommon.addTemplate(self)
+
         self.parser.add_option('-R', '--release-id',
-            action="store", dest="release",
+            action="store", dest="release_id",
             help="MusicBrainz release id to match to (if there are multiple)")
 
         default = 'flac'
@@ -116,8 +107,27 @@ Log files will log the path to tracks relative to this directory.
         options.track_template = options.track_template.decode('utf-8')
         options.disc_template = options.disc_template.decode('utf-8')
 
+        if options.offset is None:
+            info = drive.getDeviceInfo(self.parentCommand.options.device)
+            if info:
+                try:
+                    options.offset = self.getRootCommand(
+                        ).config.getReadOffset(*info)
+                    self.stdout.write("Using configured read offset %d\n" %
+                        options.offset)
+                except KeyError:
+                    pass
+
+        if options.offset is None:
+            options.offset = 0
+            self.stdout.write("Using fallback read offset %d\n" %
+                        options.offset)
+        if self.options.output_directory is None:
+            self.options.output_directory = os.getcwd()
+
     def do(self, args):
-        prog = program.Program(record=self.getRootCommand().record)
+        prog = program.Program(record=self.getRootCommand().record,
+            stdout=self.stdout)
         runner = task.SyncRunner()
 
         def function(r, t):
@@ -127,22 +137,35 @@ Log files will log the path to tracks relative to this directory.
         device = self.parentCommand.options.device
         self.stdout.write('Checking device %s\n' % device)
 
+        prog.setWorkingDirectory(self.options.working_directory)
         prog.loadDevice(device)
         prog.unmountDevice(device)
 
+        version = None
+
         # first, read the normal TOC, which is fast
-        ptoc = common.Persister(self.options.toc_pickle or None)
+        ptoc = cache.Persister(self.options.toc_pickle or None)
         if not ptoc.object:
-            t = cdrdao.ReadTOCTask(device=device)
-            function(runner, t)
+            tries = 0
+            while True:
+                tries += 1
+                t = cdrdao.ReadTOCTask(device=device)
+                try:
+                    function(runner, t)
+                    break
+                except:
+                    if tries > 3:
+                        raise
+                    self.debug('failed to read TOC after %d tries, retrying' % tries)
+
             version = t.tasks[1].parser.version
             from pkg_resources import parse_version as V
             # we've built a cdrdao 1.2.3rc2 modified package with the patch
             if V(version) < V('1.2.3rc2p1'):
-                self.stdout.write('''
-Warning: cdrdao older than 1.2.3 has a pre-gap length bug.
-See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=102171
-''')
+                self.stdout.write('Warning: cdrdao older than 1.2.3 has a '
+                    'pre-gap length bug.\n'
+                    'See http://sourceforge.net/tracker/?func=detail'
+                    '&aid=604751&group_id=2171&atid=102171\n')
             ptoc.persist(t.table)
         ittoc = ptoc.object
         assert ittoc.hasTOC()
@@ -157,7 +180,7 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
             ittoc.getMusicBrainzSubmitURL())
 
         prog.metadata = prog.getMusicBrainz(ittoc, mbdiscid,
-            self.options.release)
+            release=self.options.release_id)
 
         if not prog.metadata:
             # fall back to FreeDB for lookup
@@ -183,7 +206,7 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
             "full table's AR URL %s differs from toc AR URL %s" % (
             itable.getAccurateRipURL(), ittoc.getAccurateRipURL())
 
-        prog.outdir = (self.options.output_directory or os.getcwd())
+        prog.outdir = self.options.output_directory
         prog.outdir = prog.outdir.decode('utf-8')
         # here to avoid import gst eating our options
         from morituri.common import encode
@@ -191,6 +214,15 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
 
         # result
 
+        prog.result.cdrdaoVersion = version
+        prog.result.cdparanoiaVersion = cdparanoia.getCdParanoiaVersion()
+        info = drive.getDeviceInfo(self.parentCommand.options.device)
+        if info:
+            try:
+                prog.result.cdparanoiaDefeatsCache = self.getRootCommand(
+                    ).config.getDefeatsCache(*info)
+            except KeyError, e:
+                self.debug('Got key error: %r' % (e, ))
         prog.result.offset = int(self.options.offset)
         prog.result.artist = prog.metadata and prog.metadata.artist \
             or 'Unknown Artist'
@@ -199,13 +231,22 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
         # cdio is optional for now
         try:
             import cdio
-            _, prog.result.vendor, prog.result.model, __ = \
+            _, prog.result.vendor, prog.result.model, prog.result.release = \
                 cdio.Device(device).get_hwinfo()
         except ImportError:
             self.stdout.write(
                 'WARNING: pycdio not installed, cannot identify drive\n')
             prog.result.vendor = 'Unknown'
             prog.result.model = 'Unknown'
+            prog.result.release = 'Unknown'
+
+        prog.result.profileName = profile.name
+        prog.result.profilePipeline = profile.pipeline
+        elementFactory = profile.pipeline.split(' ')[0]
+        prog.result.gstreamerVersion = gstreamer.gstreamerVersion()
+        prog.result.gstPythonVersion = gstreamer.gstPythonVersion()
+        prog.result.encoderVersion = gstreamer.elementFactoryVersion(
+            elementFactory)
 
         # FIXME: turn this into a method
 
@@ -221,7 +262,7 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
                     trackResult.filename)
 
             path = prog.getPath(prog.outdir, self.options.track_template,
-                mbdiscid, number) + '.' + profile.extension
+                mbdiscid, number, profile=profile) + '.' + profile.extension
             self.debug('ripIfNotRipped: path %r' % path)
             trackResult.number = number
 
@@ -246,7 +287,11 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
                     os.unlink(path)
 
             if not os.path.exists(path):
+                self.debug('path %r does not exist, ripping...' % path)
                 tries = 0
+                # we reset durations for test and copy here
+                trackResult.testduration = 0.0
+                trackResult.copyduration = 0.0
                 self.stdout.write('Ripping track %d of %d: %s\n' % (
                     number, len(itable.tracks),
                     os.path.basename(path).encode('utf-8')))
@@ -324,7 +369,7 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
 
         ### write disc files
         discName = prog.getPath(prog.outdir, self.options.disc_template,
-            mbdiscid, 0)
+            mbdiscid, 0, profile=profile)
         dirname = os.path.dirname(discName)
         if not os.path.exists(dirname):
             os.makedirs(dirname)
@@ -355,7 +400,7 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
                 continue
 
             path = prog.getPath(prog.outdir, self.options.track_template,
-                mbdiscid, i + 1) + '.' + profile.extension
+                mbdiscid, i + 1, profile=profile) + '.' + profile.extension
             writeFile(handle, path,
                 itable.getTrackLength(i + 1) / common.FRAMES_PER_SECOND)
 
@@ -365,8 +410,8 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
         url = ittoc.getAccurateRipURL()
         self.stdout.write("AccurateRip URL %s\n" % url)
 
-        cache = accurip.AccuCache()
-        responses = cache.retrieve(url)
+        accucache = accurip.AccuCache()
+        responses = accucache.retrieve(url)
 
         if not responses:
             self.stdout.write('Album not found in AccurateRip database\n')
@@ -385,9 +430,15 @@ See  http://sourceforge.net/tracker/?func=detail&aid=604751&group_id=2171&atid=1
 
         self.stdout.write("\n".join(prog.getAccurateRipResults()) + "\n")
 
+        prog.saveRipResult()
+
         # write log file
-        logger = result.getLogger()
-        prog.writeLog(discName, logger)
+        try:
+            klazz = result.getLoggers()[self.options.logger]
+            prog.writeLog(discName, klazz())
+        except KeyError:
+            self.stderr.write("No logger named %s found!\n" % (
+                self.options.logger))
 
         prog.ejectDevice(device)
 
diff --git a/morituri/rip/common.py b/morituri/rip/common.py
new file mode 100644
index 0000000..5b59e19
--- /dev/null
+++ b/morituri/rip/common.py
@@ -0,0 +1,41 @@
+# -*- Mode: Python -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+# options and arguments shared between commands
+
+DEFAULT_TRACK_TEMPLATE = u'%r/%A - %d/%t. %a - %n'
+DEFAULT_DISC_TEMPLATE = u'%r/%A - %d/%A - %d'
+
+TEMPLATE_DESCRIPTION = '''
+Tracks are named according to the track template, filling in the variables
+and adding the file extension.  Variables exclusive to the track template are:
+ - %t: track number
+ - %a: track artist
+ - %n: track title
+ - %s: track sort name
+
+Disc files (.cue, .log, .m3u) are named according to the disc template,
+filling in the variables and adding the file extension. Variables for both
+disc and track template are:
+ - %A: album artist
+ - %S: album sort name
+ - %d: disc title
+ - %y: release year
+ - %r: release type, lowercase
+ - %R: Release type, normal case
+ - %x: audio extension
+
+'''
+
+def addTemplate(self):
+    # FIXME: get from config
+    self.parser.add_option('', '--track-template',
+        action="store", dest="track_template",
+        help="template for track file naming (default %default)",
+        default=DEFAULT_TRACK_TEMPLATE)
+    self.parser.add_option('', '--disc-template',
+        action="store", dest="disc_template",
+        help="template for disc file naming (default %default)",
+        default=DEFAULT_DISC_TEMPLATE)
+
+
diff --git a/morituri/rip/debug.py b/morituri/rip/debug.py
index ff7dda2..6614f55 100644
--- a/morituri/rip/debug.py
+++ b/morituri/rip/debug.py
@@ -21,8 +21,78 @@
 # along with morituri.  If not, see <http://www.gnu.org/licenses/>.
 
 from morituri.common import logcommand
+from morituri.result import result
 
-from morituri.common import task
+from morituri.common import task, cache
+
+
+class RCList(logcommand.LogCommand):
+
+    name = "list"
+    summary = "list cached results"
+
+    def do(self, args):
+        self._cache = cache.ResultCache()
+        results = []
+
+        for i in self._cache.getIds():
+            r = self._cache.getRipResult(i, create=False)
+            results.append((r.object.artist, r.object.title, i))
+
+        results.sort()
+
+        for artist, title, cddbid in results:
+            if artist is None:
+                artist = '(None)'
+            if title is None:
+                title = '(None)'
+
+            self.stdout.write('%s: %s - %s\n' % (
+                cddbid, artist.encode('utf-8'), title.encode('utf-8')))
+        
+
+class RCLog(logcommand.LogCommand):
+
+    name = "log"
+    summary = "write a log file for the cached result"
+
+    def addOptions(self):
+        loggers = result.getLoggers().keys()
+
+        self.parser.add_option('-L', '--logger',
+            action="store", dest="logger",
+            default='morituri',
+            help="logger to use "
+                "(default '%default', choose from '" +
+                    "', '".join(loggers) + "')")
+
+    def do(self, args):
+        self._cache = cache.ResultCache()
+
+        persisted = self._cache.getRipResult(args[0], create=False)
+
+        if not persisted:
+            self.stderr.write(
+                'Could not find a result for cddb disc id %s\n' % args[0])
+            return 3
+
+        try:
+            klazz = result.getLoggers()[self.options.logger]
+        except KeyError:
+            self.stderr.write("No logger named %s found!\n" % (
+                self.options.logger))
+            return 3
+
+        logger = klazz()
+        self.stdout.write(logger.log(persisted.object).encode('utf-8'))
+ 
+
+class ResultCache(logcommand.LogCommand):
+
+    summary = "debug result cache"
+    aliases = ['rc', ]
+
+    subCommandClasses = [RCList, RCLog, ]
 
 
 class Checksum(logcommand.LogCommand):
@@ -85,10 +155,36 @@ class Encode(logcommand.LogCommand):
 
         runner.run(encodetask)
 
+class Tag(logcommand.LogCommand):
+
+    summary = "run a tag reading task"
+
+    def do(self, args):
+        try:
+            path = unicode(args[0])
+        except IndexError:
+            self.stdout.write('Please specify an input file.\n')
+            return 3
+
+        runner = task.SyncRunner()
+
+        from morituri.common import encode
+        self.debug('Reading tags from %s' % path.encode('utf-8'))
+        tagtask = encode.TagReadTask(path)
+
+        runner.run(tagtask)
+
+        for key in tagtask.taglist.keys():
+            self.stdout.write('%s: %r\n' % (key, tagtask.taglist[key]))
+
 
 class MusicBrainzNGS(logcommand.LogCommand):
 
+    usage = "[MusicBrainz disc id]"
     summary = "examine MusicBrainz NGS info"
+    description = """Look up a MusicBrainz disc id and output information.
+
+Example disc id: KnpGsLhvH.lPrNc1PBL21lb9Bg4-"""
 
     def do(self, args):
         try:
@@ -105,6 +201,7 @@ class MusicBrainzNGS(logcommand.LogCommand):
             self.stdout.write('- Release %d:\n' % (i + 1, ))
             self.stdout.write('    Artist: %s\n' % md.artist.encode('utf-8'))
             self.stdout.write('    Title:  %s\n' % md.title.encode('utf-8'))
+            self.stdout.write('    Type:   %s\n' % md.releaseType.encode('utf-8'))
             self.stdout.write('    URL: %s\n' % md.url)
             self.stdout.write('    Tracks: %d\n' % len(md.tracks))
             for j, track in enumerate(md.tracks):
@@ -117,4 +214,4 @@ class Debug(logcommand.LogCommand):
 
     summary = "debug internals"
 
-    subCommandClasses = [Checksum, Encode, MusicBrainzNGS]
+    subCommandClasses = [Checksum, Encode, Tag, MusicBrainzNGS, ResultCache]
diff --git a/morituri/rip/drive.py b/morituri/rip/drive.py
index cae3873..8aad4cd 100644
--- a/morituri/rip/drive.py
+++ b/morituri/rip/drive.py
@@ -20,7 +20,61 @@
 # You should have received a copy of the GNU General Public License
 # along with morituri.  If not, see <http://www.gnu.org/licenses/>.
 
+import os
+
+from morituri.extern.task import task
+
 from morituri.common import logcommand, drive
+from morituri.program import cdparanoia
+
+
+class Analyze(logcommand.LogCommand):
+
+    summary = "analyze caching behaviour of drive"
+
+    def addOptions(self):
+        self.parser.add_option('-d', '--device',
+            action="store", dest="device",
+            help="CD-DA device")
+
+    def handleOptions(self, options):
+        if not options.device:
+            drives = drive.getAllDevicePaths()
+            if not drives:
+                self.error('No CD-DA drives found!')
+                return 3
+
+            # pick the first
+            self.options.device = drives[0]
+
+        # this can be a symlink to another device
+        self.options.device = os.path.realpath(self.options.device)
+
+    def do(self, args):
+        runner = task.SyncRunner()
+        t = cdparanoia.AnalyzeTask(self.options.device)
+        runner.run(t)
+
+        if t.defeatsCache is None:
+            self.stdout.write(
+                'Cannot analyze the drive.  Is there a CD in it?\n')
+            return
+        if not t.defeatsCache:
+            self.stdout.write(
+                'cdparanoia cannot defeat the audio cache on this drive.\n')
+        else:
+            self.stdout.write(
+                'cdparanoia can defeat the audio cache on this drive.\n')
+
+        info = drive.getDeviceInfo(self.options.device)
+        if not info:
+            return
+        
+        self.stdout.write(
+            'Adding drive cache behaviour to configuration file.\n')
+
+        self.getRootCommand().config.setDefeatsCache(info[0], info[1], info[2],
+            t.defeatsCache)
 
 
 class List(logcommand.LogCommand):
@@ -31,30 +85,51 @@ class List(logcommand.LogCommand):
         paths = drive.getAllDevicePaths()
 
         if not paths:
-            print 'No drives found.'
-            print 'Create /dev/cdrom if you have a CD drive, '
-            print 'or install pycdio for better detection.'
+            self.stdout.write('No drives found.\n')
+            self.stdout.write('Create /dev/cdrom if you have a CD drive, \n')
+            self.stdout.write('or install pycdio for better detection.\n')
 
             return
 
         try:
-            import cdio
+            import cdio as _
         except ImportError:
-            print 'Install pycdio for vendor/model/release detection.'
+            self.stdout.write(
+                'Install pycdio for vendor/model/release detection.\n')
             return
 
         for path in paths:
-            device = cdio.Device(path)
-            ok, vendor, model, release = device.get_hwinfo()
-            print "drive: %s, vendor: %s, model: %s, release: %s" % (
-                path, vendor, model, release)
+            vendor, model, release = drive.getDeviceInfo(path)
+            self.stdout.write(
+                "drive: %s, vendor: %s, model: %s, release: %s\n" % (
+                path, vendor, model, release))
+
+            try:
+                offset = self.getRootCommand().config.getReadOffset(
+                    vendor, model, release)
+                self.stdout.write(
+                    "       Configured read offset: %d\n" % offset)
+            except KeyError:
+                self.stdout.write(
+                    "       No read offset found.  Run 'rip offset find'\n")
+
+            try:
+                defeats = self.getRootCommand().config.getDefeatsCache(
+                    vendor, model, release)
+                self.stdout.write(
+                    "       Can defeat audio cache: %s\n" % defeats)
+            except KeyError:
+                self.stdout.write(
+                    "       Unknown whether audio cache can be defeated. "
+                    "Run 'rip drive analyze'\n")
+
 
         if not paths:
-            print 'No drives found.'
+            self.stdout.write('No drives found.\n')
 
 
 class Drive(logcommand.LogCommand):
 
     summary = "handle drives"
 
-    subCommandClasses = [List, ]
+    subCommandClasses = [Analyze, List, ]
diff --git a/morituri/rip/image.py b/morituri/rip/image.py
index 589fbfc..a9afef8 100644
--- a/morituri/rip/image.py
+++ b/morituri/rip/image.py
@@ -22,7 +22,7 @@
 
 import os
 
-from morituri.common import logcommand, accurip, program, encode
+from morituri.common import logcommand, accurip, program, encode, renamer
 from morituri.image import image
 from morituri.result import result
 
@@ -103,18 +103,26 @@ class Retag(logcommand.LogCommand):
 
     summary = "retag image files"
 
+    def addOptions(self):
+        self.parser.add_option('-R', '--release-id',
+            action="store", dest="release_id",
+            help="MusicBrainz release id to match to (if there are multiple)")
+
+
     def do(self, args):
-        prog = program.Program()
+        prog = program.Program(stdout=self.stdout)
         runner = task.SyncRunner()
 
         for arg in args:
-            print 'Retagging image %r' % arg
+            self.stdout.write('Retagging image %r\n' % arg)
             arg = arg.decode('utf-8')
             cueImage = image.Image(arg)
             cueImage.setup(runner)
 
             mbdiscid = cueImage.table.getMusicBrainzDiscId()
-            prog.metadata = prog.getMusicBrainz(cueImage.table, mbdiscid)
+            self.stdout.write('MusicBrainz disc id is %s\n' % mbdiscid)
+            prog.metadata = prog.getMusicBrainz(cueImage.table, mbdiscid,
+                release=self.options.release_id)
 
             if not prog.metadata:
                 print 'Not in MusicBrainz database, skipping'
@@ -124,10 +132,12 @@ class Retag(logcommand.LogCommand):
             prog.cuePath = arg
             prog.result = result.RipResult()
             for track in cueImage.table.tracks:
-                path = track.indexes[1].path
+                path = cueImage.getRealPath(track.indexes[1].path)
+
                 taglist = prog.getTagList(track.number)
-                self.debug('possibly retagging %r with taglist %r',
-                    path, taglist)
+                self.debug(
+                    'possibly retagging %r from cue path %r with taglist %r',
+                    path, arg, taglist)
                 t = encode.SafeRetagTask(path, taglist)
                 runner.run(t)
                 path = os.path.basename(path)
@@ -137,6 +147,56 @@ class Retag(logcommand.LogCommand):
                     print '%s already tagged correctly' % path
             print
 
+class Rename(logcommand.LogCommand):
+
+    summary = "rename image and all files based on metadata"
+
+    def addOptions(self):
+        self.parser.add_option('-R', '--release-id',
+            action="store", dest="release_id",
+            help="MusicBrainz release id to match to (if there are multiple)")
+
+
+    def do(self, args):
+        prog = program.Program(stdout=self.stdout)
+        runner = task.SyncRunner()
+
+        for arg in args:
+            self.stdout.write('Renaming image %r\n' % arg)
+            arg = arg.decode('utf-8')
+            cueImage = image.Image(arg)
+            cueImage.setup(runner)
+
+            mbdiscid = cueImage.table.getMusicBrainzDiscId()
+
+            operator = renamer.Operator(statePath, mbdiscid)
+
+            self.stdout.write('MusicBrainz disc id is %s\n' % mbdiscid)
+            prog.metadata = prog.getMusicBrainz(cueImage.table, mbdiscid,
+                release=self.options.release_id)
+
+            if not prog.metadata:
+                print 'Not in MusicBrainz database, skipping'
+                continue
+
+            # FIXME: this feels like we're poking at internals.
+            prog.cuePath = arg
+            prog.result = result.RipResult()
+            for track in cueImage.table.tracks:
+                path = cueImage.getRealPath(track.indexes[1].path)
+
+                taglist = prog.getTagList(track.number)
+                self.debug(
+                    'possibly retagging %r from cue path %r with taglist %r',
+                    path, arg, taglist)
+                t = encode.SafeRetagTask(path, taglist)
+                runner.run(t)
+                path = os.path.basename(path)
+                if t.changed:
+                    print 'Retagged %s' % path
+                else:
+                    print '%s already tagged correctly' % path
+            print
 
 class Verify(logcommand.LogCommand):
 
diff --git a/morituri/rip/main.py b/morituri/rip/main.py
index 523ccef..bab3b8c 100644
--- a/morituri/rip/main.py
+++ b/morituri/rip/main.py
@@ -1,9 +1,11 @@
 # -*- Mode: Python -*-
 # vi:si:et:sw=4:sts=4:ts=4
 
+import os
 import sys
+import pkg_resources
 
-from morituri.common import log, logcommand, common
+from morituri.common import log, logcommand, common, config
 
 from morituri.rip import cd, offset, drive, image, accurip, debug
 
@@ -12,6 +14,20 @@ from morituri.extern.task import task
 
 
 def main(argv):
+    # load plugins
+
+    from morituri.configure import configure
+    pluginsdir = configure.pluginsdir
+    homepluginsdir = os.path.join(os.path.expanduser('~'),
+        '.morituri', 'plugins')
+
+    distributions, errors = pkg_resources.working_set.find_plugins(
+        pkg_resources.Environment([pluginsdir, homepluginsdir]))
+    if errors:
+        log.warning('errors finding plugins: %r', errors)
+    log.debug('mapping distributions %r', distributions)
+    map(pkg_resources.working_set.add, distributions)
+
     c = Rip()
     try:
         ret = c.parse(argv)
@@ -88,6 +104,8 @@ You can get help on subcommands by using the -h option to the subcommand.
 
         self.record = options.record
 
+        self.config = config.Config()
+
     def parse(self, argv):
         log.debug("morituri", "rip %s" % " ".join(argv))
         logcommand.LogCommand.parse(self, argv)
diff --git a/morituri/rip/offset.py b/morituri/rip/offset.py
index 39f55a6..43e3131 100644
--- a/morituri/rip/offset.py
+++ b/morituri/rip/offset.py
@@ -188,8 +188,7 @@ CD in the AccurateRip database."""
                         count += 1
 
                 if count == len(table.tracks):
-                    self.stdout.write('\nRead offset of device is: %d.\n' %
-                        offset)
+                    self._foundOffset(device, offset)
                     return 0
                 else:
                     self.stdout.write(
@@ -225,6 +224,19 @@ CD in the AccurateRip database."""
         os.unlink(path)
         return "%08x" % t.checksum
 
+    def _foundOffset(self, device, offset):
+        self.stdout.write('\nRead offset of device is: %d.\n' %
+            offset)
+
+        info = drive.getDeviceInfo(device)
+        if not info:
+            return
+
+        self.stdout.write('Adding read offset to configuration file.\n')
+
+        self.getRootCommand().config.setReadOffset(info[0], info[1], info[2],
+            offset)
+
 
 class Offset(logcommand.LogCommand):
     summary = "handle drive offsets"
diff --git a/morituri/test/Makefile.am b/morituri/test/Makefile.am
index b12725d..2eb066a 100644
--- a/morituri/test/Makefile.am
+++ b/morituri/test/Makefile.am
@@ -4,8 +4,13 @@ EXTRA_DIST = \
 	__init__.py \
 	common.py \
 	test_common_accurip.py \
+	test_common_cache.py \
 	test_common_checksum.py \
+	test_common_common.py \
+	test_common_config.py \
 	test_common_drive.py \
+	test_common_encode.py \
+	test_common_gstreamer.py \
 	test_common_musicbrainzngs.py \
 	test_common_program.py \
 	test_common_renamer.py \
@@ -41,7 +46,12 @@ EXTRA_DIST = \
 	cdparanoia.progress.error \
 	cdrdao.readtoc.progress \
 	silentalarm.result.pickle \
-	track.flac
+	track.flac \
+	cache/result/fe105a11.pickle \
+	cdparanoia/MATSHITA.cdparanoia-A.log \
+	cdparanoia/MATSHITA.cdparanoia-A.stderr \
+	cdparanoia/PX-L890SA.cdparanoia-A.log \
+	cdparanoia/PX-L890SA.cdparanoia-A.stderr
 
 
 # re-generation of test files when needed
diff --git a/morituri/test/Makefile.in b/morituri/test/Makefile.in
index 8ecb3c8..5fe1d49 100644
--- a/morituri/test/Makefile.in
+++ b/morituri/test/Makefile.in
@@ -104,6 +104,7 @@ PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
 PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
 PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
 PATH_SEPARATOR = @PATH_SEPARATOR@
+PLUGINSDIR = @PLUGINSDIR@
 PYCHECKER = @PYCHECKER@
 PYTHON = @PYTHON@
 PYTHONLIBDIR = @PYTHONLIBDIR@
@@ -163,8 +164,13 @@ EXTRA_DIST = \
 	__init__.py \
 	common.py \
 	test_common_accurip.py \
+	test_common_cache.py \
 	test_common_checksum.py \
+	test_common_common.py \
+	test_common_config.py \
 	test_common_drive.py \
+	test_common_encode.py \
+	test_common_gstreamer.py \
 	test_common_musicbrainzngs.py \
 	test_common_program.py \
 	test_common_renamer.py \
@@ -200,7 +206,12 @@ EXTRA_DIST = \
 	cdparanoia.progress.error \
 	cdrdao.readtoc.progress \
 	silentalarm.result.pickle \
-	track.flac
+	track.flac \
+	cache/result/fe105a11.pickle \
+	cdparanoia/MATSHITA.cdparanoia-A.log \
+	cdparanoia/MATSHITA.cdparanoia-A.stderr \
+	cdparanoia/PX-L890SA.cdparanoia-A.log \
+	cdparanoia/PX-L890SA.cdparanoia-A.stderr
 
 all: all-am
 
diff --git a/morituri/test/cache/result/fe105a11.pickle b/morituri/test/cache/result/fe105a11.pickle
new file mode 100644
index 0000000..97c91cc
Binary files /dev/null and b/morituri/test/cache/result/fe105a11.pickle differ
diff --git a/morituri/test/cdparanoia/MATSHITA.cdparanoia-A.log b/morituri/test/cdparanoia/MATSHITA.cdparanoia-A.log
new file mode 100644
index 0000000..e5f18b1
--- /dev/null
+++ b/morituri/test/cdparanoia/MATSHITA.cdparanoia-A.log
@@ -0,0 +1,373 @@
+cdparanoia -A 
+cdparanoia III release 10.2 (September 11, 2008)
+
+Using cdda library version: 10.2
+Using paranoia library version: 10.2
+
+Attempting to set cdrom to full speed... 
+	drive returned OK.
+
+=================== Checking drive cache/timing behavior ===================
+
+Seek/read timing:
+
+	[45:24.28]: 
+204328:1:46 204329:27:33 204356:27:35 204383:27:33 204410:27:35 204437:27:33 204464:27:35 204491:27:33 204518:27:35 204545:27:33 204572:27:35 204599:27:32 204626:27:35 204653:27:33 204680:27:35 204707:27:33 204734:27:35 204761:27:33 204788:27:35 204815:27:33 204842:27:35 204869:27:33 204896:27:35 204923:27:32 204950:27:35 204977:27:33 205004:27:35 205031:27:33 205058:27:35 205085:27:33 205112:27:35 205139:27:33 205166:27:35 205193:27:33 205220:27:35 205247:27:33 205274:27:35 205301:27:33 
+	Initial seek latency (1000 sectors): 46ms
+	Average read latency: 1.26ms/sector (raw speed: 10.6x)
+	Read latency standard deviation: 0.04ms/sector
+	[45:24.27]: 
+204327:1:45 204328:27:33 204355:27:35 204382:27:33 204409:27:35 204436:27:33 204463:27:35 204490:27:33 204517:27:35 204544:27:32 204571:27:35 204598:27:32 204625:27:35 204652:27:33 204679:27:35 204706:27:33 204733:27:35 204760:27:33 204787:27:35 204814:27:33 204841:27:35 204868:27:32 204895:27:35 204922:27:32 204949:27:35 204976:27:33 205003:27:35 205030:27:33 205057:27:35 205084:27:33 205111:27:35 205138:27:33 205165:27:35 205192:27:33 205219:27:35 205246:27:33 205273:27:35 205300:27:33 
+	Initial seek latency (1000 sectors): 45ms
+	Average read latency: 1.25ms/sector (raw speed: 10.6x)
+	Read latency standard deviation: 0.04ms/sector
+	[45:24.26]: 
+204326:1:45 204327:27:33 204354:27:35 204381:27:33 204408:27:35 204435:27:33 204462:27:35 204489:27:33 204516:27:35 204543:27:33 204570:27:35 204597:27:33 204624:27:35 204651:27:33 204678:27:35 204705:27:33 204732:27:35 204759:27:33 204786:27:35 204813:27:33 204840:27:35 204867:27:33 204894:27:35 204921:27:33 204948:27:35 204975:27:33 205002:27:35 205029:27:33 205056:27:35 205083:27:32 205110:27:35 205137:27:33 205164:27:35 205191:27:33 205218:27:35 205245:27:33 205272:27:35 205299:27:32 
+	Initial seek latency (1000 sectors): 45ms
+	Average read latency: 1.26ms/sector (raw speed: 10.6x)
+	Read latency standard deviation: 0.04ms/sector
+	[45:24.25]: 
+204325:1:44 204326:27:33 204353:27:35 204380:27:33 204407:27:35 204434:27:32 204461:27:35 204488:27:33 204515:27:35 204542:27:33 204569:27:35 204596:27:33 204623:27:35 204650:27:33 204677:27:35 204704:27:33 204731:27:35 204758:27:32 204785:27:35 204812:27:33 204839:27:35 204866:27:33 204893:27:35 204920:27:33 204947:27:35 204974:27:33 205001:27:35 205028:27:33 205055:27:35 205082:27:33 205109:27:35 205136:27:33 205163:27:35 205190:27:32 205217:27:35 205244:27:33 205271:27:35 205298:27:33 
+	Initial seek latency (1000 sectors): 44ms
+	Average read latency: 1.26ms/sector (raw speed: 10.6x)
+	Read latency standard deviation: 0.04ms/sector
+	[45:24.24]: 
+204324:1:45 204325:27:33 204352:27:35 204379:27:32 204406:27:35 204433:27:33 204460:27:35 204487:27:33 204514:27:35 204541:27:33 204568:27:35 204595:27:32 204622:27:35 204649:27:33 204676:27:35 204703:27:33 204730:27:35 204757:27:33 204784:27:35 204811:27:32 204838:27:35 204865:27:33 204892:27:35 204919:27:33 204946:27:35 204973:27:33 205000:27:35 205027:27:33 205054:27:35 205081:27:33 205108:27:35 205135:27:33 205162:27:35 205189:27:33 205216:27:35 205243:27:33 205270:27:35 205297:27:33 
+	Initial seek latency (1000 sectors): 45ms
+	Average read latency: 1.26ms/sector (raw speed: 10.6x)
+	Read latency standard deviation: 0.04ms/sector
+	[40:00.00]: 
+180000:1:50 180001:27:34 180028:27:37 180055:27:34 180082:27:37 180109:27:34 180136:27:37 180163:27:34 180190:27:37 180217:27:34 180244:27:37 180271:27:34 180298:27:37 180325:27:34 180352:27:37 180379:27:34 180406:27:37 180433:27:34 180460:27:37 180487:27:34 180514:27:37 180541:27:34 180568:27:37 180595:27:34 180622:27:37 180649:27:34 180676:27:37 180703:27:34 180730:27:37 180757:27:34 180784:27:37 180811:27:34 180838:27:37 180865:27:34 180892:27:37 180919:27:34 180946:27:37 180973:27:34 
+	Initial seek latency (1000 sectors): 50ms
+	Average read latency: 1.31ms/sector (raw speed: 10.2x)
+	Read latency standard deviation: 0.06ms/sector
+	[30:00.00]: 
+135000:1:64 135001:27:38 135028:27:41 135055:27:38 135082:27:41 135109:27:38 135136:27:41 135163:27:38 135190:27:41 135217:27:38 135244:27:41 135271:27:38 135298:27:41 135325:27:38 135352:27:41 135379:27:38 135406:27:41 135433:27:38 135460:27:41 135487:27:38 135514:27:41 135541:27:38 135568:27:41 135595:27:38 135622:27:41 135649:27:38 135676:27:41 135703:27:38 135730:27:41 135757:27:38 135784:27:41 135811:27:38 135838:27:41 135865:27:38 135892:27:41 135919:27:38 135946:27:41 135973:27:38 
+	Initial seek latency (1000 sectors): 64ms
+	Average read latency: 1.46ms/sector (raw speed: 9.1x)
+	Read latency standard deviation: 0.06ms/sector
+	[20:00.00]: 
+90000:1:63 90001:27:43 90028:27:47 90055:27:43 90082:27:47 90109:27:43 90136:27:46 90163:27:43 90190:27:47 90217:27:43 90244:27:46 90271:27:43 90298:27:47 90325:27:43 90352:27:46 90379:27:43 90406:27:46 90433:27:43 90460:27:46 90487:27:43 90514:27:46 90541:27:43 90568:27:46 90595:27:43 90622:27:46 90649:27:43 90676:27:46 90703:27:43 90730:27:46 90757:27:43 90784:27:46 90811:27:43 90838:27:46 90865:27:43 90892:27:46 90919:27:43 90946:27:46 90973:27:43 
+	Initial seek latency (1000 sectors): 63ms
+	Average read latency: 1.65ms/sector (raw speed: 8.1x)
+	Read latency standard deviation: 0.06ms/sector
+	[10:00.00]: 
+45000:1:61 45001:27:51 45028:27:55 45055:27:51 45082:27:55 45109:27:52 45136:27:55 45163:27:51 45190:27:55 45217:27:51 45244:27:55 45271:27:51 45298:27:55 45325:27:51 45352:27:55 45379:27:51 45406:27:55 45433:27:51 45460:27:55 45487:27:51 45514:27:55 45541:27:51 45568:27:55 45595:27:51 45622:27:55 45649:27:51 45676:27:55 45703:27:51 45730:27:55 45757:27:51 45784:27:55 45811:27:51 45838:27:55 45865:27:51 45892:27:55 45919:27:51 45946:27:55 45973:27:51 
+	Initial seek latency (1000 sectors): 61ms
+	Average read latency: 1.96ms/sector (raw speed: 6.8x)
+	Read latency standard deviation: 0.07ms/sector
+	[00:00.00]: 
+0:1:84 1:27:67 28:27:72 55:27:67 82:27:72 109:27:67 136:27:72 163:27:67 190:27:72 217:27:67 244:27:72 271:27:67 298:27:72 325:27:67 352:27:72 379:27:67 406:27:72 433:27:67 460:27:72 487:27:67 514:27:72 541:27:67 568:27:72 595:27:67 622:27:72 649:27:67 676:27:72 703:27:67 730:27:72 757:27:67 784:27:72 811:27:67 838:27:72 865:27:67 892:27:72 919:27:67 946:27:72 973:27:67 
+	Initial seek latency (1000 sectors): 84ms
+	Average read latency: 2.57ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.09ms/sector
+
+Analyzing cache behavior...
+	Fast search for approximate cache size... 0 sectors            
+		>>> fast_read=10:1:71 seek_read=10:1:0
+	Fast search for approximate cache size... 1 sectors            
+		>>> fast_read=11:1:0 seek_read=10:1:65
+		>>> fast_read=11:1:0 
+	Fast search for approximate cache size... 2 sectors            
+		>>> fast_read=12:1:4 seek_read=10:1:61
+		>>> fast_read=12:1:5 
+	Fast search for approximate cache size... 3 sectors            
+		>>> fast_read=13:1:0 seek_read=10:1:61
+		>>> fast_read=13:1:5 
+	Fast search for approximate cache size... 4 sectors            
+		>>> fast_read=14:1:5 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+		>>> fast_read=14:1:10 seek_read=10:1:56
+	Slow verify for approximate cache size... 4 sectors
+	Attempting to reduce read speed to 1x... drive said OK
+		>>> slow_read=10:5:10 seek_read=10:1:0
+	Attempting to reset read speed to full... drive said OK
+	Fast search for approximate cache size... 5 sectors            
+		>>> fast_read=15:1:0 seek_read=10:1:54
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+		>>> fast_read=15:1:10 seek_read=10:1:56
+	Slow verify for approximate cache size... 5 sectors
+	Attempting to reduce read speed to 1x... drive said OK
+		>>> slow_read=10:6:9 seek_read=10:1:0
+	Attempting to reset read speed to full... drive said OK
+	Fast search for approximate cache size... 6 sectors            
+		>>> fast_read=16:1:4 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+		>>> fast_read=16:1:15 seek_read=10:1:51
+	Slow verify for approximate cache size... 6 sectors
+	Attempting to reduce read speed to 1x... drive said OK
+		>>> slow_read=10:7:15 seek_read=10:1:0
+	Attempting to reset read speed to full... drive said OK
+	Fast search for approximate cache size... 7 sectors            
+		>>> fast_read=17:1:0 seek_read=10:1:49
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+		>>> fast_read=17:1:15 seek_read=10:1:51
+	Slow verify for approximate cache size... 7 sectors
+	Attempting to reduce read speed to 1x... drive said OK
+		>>> slow_read=10:8:15 seek_read=10:1:0
+	Attempting to reset read speed to full... drive said OK
+	Fast search for approximate cache size... 8 sectors            
+		>>> fast_read=18:1:3 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:21 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:21 seek_read=10:1:45
+		>>> fast_read=18:1:21 seek_read=10:1:45
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+		>>> fast_read=18:1:20 seek_read=10:1:46
+	Slow verify for approximate cache size... 8 sectors
+	Attempting to reduce read speed to 1x... drive said OK
+		>>> slow_read=10:9:18 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:45
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+		>>> slow_read=10:9:20 seek_read=10:1:46
+	Approximate random access cache size: 8 sector(s)               
+	Attempting to reset read speed to full... drive said OK
+	Verifying that cache is contiguous...		>>> 34:1:61 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+		>>> 34:1:62 seek_read:10:1:49
+	Drive cache tests as contiguous                           
+
+	Testing background readahead past read cursor... 64
+		0 >>> 10:8:15 sleep=197299us seek=81:1:0
+	Testing background readahead past read cursor... 128
+		0 >>> 10:8:69 sleep=394598us seek=145:1:0
+	Testing background readahead past read cursor... 192
+		0 >>> 10:8:73 sleep=591897us seek=209:1:0
+	Testing background readahead past read cursor... 256
+		0 >>> 10:8:77 sleep=789196us seek=273:1:0
+	Testing background readahead past read cursor... 320
+		0 >>> 10:8:81 sleep=986496us seek=337:1:50
+		1 >>> 10:8:81 sleep=1150912us seek=337:1:64
+	Retiming drive...                               
+10:1:65 11:27:67 38:27:72 65:27:67 92:27:72 119:27:67 146:27:72 173:27:67 200:27:72 227:27:67 254:27:72 281:27:67 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:67 1334:27:72 1361:27:67 1388:27:72 1415:27:66 1442:27:72 1469:27:67 1496:27:72 1523:27:66 1550:27:72 1577:27:66 1604:27:71 1631:27:66 1658:27:72 1685:27:66 1712:27:71 1739:27:66 1766:27:71 1793:27:66 1820:27:71 1847:27:66 1874:27:71 1901:27:66 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 65ms
+	Average read latency: 2.57ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.11ms/sector
+	Old mean=2.57ms/sec, New mean=2.56ms/sec
+
+		2 >>> 10:8:106 sleep=1315328us seek=337:1:56
+	Testing background readahead past read cursor... 264
+		0 >>> 10:8:81 sleep=813859us seek=281:1:0
+	Testing background readahead past read cursor... 272
+		0 >>> 10:8:79 sleep=838521us seek=289:1:50
+		1 >>> 10:8:72 sleep=978275us seek=289:1:45
+	Retiming drive...                               
+10:1:56 11:27:67 38:27:72 65:27:67 92:27:72 119:27:67 146:27:72 173:27:67 200:27:72 227:27:67 254:27:72 281:27:67 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:67 1334:27:72 1361:27:67 1388:27:72 1415:27:67 1442:27:72 1469:27:66 1496:27:72 1523:27:66 1550:27:72 1577:27:66 1604:27:72 1631:27:66 1658:27:72 1685:27:66 1712:27:71 1739:27:66 1766:27:71 1793:27:66 1820:27:71 1847:27:66 1874:27:71 1901:27:66 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 56ms
+	Average read latency: 2.57ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.11ms/sector
+	Old mean=2.57ms/sec, New mean=2.56ms/sec
+
+		2 >>> 10:8:105 sleep=1118028us seek=289:1:39
+	Testing background readahead past read cursor... 265
+		0 >>> 10:8:71 sleep=816942us seek=282:1:54
+		1 >>> 10:8:67 sleep=953098us seek=282:1:52
+	Retiming drive...                               
+10:1:51 11:27:67 38:27:72 65:27:67 92:27:72 119:27:67 146:27:72 173:27:67 200:27:72 227:27:67 254:27:72 281:27:67 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:67 1334:27:72 1361:27:67 1388:27:72 1415:27:67 1442:27:71 1469:27:67 1496:27:72 1523:27:66 1550:27:72 1577:27:67 1604:27:71 1631:27:66 1658:27:72 1685:27:66 1712:27:72 1739:27:67 1766:27:71 1793:27:66 1820:27:71 1847:27:66 1874:27:71 1901:27:67 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 51ms
+	Average read latency: 2.57ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.11ms/sector
+	Old mean=2.57ms/sec, New mean=2.57ms/sec
+
+		2 >>> 10:8:106 sleep=1089256us seek=282:1:50
+		3 >>> 10:8:67 sleep=1225413us seek=282:1:48
+		4 >>> 10:8:67 sleep=1361569us seek=282:1:46
+	Retiming drive...                               
+10:1:52 11:27:67 38:27:72 65:27:67 92:27:72 119:27:67 146:27:72 173:27:67 200:27:72 227:27:67 254:27:72 281:27:67 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:67 1334:27:72 1361:27:67 1388:27:72 1415:27:66 1442:27:72 1469:27:66 1496:27:72 1523:27:67 1550:27:72 1577:27:66 1604:27:72 1631:27:66 1658:27:72 1685:27:66 1712:27:71 1739:27:67 1766:27:71 1793:27:66 1820:27:71 1847:27:66 1874:27:71 1901:27:66 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 52ms
+	Average read latency: 2.57ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.11ms/sector
+	Old mean=2.57ms/sec, New mean=2.56ms/sec
+
+		5 >>> 10:8:106 sleep=1497727us seek=282:1:44
+		6 >>> 10:8:90 sleep=1633884us seek=282:1:42
+		7 >>> 10:8:67 sleep=1770041us seek=282:1:40
+	Retiming drive...                               
+10:1:52 11:27:67 38:27:72 65:27:67 92:27:72 119:27:67 146:27:72 173:27:67 200:27:72 227:27:67 254:27:72 281:27:67 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:66 1334:27:72 1361:27:67 1388:27:72 1415:27:67 1442:27:72 1469:27:66 1496:27:72 1523:27:66 1550:27:72 1577:27:67 1604:27:72 1631:27:66 1658:27:72 1685:27:66 1712:27:71 1739:27:66 1766:27:72 1793:27:66 1820:27:71 1847:27:66 1874:27:71 1901:27:66 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 52ms
+	Average read latency: 2.57ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.11ms/sector
+	Old mean=2.57ms/sec, New mean=2.56ms/sec
+
+		8 >>> 10:8:106 sleep=1906197us seek=282:1:60
+		9 >>> 10:8:67 sleep=2042354us seek=282:1:58
+	Drive readahead past read cursor: 264 sector(s)                
+	Testing cache tail cursor...
+		>>> 10:8:67 
+		sleeping 1017324 microseconds
+		<<< 7:1:0 6:1:55 
+		>>> 10:8:66 
+		sleeping 1017324 microseconds
+		<<< 6:1:0 5:1:52 
+		>>> 10:8:69 
+		sleeping 1017324 microseconds
+		<<< 5:1:0 4:1:49 
+		>>> 10:8:72 
+		sleeping 1017324 microseconds
+		<<< 4:1:0 3:1:69 
+		>>> 10:8:74 
+		sleeping 1017324 microseconds
+		<<< 3:1:0 2:1:67 
+		>>> 10:8:77 
+		sleeping 1017324 microseconds
+		<<< 2:1:0 1:1:64 
+		>>> 10:8:79 
+		sleeping 1017324 microseconds
+		<<< 1:1:0 0:1:61 
+		>>> 10:8:15 
+		sleeping 1017324 microseconds
+		<<< 0:1:0 
+	Retiming drive...                               
+10:1:0 11:27:1 38:27:1 65:27:1 92:27:1 119:27:1 146:27:3 173:27:3 200:27:3 227:27:3 254:27:3 281:27:91 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:67 1334:27:72 1361:27:67 1388:27:72 1415:27:66 1442:27:71 1469:27:67 1496:27:72 1523:27:66 1550:27:71 1577:27:67 1604:27:71 1631:27:66 1658:27:72 1685:27:66 1712:27:71 1739:27:66 1766:27:71 1793:27:66 1820:27:71 1847:27:66 1874:27:71 1901:27:66 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 0ms
+	Average read latency: 2.23ms/sector (raw speed: 6.0x)
+	Read latency standard deviation: 0.88ms/sector
+	Old mean=2.57ms/sec, New mean=2.23ms/sec
+
+	Cache tail cursor tied to read cursor                      
+	Testing granularity of cache tail
+		>>> 10:9:112 
+		sleeping 1017324 microseconds
+		<<< 18:1:0 17:1:52 
+		>>> 10:9:69 
+		sleeping 1017324 microseconds
+		<<< 17:1:0 16:1:72 
+		>>> 10:9:80 
+		sleeping 1017324 microseconds
+		<<< 16:1:0 15:1:69 
+		>>> 10:9:74 
+		sleeping 1017324 microseconds
+		<<< 15:1:0 14:1:67 
+		>>> 10:9:77 
+		sleeping 1017324 microseconds
+		<<< 14:1:0 13:1:64 
+		>>> 10:9:79 
+		sleeping 1017324 microseconds
+		<<< 13:1:0 12:1:61 
+		>>> 10:9:82 
+		sleeping 1017324 microseconds
+		<<< 12:1:0 11:1:59 
+		>>> 10:9:85 
+		sleeping 1017324 microseconds
+		<<< 11:1:0 10:1:56 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+		>>> 10:9:20 
+		sleeping 1017324 microseconds
+		<<< 10:1:57 
+	Retiming drive...                               
+10:1:0 11:27:66 38:27:72 65:27:67 92:27:72 119:27:67 146:27:72 173:27:67 200:27:72 227:27:67 254:27:72 281:27:67 308:27:72 335:27:67 362:27:72 389:27:67 416:27:72 443:27:67 470:27:72 497:27:67 524:27:72 551:27:67 578:27:72 605:27:67 632:27:72 659:27:67 686:27:72 713:27:67 740:27:72 767:27:67 794:27:72 821:27:67 848:27:72 875:27:67 902:27:72 929:27:67 956:27:72 983:27:67 1010:27:72 1037:27:67 1064:27:72 1091:27:67 1118:27:72 1145:27:67 1172:27:72 1199:27:67 1226:27:72 1253:27:67 1280:27:72 1307:27:67 1334:27:71 1361:27:67 1388:27:72 1415:27:67 1442:27:72 1469:27:66 1496:27:72 1523:27:66 1550:27:72 1577:27:66 1604:27:71 1631:27:66 1658:27:72 1685:27:66 1712:27:71 1739:27:66 1766:27:71 1793:27:67 1820:27:71 1847:27:66 1874:27:71 1901:27:66 1928:27:71 1955:1:0 
+	Initial seek latency (1946 sectors): 0ms
+	Average read latency: 2.56ms/sector (raw speed: 5.2x)
+	Read latency standard deviation: 0.11ms/sector
+	Old mean=2.57ms/sec, New mean=2.56ms/sec
+
+	Cache tail granularity: 1 sector(s)                      
+	Cache size (considering rollbehind) too small to test cache speed.
+
+Drive tests OK with Paranoia.
+
diff --git a/morituri/test/cdparanoia/MATSHITA.cdparanoia-A.stderr b/morituri/test/cdparanoia/MATSHITA.cdparanoia-A.stderr
new file mode 100644
index 0000000..fd1fd8c
--- /dev/null
+++ b/morituri/test/cdparanoia/MATSHITA.cdparanoia-A.stderr
@@ -0,0 +1,111 @@
+cdparanoia III release 10.2 (September 11, 2008)
+
+Using cdda library version: 10.2
+Using paranoia library version: 10.2
+Checking /dev/cdrom for cdrom...
+	Could not stat /dev/cdrom: No such file or directory
+
+Checking /dev/cdroms/cdrom0 for cdrom...
+	Could not stat /dev/cdroms/cdrom0: No such file or directory
+
+Checking /dev/cdroms/cdroma for cdrom...
+	Could not stat /dev/cdroms/cdroma: No such file or directory
+
+Checking /dev/cdroms/cdrom1 for cdrom...
+	Could not stat /dev/cdroms/cdrom1: No such file or directory
+
+Checking /dev/cdroms/cdromb for cdrom...
+	Could not stat /dev/cdroms/cdromb: No such file or directory
+
+Checking /dev/cdroms/cdrom2 for cdrom...
+	Could not stat /dev/cdroms/cdrom2: No such file or directory
+
+Checking /dev/cdroms/cdromc for cdrom...
+	Could not stat /dev/cdroms/cdromc: No such file or directory
+
+Checking /dev/cdroms/cdrom3 for cdrom...
+	Could not stat /dev/cdroms/cdrom3: No such file or directory
+
+Checking /dev/cdroms/cdromd for cdrom...
+	Could not stat /dev/cdroms/cdromd: No such file or directory
+
+Checking /dev/hd0 for cdrom...
+	Could not stat /dev/hd0: No such file or directory
+
+Checking /dev/hda for cdrom...
+	Could not stat /dev/hda: No such file or directory
+
+Checking /dev/hd1 for cdrom...
+	Could not stat /dev/hd1: No such file or directory
+
+Checking /dev/hdb for cdrom...
+	Could not stat /dev/hdb: No such file or directory
+
+Checking /dev/hd2 for cdrom...
+	Could not stat /dev/hd2: No such file or directory
+
+Checking /dev/hdc for cdrom...
+	Could not stat /dev/hdc: No such file or directory
+
+Checking /dev/hd3 for cdrom...
+	Could not stat /dev/hd3: No such file or directory
+
+Checking /dev/hdd for cdrom...
+	Could not stat /dev/hdd: No such file or directory
+
+Checking /dev/sg0 for cdrom...
+	Testing /dev/sg0 for SCSI/MMC interface
+		Could not access device /dev/sg0 to test for SG_IO support: Permission denied
+		no SG_IO support for device: /dev/sg0
+		Could not access device /dev/sg0: Permission denied
+		generic device: /dev/sg0
+		ioctl device: not found
+		Could not open generic SCSI device /dev/sg0: Permission denied
+	Testing /dev/sg0 for cooked ioctl() interface
+		/dev/sg0 is not a cooked ioctl CDROM.
+
+Checking /dev/sga for cdrom...
+	Could not stat /dev/sga: No such file or directory
+
+Checking /dev/sg1 for cdrom...
+	Testing /dev/sg1 for SCSI/MMC interface
+		SG_IO device: /dev/sg1
+
+CDROM model sensed sensed: MATSHITA DVD-RAM UJ8A0A SB02 
+
+Checking for SCSI emulation...
+	Drive is ATAPI (using SG_IO host adaptor emulation)
+
+Checking for MMC style command set...
+	Drive is MMC style
+	DMA scatter/gather table entries: 167
+	table entry size: 524288 bytes
+	maximum theoretical transfer: 37074 sectors
+	Setting default read size to 27 sectors (63504 bytes).
+
+Verifying CDDA command set...
+	Expected command set reads OK.
+
+Attempting to set cdrom to full speed... 
+	drive returned OK.
+
+=================== Checking drive cache/timing behavior ===================
+
+Seek/read timing:
+
	[45:24.28]:   46ms seek, 1.26ms/sec read [10.6x] spinning up...  
	[45:24.27]:   45ms seek, 1.25ms/sec read [10.6x] spinning up...  
	[45:24.26]:   45ms seek, 1.26ms/sec read [10.6x] spinning up...  
	[45:24.25]:   44ms seek, 1.26ms/sec read [10.6x] spinning up...  
	[45:24.24]:   45ms seek, 1.26ms/sec read [10.6x]                 
+	[40:00.00]:   50ms seek, 1.31ms/sec read [10.2x]                 
+	[30:00.00]:   64ms seek, 1.46ms/sec read [9.1x]                 
+	[20:00.00]:   63ms seek, 1.65ms/sec read [8.1x]                 
+	[10:00.00]:   61ms seek, 1.96ms/sec read [6.8x]                 
+	[00:00.00]:   84ms seek, 2.57ms/sec read [5.2x]                 
+
+Analyzing cache behavior...
+
	Fast search for approximate cache size... 0 sectors            
	Fast search for approximate cache size... 1 sectors            
	Fast search for approximate cache size... 2 sectors            
	Fast search for approximate cache size... 3 sectors            
	Fast search for approximate cache size... 4 sectors            
	Slow verify for approximate cache size... 4 sectors.
	Fast search for approximate cache size... 5 sectors            
	Slow verify for approximate cache size... 5 sectors.
	Fast search for approximate cache size... 6 sectors            
	Slow verify for approximate cache size... 6 sectors.
	Fast search for approximate cache size... 7 sectors            
	Slow verify for approximate cache size... 7 sectors.
	Fast search for approximate cache size... 8 sectors            
	Slow verify for approximate cache size... 8 sectors..........
	Approximate random access cache size: 8 sector(s)               
+	Verifying that cache is contiguous.......................
	Drive cache tests as contiguous                           
+
	Testing background readahead past read cursor... 64           .
	Testing background readahead past read cursor... 128           .
	Testing background readahead past read cursor... 192           .
	Testing background readahead past read cursor... 256           .
	Testing background readahead past read cursor... 320           ..o.
	Testing background readahead past read cursor... 264           .
	Testing background readahead past read cursor... 272           ..o.
	Testing background readahead past read cursor... 265           ..o...o...o..
	Drive readahead past read cursor: 264 sector(s)                
+	Testing cache tail cursor...........o
	Cache tail cursor tied to read cursor                      
+	Testing granularity of cache tail..................o
	Cache tail granularity: 1 sector(s)                      
+	Cache size (considering rollbehind) too small to test cache speed.
+
+Drive tests OK with Paranoia.
+
diff --git a/morituri/test/cdparanoia/PX-L890SA.cdparanoia-A.log b/morituri/test/cdparanoia/PX-L890SA.cdparanoia-A.log
new file mode 100644
index 0000000..28adbeb
--- /dev/null
+++ b/morituri/test/cdparanoia/PX-L890SA.cdparanoia-A.log
@@ -0,0 +1,158 @@
+cdparanoia -A 
+cdparanoia III release 10.2 (September 11, 2008)
+
+Using cdda library version: 10.2
+Using paranoia library version: 10.2
+ 
+
+Attempting to set cdrom to full speed... 
+	drive returned OK.
+
+=================== Checking drive cache/timing behavior ===================
+
+Seek/read timing:
+
+	[39:43.53]: 
+178778:1:19 178779:27:19 178806:27:19 178833:27:19 178860:27:19 178887:27:19 178914:27:19 178941:27:19 178968:27:19 178995:27:19 179022:27:19 179049:27:19 179076:27:19 179103:27:19 179130:27:19 179157:27:19 179184:27:19 179211:27:19 179238:27:19 179265:27:19 179292:27:19 179319:27:19 179346:27:19 179373:27:19 179400:27:19 179427:27:19 179454:27:20 179481:27:19 179508:27:19 179535:27:19 179562:27:19 179589:27:19 179616:27:19 179643:27:19 179670:27:19 179697:27:19 179724:27:19 179751:27:19 
+	Initial seek latency (1000 sectors): 19ms
+	Average read latency: 0.70ms/sector (raw speed: 18.9x)
+	Read latency standard deviation: 0.01ms/sector
+	[39:43.52]: 
+178777:1:19 178778:27:20 178805:27:20 178832:27:20 178859:27:20 178886:27:20 178913:27:20 178940:27:20 178967:27:732 178994:27:14 179021:27:14 179048:27:14 179075:27:14 179102:27:14 179129:27:14 179156:27:14 179183:27:14 179210:27:14 179237:27:14 179264:27:14 179291:27:14 179318:27:14 179345:27:14 179372:27:14 179399:27:14 179426:27:14 179453:27:14 179480:27:14 179507:27:14 179534:27:14 179561:27:14 179588:27:14 179615:27:14 179642:27:14 179669:27:14 179696:27:14 179723:27:14 179750:27:14 
+	Initial seek latency (1000 sectors): 19ms
+	Average read latency: 1.28ms/sector (raw speed: 10.4x)
+	Read latency standard deviation: 4.31ms/sector
+	[39:43.51]: 
+178776:1:23 178777:27:14 178804:27:14 178831:27:14 178858:27:14 178885:27:14 178912:27:14 178939:27:14 178966:27:14 178993:27:14 179020:27:14 179047:27:14 179074:27:14 179101:27:14 179128:27:14 179155:27:14 179182:27:14 179209:27:14 179236:27:14 179263:27:14 179290:27:14 179317:27:14 179344:27:14 179371:27:14 179398:27:14 179425:27:14 179452:27:14 179479:27:14 179506:27:14 179533:27:14 179560:27:14 179587:27:14 179614:27:14 179641:27:14 179668:27:14 179695:27:14 179722:27:14 179749:27:14 
+	Initial seek latency (1000 sectors): 23ms
+	Average read latency: 0.52ms/sector (raw speed: 25.7x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.50]: 
+178775:1:231 178776:27:13 178803:27:13 178830:27:13 178857:27:13 178884:27:13 178911:27:13 178938:27:13 178965:27:13 178992:27:13 179019:27:13 179046:27:13 179073:27:13 179100:27:13 179127:27:13 179154:27:12 179181:27:12 179208:27:12 179235:27:13 179262:27:12 179289:27:12 179316:27:12 179343:27:12 179370:27:12 179397:27:12 179424:27:12 179451:27:12 179478:27:12 179505:27:12 179532:27:12 179559:27:12 179586:27:12 179613:27:12 179640:27:12 179667:27:12 179694:27:12 179721:27:12 179748:27:12 
+	Initial seek latency (1000 sectors): 231ms
+	Average read latency: 0.46ms/sector (raw speed: 29.0x)
+	Read latency standard deviation: 0.02ms/sector
+	[39:43.49]: 
+178774:1:18 178775:27:11 178802:27:11 178829:27:11 178856:27:11 178883:27:11 178910:27:11 178937:27:11 178964:27:11 178991:27:11 179018:27:11 179045:27:11 179072:27:11 179099:27:11 179126:27:11 179153:27:11 179180:27:11 179207:27:11 179234:27:11 179261:27:11 179288:27:11 179315:27:11 179342:27:11 179369:27:11 179396:27:11 179423:27:11 179450:27:11 179477:27:11 179504:27:11 179531:27:11 179558:27:11 179585:27:11 179612:27:11 179639:27:11 179666:27:11 179693:27:11 179720:27:11 179747:27:11 
+	Initial seek latency (1000 sectors): 18ms
+	Average read latency: 0.41ms/sector (raw speed: 32.7x)
+	Read latency standard deviation: 0.00ms/sector
+	[39:43.48]: 
+178773:1:18 178774:27:11 178801:27:11 178828:27:11 178855:27:11 178882:27:11 178909:27:941 178936:27:9 178963:27:9 178990:27:9 179017:27:9 179044:27:9 179071:27:9 179098:27:9 179125:27:9 179152:27:9 179179:27:9 179206:27:9 179233:27:9 179260:27:9 179287:27:9 179314:27:10 179341:27:9 179368:27:10 179395:27:9 179422:27:9 179449:27:9 179476:27:9 179503:27:10 179530:27:9 179557:27:9 179584:27:9 179611:27:10 179638:27:9 179665:27:9 179692:27:9 179719:27:10 179746:27:9 
+	Initial seek latency (1000 sectors): 18ms
+	Average read latency: 1.28ms/sector (raw speed: 10.4x)
+	Read latency standard deviation: 5.60ms/sector
+	[39:43.47]: 
+178772:1:21 178773:27:10 178800:27:10 178827:27:10 178854:27:10 178881:27:9 178908:27:10 178935:27:10 178962:27:10 178989:27:10 179016:27:10 179043:27:10 179070:27:10 179097:27:10 179124:27:10 179151:27:10 179178:27:10 179205:27:10 179232:27:9 179259:27:10 179286:27:10 179313:27:10 179340:27:10 179367:27:10 179394:27:10 179421:27:10 179448:27:10 179475:27:10 179502:27:10 179529:27:10 179556:27:10 179583:27:10 179610:27:10 179637:27:10 179664:27:10 179691:27:10 179718:27:10 179745:27:9 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.3x)
+	Read latency standard deviation: 0.01ms/sector
+	[39:43.46]: 
+178771:1:21 178772:27:10 178799:27:9 178826:27:10 178853:27:10 178880:27:10 178907:27:10 178934:27:10 178961:27:10 178988:27:10 179015:27:10 179042:27:10 179069:27:10 179096:27:10 179123:27:10 179150:27:10 179177:27:10 179204:27:10 179231:27:10 179258:27:10 179285:27:10 179312:27:10 179339:27:10 179366:27:10 179393:27:10 179420:27:10 179447:27:10 179474:27:10 179501:27:10 179528:27:10 179555:27:10 179582:27:10 179609:27:10 179636:27:10 179663:27:10 179690:27:10 179717:27:10 179744:27:10 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.1x)
+	Read latency standard deviation: 0.01ms/sector
+	[39:43.45]: 
+178770:1:15 178771:27:10 178798:27:10 178825:27:10 178852:27:10 178879:27:10 178906:27:10 178933:27:10 178960:27:10 178987:27:10 179014:27:10 179041:27:10 179068:27:10 179095:27:10 179122:27:10 179149:27:10 179176:27:10 179203:27:10 179230:27:10 179257:27:10 179284:27:10 179311:27:10 179338:27:10 179365:27:10 179392:27:10 179419:27:10 179446:27:10 179473:27:10 179500:27:10 179527:27:10 179554:27:10 179581:27:10 179608:27:10 179635:27:10 179662:27:10 179689:27:10 179716:27:10 179743:27:10 
+	Initial seek latency (1000 sectors): 15ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.44]: 
+178769:1:21 178770:27:10 178797:27:10 178824:27:10 178851:27:10 178878:27:10 178905:27:10 178932:27:10 178959:27:10 178986:27:10 179013:27:10 179040:27:10 179067:27:10 179094:27:10 179121:27:10 179148:27:10 179175:27:10 179202:27:10 179229:27:10 179256:27:10 179283:27:10 179310:27:10 179337:27:10 179364:27:10 179391:27:10 179418:27:10 179445:27:10 179472:27:10 179499:27:10 179526:27:10 179553:27:10 179580:27:10 179607:27:10 179634:27:10 179661:27:10 179688:27:10 179715:27:10 179742:27:10 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.43]: 
+178768:1:15 178769:27:10 178796:27:10 178823:27:10 178850:27:10 178877:27:10 178904:27:10 178931:27:10 178958:27:10 178985:27:10 179012:27:10 179039:27:10 179066:27:10 179093:27:10 179120:27:10 179147:27:10 179174:27:10 179201:27:10 179228:27:10 179255:27:10 179282:27:10 179309:27:10 179336:27:10 179363:27:10 179390:27:10 179417:27:10 179444:27:10 179471:27:10 179498:27:10 179525:27:10 179552:27:10 179579:27:10 179606:27:10 179633:27:10 179660:27:10 179687:27:10 179714:27:10 179741:27:10 
+	Initial seek latency (1000 sectors): 15ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.42]: 
+178767:1:21 178768:27:10 178795:27:10 178822:27:10 178849:27:10 178876:27:10 178903:27:10 178930:27:10 178957:27:10 178984:27:10 179011:27:10 179038:27:10 179065:27:10 179092:27:10 179119:27:10 179146:27:10 179173:27:10 179200:27:10 179227:27:10 179254:27:10 179281:27:10 179308:27:10 179335:27:10 179362:27:10 179389:27:10 179416:27:10 179443:27:10 179470:27:10 179497:27:10 179524:27:10 179551:27:10 179578:27:10 179605:27:10 179632:27:10 179659:27:10 179686:27:10 179713:27:10 179740:27:10 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.41]: 
+178766:1:15 178767:27:10 178794:27:10 178821:27:10 178848:27:10 178875:27:10 178902:27:10 178929:27:10 178956:27:10 178983:27:10 179010:27:10 179037:27:10 179064:27:10 179091:27:10 179118:27:10 179145:27:10 179172:27:10 179199:27:10 179226:27:10 179253:27:10 179280:27:10 179307:27:10 179334:27:10 179361:27:10 179388:27:10 179415:27:10 179442:27:10 179469:27:10 179496:27:10 179523:27:10 179550:27:10 179577:27:10 179604:27:10 179631:27:10 179658:27:10 179685:27:10 179712:27:10 179739:27:10 
+	Initial seek latency (1000 sectors): 15ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.40]: 
+178765:1:21 178766:27:10 178793:27:10 178820:27:10 178847:27:10 178874:27:10 178901:27:10 178928:27:10 178955:27:10 178982:27:10 179009:27:10 179036:27:10 179063:27:10 179090:27:10 179117:27:10 179144:27:10 179171:27:10 179198:27:10 179225:27:10 179252:27:10 179279:27:10 179306:27:10 179333:27:10 179360:27:10 179387:27:10 179414:27:10 179441:27:10 179468:27:10 179495:27:10 179522:27:10 179549:27:10 179576:27:10 179603:27:10 179630:27:10 179657:27:10 179684:27:10 179711:27:10 179738:27:10 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.39]: 
+178764:1:21 178765:27:10 178792:27:10 178819:27:10 178846:27:10 178873:27:10 178900:27:10 178927:27:10 178954:27:10 178981:27:10 179008:27:10 179035:27:10 179062:27:10 179089:27:10 179116:27:10 179143:27:10 179170:27:10 179197:27:10 179224:27:10 179251:27:10 179278:27:10 179305:27:10 179332:27:10 179359:27:10 179386:27:10 179413:27:10 179440:27:10 179467:27:10 179494:27:10 179521:27:10 179548:27:10 179575:27:10 179602:27:10 179629:27:10 179656:27:10 179683:27:10 179710:27:10 179737:27:10 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.38]: 
+178763:1:15 178764:27:10 178791:27:10 178818:27:10 178845:27:10 178872:27:10 178899:27:10 178926:27:10 178953:27:10 178980:27:10 179007:27:10 179034:27:10 179061:27:10 179088:27:10 179115:27:10 179142:27:10 179169:27:10 179196:27:10 179223:27:10 179250:27:10 179277:27:10 179304:27:10 179331:27:10 179358:27:10 179385:27:10 179412:27:10 179439:27:10 179466:27:10 179493:27:10 179520:27:10 179547:27:10 179574:27:10 179601:27:10 179628:27:10 179655:27:10 179682:27:10 179709:27:10 179736:27:10 
+	Initial seek latency (1000 sectors): 15ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.37]: 
+178762:1:21 178763:27:10 178790:27:10 178817:27:10 178844:27:10 178871:27:10 178898:27:10 178925:27:10 178952:27:10 178979:27:10 179006:27:10 179033:27:10 179060:27:10 179087:27:10 179114:27:10 179141:27:10 179168:27:10 179195:27:10 179222:27:10 179249:27:10 179276:27:10 179303:27:10 179330:27:10 179357:27:10 179384:27:10 179411:27:10 179438:27:10 179465:27:10 179492:27:10 179519:27:10 179546:27:10 179573:27:10 179600:27:10 179627:27:10 179654:27:10 179681:27:10 179708:27:10 179735:27:10 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[39:43.36]: 
+178761:1:15 178762:27:10 178789:27:10 178816:27:10 178843:27:10 178870:27:10 178897:27:10 178924:27:10 178951:27:10 178978:27:10 179005:27:10 179032:27:10 179059:27:10 179086:27:10 179113:27:10 179140:27:10 179167:27:10 179194:27:10 179221:27:10 179248:27:10 179275:27:10 179302:27:10 179329:27:10 179356:27:10 179383:27:10 179410:27:10 179437:27:10 179464:27:10 179491:27:10 179518:27:10 179545:27:10 179572:27:10 179599:27:10 179626:27:10 179653:27:10 179680:27:10 179707:27:10 179734:27:10 
+	Initial seek latency (1000 sectors): 15ms
+	Average read latency: 0.37ms/sector (raw speed: 36.0x)
+	Read latency standard deviation: -nanms/sector
+	[30:00.00]: 
+135000:1:21 135001:27:11 135028:27:11 135055:27:11 135082:27:11 135109:27:11 135136:27:11 135163:27:11 135190:27:11 135217:27:11 135244:27:11 135271:27:11 135298:27:11 135325:27:11 135352:27:11 135379:27:11 135406:27:11 135433:27:11 135460:27:11 135487:27:11 135514:27:11 135541:27:11 135568:27:11 135595:27:11 135622:27:11 135649:27:11 135676:27:11 135703:27:11 135730:27:11 135757:27:11 135784:27:11 135811:27:11 135838:27:11 135865:27:11 135892:27:11 135919:27:11 135946:27:11 135973:27:11 
+	Initial seek latency (1000 sectors): 21ms
+	Average read latency: 0.41ms/sector (raw speed: 32.7x)
+	Read latency standard deviation: 0.00ms/sector
+	[20:00.00]: 
+90000:1:22 90001:27:12 90028:27:12 90055:27:12 90082:27:12 90109:27:12 90136:27:12 90163:27:12 90190:27:12 90217:27:12 90244:27:12 90271:27:12 90298:27:12 90325:27:12 90352:27:12 90379:27:12 90406:27:12 90433:27:12 90460:27:12 90487:27:12 90514:27:12 90541:27:12 90568:27:12 90595:27:12 90622:27:12 90649:27:12 90676:27:12 90703:27:12 90730:27:12 90757:27:12 90784:27:12 90811:27:12 90838:27:12 90865:27:12 90892:27:12 90919:27:12 90946:27:12 90973:27:12 
+	Initial seek latency (1000 sectors): 22ms
+	Average read latency: 0.44ms/sector (raw speed: 30.0x)
+	Read latency standard deviation: 0.00ms/sector
+	[10:00.00]: 
+45000:1:30 45001:27:14 45028:27:14 45055:27:14 45082:27:14 45109:27:14 45136:27:14 45163:27:14 45190:27:14 45217:27:14 45244:27:14 45271:27:14 45298:27:14 45325:27:14 45352:27:14 45379:27:14 45406:27:14 45433:27:14 45460:27:14 45487:27:14 45514:27:14 45541:27:14 45568:27:14 45595:27:14 45622:27:14 45649:27:14 45676:27:14 45703:27:14 45730:27:14 45757:27:14 45784:27:14 45811:27:14 45838:27:14 45865:27:14 45892:27:14 45919:27:14 45946:27:14 45973:27:14 
+	Initial seek latency (1000 sectors): 30ms
+	Average read latency: 0.52ms/sector (raw speed: 25.7x)
+	Read latency standard deviation: -nanms/sector
+	[00:00.00]: 
+0:1:33 1:27:19 28:27:19 55:27:19 82:27:19 109:27:19 136:27:19 163:27:19 190:27:19 217:27:19 244:27:19 271:27:19 298:27:19 325:27:19 352:27:19 379:27:19 406:27:19 433:27:19 460:27:19 487:27:19 514:27:19 541:27:19 568:27:19 595:27:19 622:27:19 649:27:19 676:27:19 703:27:19 730:27:19 757:27:19 784:27:19 811:27:19 838:27:19 865:27:19 892:27:19 919:27:19 946:27:19 973:27:19 
+	Initial seek latency (1000 sectors): 33ms
+	Average read latency: 0.70ms/sector (raw speed: 18.9x)
+	Read latency standard deviation: -nanms/sector
+
+Analyzing cache behavior...
+	Fast search for approximate cache size... 0 sectors            
+		>>> fast_read=10:1:35 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:18
+		>>> fast_read=10:1:18 seek_read=10:1:364
+		>>> fast_read=10:1:21 seek_read=10:1:21
+		>>> fast_read=10:1:22 seek_read=10:1:22
+		>>> fast_read=10:1:22 seek_read=10:1:22
+		>>> fast_read=10:1:22 seek_read=10:1:22
+		>>> fast_read=10:1:22 seek_read=10:1:22
+		>>> fast_read=10:1:22 seek_read=10:1:22
+	Slow verify for approximate cache size... 0 sectors
+	Attempting to reduce read speed to 1x... drive said OK
+		>>> slow_read=10:1:21 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+		>>> slow_read=10:1:22 seek_read=10:1:22
+	Drive does not cache nonlinear access                            
+
+Drive tests OK with Paranoia.
+
diff --git a/morituri/test/cdparanoia/PX-L890SA.cdparanoia-A.stderr b/morituri/test/cdparanoia/PX-L890SA.cdparanoia-A.stderr
new file mode 100644
index 0000000..cc2defc
--- /dev/null
+++ b/morituri/test/cdparanoia/PX-L890SA.cdparanoia-A.stderr
@@ -0,0 +1,41 @@
+cdparanoia III release 10.2 (September 11, 2008)
+
+Using cdda library version: 10.2
+Using paranoia library version: 10.2
+Checking /dev/cdrom for cdrom...
+	Testing /dev/cdrom for SCSI/MMC interface
+		SG_IO device: /dev/sr0
+
+CDROM model sensed sensed: PLEXTOR DVDR   PX-L890SA 1.05 
+ 
+
+Checking for SCSI emulation...
+	Drive is ATAPI (using SG_IO host adaptor emulation)
+
+Checking for MMC style command set...
+	Drive is MMC style
+	DMA scatter/gather table entries: 1
+	table entry size: 524288 bytes
+	maximum theoretical transfer: 222 sectors
+	Setting default read size to 27 sectors (63504 bytes).
+
+Verifying CDDA command set...
+	Expected command set reads OK.
+
+Attempting to set cdrom to full speed... 
+	drive returned OK.
+
+=================== Checking drive cache/timing behavior ===================
+
+Seek/read timing:
+
	[39:43.53]:   19ms seek, 0.70ms/sec read [18.9x] spinning up...  
	[39:43.52]:   19ms seek, 1.28ms/sec read [10.4x] spinning up...  
	[39:43.51]:   23ms seek, 0.52ms/sec read [25.7x] spinning up...  
	[39:43.50]:  231ms seek, 0.46ms/sec read [29.0x] spinning up...  
	[39:43.49]:   18ms seek, 0.41ms/sec read [32.7x] spinning up...  
	[39:43.48]:   18ms seek, 1.28ms/sec read [10.4x] spinning up...  
	[39:43.47]:   21ms seek, 0.37ms/sec read [36.3x] spinning up...  
	[39:43.46]:   21ms seek, 0.37ms/sec read [36.1x] spinning up...  
	[39:43.45]:   15ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.44]:   21ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.43]:   15ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.42]:   21ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.41]:   15ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.40]:   21ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.39]:   21ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.38]:   15ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.37]:   21ms seek, 0.37ms/sec read [36.0x] spinning up...  
	[39:43.36]:   15ms seek, 0.37ms/sec read [36.0x]                 
+	[30:00.00]:   21ms seek, 0.41ms/sec read [32.7x]                 
+	[20:00.00]:   22ms seek, 0.44ms/sec read [30.0x]                 
+	[10:00.00]:   30ms seek, 0.52ms/sec read [25.7x]                 
+	[00:00.00]:   33ms seek, 0.70ms/sec read [18.9x]                 
+
+Analyzing cache behavior...
+
	Fast search for approximate cache size... 0 sectors            
	Slow verify for approximate cache size... 0 sectors..........
	Drive does not cache nonlinear access                            
+
+Drive tests OK with Paranoia.
+
diff --git a/morituri/test/test_common_cache.py b/morituri/test/test_common_cache.py
new file mode 100644
index 0000000..55460a5
--- /dev/null
+++ b/morituri/test/test_common_cache.py
@@ -0,0 +1,23 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_cache -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+import os
+
+from morituri.common import cache
+
+from morituri.test import common as tcommon
+
+
+class ResultCacheTestCase(tcommon.TestCase):
+
+    def setUp(self):
+        self.cache = cache.ResultCache(
+            os.path.join(os.path.dirname(__file__), 'cache', 'result'))
+
+    def testGetResult(self):
+        result = self.cache.getRipResult('fe105a11')
+        self.assertEquals(result.object.title, "The Writing's on the Wall")
+
+    def testGetIds(self):
+        ids = self.cache.getIds()
+        self.assertEquals(ids, ['fe105a11'])
diff --git a/morituri/test/test_common_common.py b/morituri/test/test_common_common.py
new file mode 100644
index 0000000..073a93a
--- /dev/null
+++ b/morituri/test/test_common_common.py
@@ -0,0 +1,59 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_common -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+import os
+import tempfile
+
+from morituri.common import common
+
+from morituri.test import common as tcommon
+
+
+class ShrinkTestCase(tcommon.TestCase):
+
+    def testSufjan(self):
+        path = (u'morituri/Sufjan Stevens - Illinois/02. Sufjan Stevens - '
+                 'The Black Hawk War, or, How to Demolish an Entire '
+                 'Civilization and Still Feel Good About Yourself in the '
+                 'Morning, or, We Apologize for the Inconvenience but '
+                 'You\'re Going to Have to Leave Now, or, "I Have Fought '
+                 'the Big Knives and Will Continue to Fight Them Until They '
+                 'Are Off Our Lands!".flac')
+
+        shorter = common.shrinkPath(path)
+        self.failUnless(os.path.splitext(path)[0].startswith(
+            os.path.splitext(shorter)[0]))
+        self.failIfEquals(path, shorter)
+
+
+class FramesTestCase(tcommon.TestCase):
+
+    def testFrames(self):
+        self.assertEquals(common.framesToHMSF(123456), '00:27:26.06')
+
+
+class FormatTimeTestCase(tcommon.TestCase):
+
+    def testFormatTime(self):
+        self.assertEquals(common.formatTime(7202), '02:00:02.000')
+
+
+class GetRelativePathTestCase(tcommon.TestCase):
+
+    def testRelativeOutputDirectory(self):
+        directory = '.Placebo - Black Market Music (2000)'
+        cue = './' + directory + '/Placebo - Black Market Music (2000)'
+        track = './' + directory + '/01. Placebo - Taste in Men.flac'
+
+        self.assertEquals(common.getRelativePath(track, cue),
+            '01. Placebo - Taste in Men.flac')
+
+
+class GetRealPathTestCase(tcommon.TestCase):
+
+    def testRealWithBackslash(self):
+        fd, path = tempfile.mkstemp(suffix=u'back\\slash.flac')
+        refPath = os.path.join(os.path.dirname(path), 'fake.cue')
+
+        os.close(fd)
+        os.unlink(path)
diff --git a/morituri/test/test_common_config.py b/morituri/test/test_common_config.py
new file mode 100644
index 0000000..2930c1f
--- /dev/null
+++ b/morituri/test/test_common_config.py
@@ -0,0 +1,52 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_config -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+import os
+import tempfile
+
+from morituri.common import config
+
+from morituri.test import common as tcommon
+
+
+class OffsetTestCase(tcommon.TestCase):
+
+    def setUp(self):
+        fd, self._path = tempfile.mkstemp(suffix=u'.morituri.test.config')
+        os.close(fd)
+        self._config = config.Config(self._path)
+
+    def tearDown(self):
+        os.unlink(self._path)
+
+    def testAddReadOffset(self):
+        self.assertRaises(KeyError,
+            self._config.getReadOffset, 'PLEXTOR ', 'DVDR   PX-L890SA', '1.05')
+        self._config.setReadOffset('PLEXTOR ', 'DVDR   PX-L890SA', '1.05', 6)
+
+        # getting it from memory should work
+        offset = self._config.getReadOffset('PLEXTOR ', 'DVDR   PX-L890SA',
+            '1.05')
+        self.assertEquals(offset, 6)
+
+        # and so should getting it after reading it again
+        self._config.open()
+        offset = self._config.getReadOffset('PLEXTOR ', 'DVDR   PX-L890SA',
+            '1.05')
+        self.assertEquals(offset, 6)
+
+    def testAddReadOffsetSpaced(self):
+        self.assertRaises(KeyError,
+            self._config.getReadOffset, 'Slimtype', 'eSAU208   2     ', 'ML03')
+        self._config.setReadOffset('Slimtype', 'eSAU208   2     ', 'ML03', 6)
+
+        # getting it from memory should work
+        offset = self._config.getReadOffset(
+            'Slimtype', 'eSAU208   2     ', 'ML03')
+        self.assertEquals(offset, 6)
+
+        # and so should getting it after reading it again
+        self._config.open()
+        offset = self._config.getReadOffset(
+            'Slimtype', 'eSAU208   2     ', 'ML03')
+        self.assertEquals(offset, 6)
diff --git a/morituri/test/test_common_encode.py b/morituri/test/test_common_encode.py
new file mode 100644
index 0000000..4a74759
--- /dev/null
+++ b/morituri/test/test_common_encode.py
@@ -0,0 +1,146 @@
+# -*- Mode: Python; test-case-name: morituri.test.test_common_encode -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+import os
+import tempfile
+
+import gobject
+gobject.threads_init()
+
+import gst
+
+from morituri.common import encode
+
+from morituri.extern.task import task, gstreamer
+
+from morituri.test import common
+
+
class PathTestCase(common.TestCase):
    """Check that encode tasks handle tricky characters in file paths."""

    def _testSuffix(self, suffix):
        # because of https://bugzilla.gnome.org/show_bug.cgi?id=688625
        # we first create the file with a 'normal' filename, then rename
        self.runner = task.SyncRunner(verbose=False)
        fd, path = tempfile.mkstemp()

        # FIX: the caps originally read "channels =2"; the embedded space
        # made the shell split the caps into two arguments, breaking the
        # gst-launch pipeline.
        cmd = "gst-launch " \
            "audiotestsrc num-buffers=100 samplesperbuffer=1024 ! " \
            "audioconvert ! audio/x-raw-int,width=16,depth=16,channels=2 ! " \
            "wavenc ! " \
            "filesink location=\"%s\" > /dev/null 2>&1" % (
            gstreamer.quoteParse(path).encode('utf-8'), )
        self.debug('Running cmd %r' % cmd)
        os.system(cmd)
        self.failUnless(os.path.exists(path))
        os.close(fd)

        fd, newpath = tempfile.mkstemp(suffix=suffix)
        os.rename(path, newpath)

        encodetask = encode.EncodeTask(newpath, newpath + '.out',
            encode.WavProfile())
        self.runner.run(encodetask, verbose=False)
        os.close(fd)
        os.unlink(newpath)
        os.unlink(newpath + '.out')
+
+
class UnicodePathTestCase(PathTestCase, common.UnicodeTestMixin):
    """Exercise encoding through a path containing a non-ASCII character."""

    def testUnicodePath(self):
        # make sure a unicode path can be checksummed/encoded
        suffix = u'.morituri.test_encode.B\xeate Noire'
        self._testSuffix(suffix)
+
+
class NormalPathTestCase(PathTestCase):
    """Exercise encoding through paths with shell-sensitive quote characters."""

    def testSingleQuote(self):
        # a single quote in the path must survive shell quoting
        self._testSuffix(u".morituri.test_encode.Guns 'N Roses")

    def testDoubleQuote(self):
        # a double quote in the path must survive shell quoting
        self._testSuffix(u'.morituri.test_encode.12" edit')
+
+
class TagReadTestCase(common.TestCase):
    """Read tags from the bundled track.flac fixture and verify them."""

    def testRead(self):
        fixture = os.path.join(os.path.dirname(__file__), u'track.flac')
        runner = task.SyncRunner(verbose=False)
        self.runner = runner
        readtask = encode.TagReadTask(fixture)
        runner.run(readtask)
        self.failUnless(readtask.taglist)
        self.assertEquals(readtask.taglist['audio-codec'], 'FLAC')
        self.assertEquals(readtask.taglist['description'], 'audiotest wave')
+
+
class TagWriteTestCase(common.TestCase):
    """Write tags to a generated FLAC file and verify by reading them back."""

    def testWrite(self):
        fd, inpath = tempfile.mkstemp(suffix=u'.morituri.tagwrite.flac')

        # wave is pink-noise because a pure sine is encoded too efficiently
        # by flacenc and triggers not enough frames in parsing
        # FIXME: file a bug for this in GStreamer
        # FIX: audio/x-raw-int has no 'height' field; the sample-size field
        # is 'depth', so the caps use depth=16 instead of height=16.
        os.system('gst-launch '
            'audiotestsrc '
                'wave=pink-noise num-buffers=10 samplesperbuffer=588 ! '
            'audioconvert ! '
            'audio/x-raw-int,channels=2,width=16,depth=16,rate=44100 ! '
            'flacenc ! filesink location=%s > /dev/null 2>&1' % inpath)
        os.close(fd)

        fd, outpath = tempfile.mkstemp(suffix=u'.morituri.tagwrite.flac')
        # FIX: this descriptor was previously leaked
        os.close(fd)
        self.runner = task.SyncRunner(verbose=False)
        taglist = gst.TagList()
        taglist[gst.TAG_ARTIST] = 'Artist'
        taglist[gst.TAG_TITLE] = 'Title'

        t = encode.TagWriteTask(inpath, outpath, taglist)
        self.runner.run(t)

        # read the written file back and verify both old and new tags
        t = encode.TagReadTask(outpath)
        self.runner.run(t)
        self.failUnless(t.taglist)
        self.assertEquals(t.taglist['audio-codec'], 'FLAC')
        self.assertEquals(t.taglist['description'], 'audiotest wave')
        self.assertEquals(t.taglist[gst.TAG_ARTIST], 'Artist')
        self.assertEquals(t.taglist[gst.TAG_TITLE], 'Title')

        os.unlink(inpath)
        os.unlink(outpath)
+
+
class SafeRetagTestCase(common.TestCase):
    """Test SafeRetagTask against a freshly generated FLAC file."""

    def setUp(self):
        self._fd, self._path = tempfile.mkstemp(suffix=u'.morituri.retag.flac')

        # FIX: audio/x-raw-int has no 'height' field; the sample-size field
        # is 'depth', so the caps use depth=16 instead of height=16.
        os.system('gst-launch '
            'audiotestsrc '
                'num-buffers=40 samplesperbuffer=588 wave=pink-noise ! '
            'audioconvert ! '
            'audio/x-raw-int,channels=2,width=16,depth=16,rate=44100 ! '
            'flacenc ! filesink location=%s > /dev/null 2>&1' % self._path)
        os.close(self._fd)
        self.runner = task.SyncRunner(verbose=False)

    def tearDown(self):
        os.unlink(self._path)

    def testNoChange(self):
        # retagging with the tags the file already carries should succeed
        # without rewriting anything
        taglist = gst.TagList()
        taglist[gst.TAG_DESCRIPTION] = 'audiotest wave'
        taglist[gst.TAG_AUDIO_CODEC] = 'FLAC'

        t = encode.SafeRetagTask(self._path, taglist)
        self.runner.run(t)

    def testChange(self):
        # changing the description and adding an artist should trigger an
        # actual retag
        taglist = gst.TagList()
        taglist[gst.TAG_DESCRIPTION] = 'audiotest retagged'
        taglist[gst.TAG_AUDIO_CODEC] = 'FLAC'
        taglist[gst.TAG_ARTIST] = 'Artist'

        t = encode.SafeRetagTask(self._path, taglist)
        self.runner.run(t)
diff --git a/morituri/test/test_common_gstreamer.py b/morituri/test/test_common_gstreamer.py
new file mode 100644
index 0000000..94de5c4
--- /dev/null
+++ b/morituri/test/test_common_gstreamer.py
@@ -0,0 +1,21 @@
+# -*- Mode: Python -*-
+# vi:si:et:sw=4:sts=4:ts=4
+
+from morituri.common import gstreamer
+
+from morituri.test import common
+
+
class VersionTestCase(common.TestCase):
    """Sanity-check the version strings reported by the gstreamer module."""

    def testGStreamer(self):
        # core GStreamer is expected to be a 0.x release
        self.failUnless(gstreamer.gstreamerVersion().startswith('0.'))

    def testGSTPython(self):
        # the gst-python bindings are expected to be a 0.x release
        self.failUnless(gstreamer.gstPythonVersion().startswith('0.'))

    def testFlacEnc(self):
        # the flacenc element's plugin version is expected to be 0.x
        version = gstreamer.elementFactoryVersion('flacenc')
        self.failUnless(version.startswith('0.'))
diff --git a/morituri/test/test_common_musicbrainzngs.py b/morituri/test/test_common_musicbrainzngs.py
index 53627e3..ffe9c1b 100644
--- a/morituri/test/test_common_musicbrainzngs.py
+++ b/morituri/test/test_common_musicbrainzngs.py
@@ -19,6 +19,6 @@ class MetadataTestCase(unittest.TestCase):
         handle.close()
         discid = "wbjbST2jUHRZaB1inCyxxsL7Eqc-"
 
-        metadata = musicbrainzngs._getMetadata(response['release'], discid)
+        metadata = musicbrainzngs._getMetadata({}, response['release'], discid)
 
         self.failIf(metadata.release)
diff --git a/morituri/test/test_common_program.py b/morituri/test/test_common_program.py
index 7b49fb1..102540b 100644
--- a/morituri/test/test_common_program.py
+++ b/morituri/test/test_common_program.py
@@ -8,7 +8,7 @@ import unittest
 
 from morituri.result import result
 from morituri.common import program, accurip, musicbrainzngs
-from morituri.rip import cd
+from morituri.rip import common as rcommon
 
 
 class TrackImageVerifyTestCase(unittest.TestCase):
@@ -88,9 +88,10 @@ class PathTestCase(unittest.TestCase):
     def testStandardTemplateEmpty(self):
         prog = program.Program()
 
-        path = prog.getPath(u'/tmp', cd.DEFAULT_DISC_TEMPLATE, 'mbdiscid', 0)
+        path = prog.getPath(u'/tmp', rcommon.DEFAULT_DISC_TEMPLATE,
+            'mbdiscid', 0)
         self.assertEquals(path,
-            u'/tmp/Unknown Artist - mbdiscid/Unknown Artist - mbdiscid')
+            u'/tmp/unknown/Unknown Artist - mbdiscid/Unknown Artist - mbdiscid')
 
     def testStandardTemplateFilled(self):
         prog = program.Program()
@@ -99,9 +100,10 @@ class PathTestCase(unittest.TestCase):
         md.title = 'Grace'
         prog.metadata = md
 
-        path = prog.getPath(u'/tmp', cd.DEFAULT_DISC_TEMPLATE, 'mbdiscid', 0)
+        path = prog.getPath(u'/tmp', rcommon.DEFAULT_DISC_TEMPLATE,
+            'mbdiscid', 0)
         self.assertEquals(path,
-            u'/tmp/Jeff Buckley - Grace/Jeff Buckley - Grace')
+            u'/tmp/unknown/Jeff Buckley - Grace/Jeff Buckley - Grace')
 
     def testIssue66TemplateFilled(self):
         prog = program.Program()
diff --git a/morituri/test/test_image_table.py b/morituri/test/test_image_table.py
index de01407..5986b6e 100644
--- a/morituri/test/test_image_table.py
+++ b/morituri/test/test_image_table.py
@@ -9,7 +9,9 @@ from morituri.test import common as tcommon
 def h(i):
     return "0x%08x" % i
 
+
 class TrackTestCase(tcommon.TestCase):
+
     def testRepr(self):
         track = table.Track(1)
         self.assertEquals(repr(track), "<Track 01>")
@@ -17,6 +19,7 @@ class TrackTestCase(tcommon.TestCase):
         track.index(1, 100)
         self.failUnless(repr(track.indexes[1]).startswith('<Index 01 '))
 
+
 class LadyhawkeTestCase(tcommon.TestCase):
     # Ladyhawke - Ladyhawke - 0602517818866
     # contains 12 audio tracks and one data track
@@ -50,7 +53,9 @@ class LadyhawkeTestCase(tcommon.TestCase):
 
     def testMusicBrainz(self):
         # output from mb-submit-disc:
-        # http://mm.musicbrainz.org/bare/cdlookup.html?toc=1+12+195856+150+15687+31841+51016+66616+81352+99559+116070+133243+149997+161710+177832&tracks=12&id=KnpGsLhvH.lPrNc1PBL21lb9Bg4-
+        # http://mm.musicbrainz.org/bare/cdlookup.html?toc=1+12+195856+150+
+        # 15687+31841+51016+66616+81352+99559+116070+133243+149997+161710+
+        # 177832&tracks=12&id=KnpGsLhvH.lPrNc1PBL21lb9Bg4-
         # however, not (yet) in musicbrainz database
 
         self.assertEquals(self.table.getMusicBrainzDiscId(),
@@ -60,7 +65,8 @@ class LadyhawkeTestCase(tcommon.TestCase):
         self.assertEquals(self.table.getAccurateRipIds(), (
             "0013bd5a", "00b8d489"))
         self.assertEquals(self.table.getAccurateRipURL(),
-        "http://www.accuraterip.com/accuraterip/a/5/d/dBAR-012-0013bd5a-00b8d489-c60af50d.bin")
+        "http://www.accuraterip.com/accuraterip/a/5/d/"
+        "dBAR-012-0013bd5a-00b8d489-c60af50d.bin")
 
     def testDuration(self):
         self.assertEquals(self.table.duration(), 2761413)
@@ -91,6 +97,7 @@ class MusicBrainzTestCase(tcommon.TestCase):
         self.assertEquals(self.table.getMusicBrainzDiscId(),
             '49HHV7Eb8UKF3aQiNmu1GR8vKTY-')
 
+
 class PregapTestCase(tcommon.TestCase):
 
     def setUp(self):
@@ -108,5 +115,3 @@ class PregapTestCase(tcommon.TestCase):
     def testPreGap(self):
         self.assertEquals(self.table.tracks[0].getPregap(), 0)
         self.assertEquals(self.table.tracks[1].getPregap(), 200)
-
-
diff --git a/morituri/test/test_image_toc.py b/morituri/test/test_image_toc.py
index 8616a5b..02b51df 100644
--- a/morituri/test/test_image_toc.py
+++ b/morituri/test/test_image_toc.py
@@ -93,7 +93,6 @@ class CureTestCase(common.TestCase):
             'http://www.accuraterip.com/accuraterip/'
             '3/c/4/dBAR-013-0019d4c3-00fe8924-b90c650d.bin')
 
-
     def testGetRealPath(self):
         self.assertRaises(KeyError, self.toc.getRealPath, u'track01.wav')
         (fd, path) = tempfile.mkstemp(suffix=u'.morituri.test.wav')
@@ -211,7 +210,10 @@ class LadyhawkeTestCase(common.TestCase):
         self.assertEquals(self.toc.table.getMusicBrainzDiscId(),
             "KnpGsLhvH.lPrNc1PBL21lb9Bg4-")
         self.assertEquals(self.toc.table.getMusicBrainzSubmitURL(),
-            "http://mm.musicbrainz.org/bare/cdlookup.html?toc=1+12+195856+150+15687+31841+51016+66616+81352+99559+116070+133243+149997+161710+177832&tracks=12&id=KnpGsLhvH.lPrNc1PBL21lb9Bg4-")
+            "http://mm.musicbrainz.org/bare/cdlookup.html?toc="
+            "1+12+195856+150+15687+31841+51016+66616+81352+99559+"
+            "116070+133243+149997+161710+177832&"
+            "tracks=12&id=KnpGsLhvH.lPrNc1PBL21lb9Bg4-")
 
     # FIXME: I don't trust this toc, but I can't find the CD anymore
 
@@ -227,6 +229,7 @@ class LadyhawkeTestCase(common.TestCase):
         lines = data.split("\n")
         self.assertEquals(lines[0], "REM DISCID C60AF50D")
 
+
 class CapitalMergeTestCase(common.TestCase):
 
     def setUp(self):
@@ -249,15 +252,19 @@ class CapitalMergeTestCase(common.TestCase):
         self.table.absolutize()
         self.assertEquals(self.table.getCDDBDiscId(), 'b910140c')
         # output from cd-discid:
-        # b910140c 12 24320 44855 64090 77885 88095 104020 118245 129255 141765 164487 181780 209250 4440
+        # b910140c 12 24320 44855 64090 77885 88095 104020 118245 129255 141765
+        # 164487 181780 209250 4440
 
     def testMusicBrainz(self):
-        # URL to submit: http://mm.musicbrainz.org/bare/cdlookup.html?toc=1+11+197850+24320+44855+64090+77885+88095+104020+118245+129255+141765+164487+181780&tracks=11&id=MAj3xXf6QMy7G.BIFOyHyq4MySE-
+        # URL to submit: http://mm.musicbrainz.org/bare/cdlookup.html?toc=1+11+
+        # 197850+24320+44855+64090+77885+88095+104020+118245+129255+141765+
+        # 164487+181780&tracks=11&id=MAj3xXf6QMy7G.BIFOyHyq4MySE-
         self.assertEquals(self.table.getMusicBrainzDiscId(),
             "MAj3xXf6QMy7G.BIFOyHyq4MySE-")
 
     def testDuration(self):
-        # this matches track 11 end sector - track 1 start sector on musicbrainz
+        # this matches track 11 end sector - track 1 start sector on
+        # musicbrainz
         # compare to 3rd and 4th value in URL above
         self.assertEquals(self.table.getFrameLength(), 173530)
         self.assertEquals(self.table.duration(), 2313733)
diff --git a/morituri/test/test_program_cdparanoia.py b/morituri/test/test_program_cdparanoia.py
index fe77b94..58e0bbc 100644
--- a/morituri/test/test_program_cdparanoia.py
+++ b/morituri/test/test_program_cdparanoia.py
@@ -2,12 +2,15 @@
 # vi:si:et:sw=4:sts=4:ts=4
 
 import os
-import unittest
+
+from morituri.extern.task import task
 
 from morituri.program import cdparanoia
 
+from morituri.test import common
+
 
-class ParseTestCase(unittest.TestCase):
+class ParseTestCase(common.TestCase):
 
     def setUp(self):
         # report from Afghan Whigs - Sweet Son Of A Bitch
@@ -25,7 +28,7 @@ class ParseTestCase(unittest.TestCase):
         self.assertEquals(q, '99.7 %')
 
 
-class ErrorTestCase(unittest.TestCase):
+class ErrorTestCase(common.TestCase):
 
     def setUp(self):
         # report from a rip with offset -1164 causing scsi errors
@@ -41,3 +44,31 @@ class ErrorTestCase(unittest.TestCase):
 
         q = '%.01f %%' % (self._parser.getTrackQuality() * 100.0, )
         self.assertEquals(q, '79.6 %')
+
+
class VersionTestCase(common.TestCase):
    """getCdParanoiaVersion should report a non-empty version value."""

    def testGetVersion(self):
        version = cdparanoia.getCdParanoiaVersion()
        self.failUnless(version)
+
+
class AnalyzeFileTask(cdparanoia.AnalyzeTask):
    # Test double: replays a captured log file through the AnalyzeTask
    # parser by cat-ing the file instead of invoking cdparanoia.

    def __init__(self, path):
        # deliberately skips the parent __init__; only the command to
        # execute is replaced
        self.command = ['cat', path]

    def readbytesout(self, bytes):
        # the fixture was captured from stderr, but cat emits it on stdout;
        # forward stdout data to the stderr handler so it gets parsed
        self.readbyteserr(bytes)
+
+
class CacheTestCase(common.TestCase):
    """Verify a known drive's captured output is seen as defeating the cache."""

    def testDefeatsCache(self):
        runner = task.SyncRunner(verbose=False)
        self.runner = runner

        # replay a captured cdparanoia stderr log for the PX-L890SA drive
        logpath = os.path.join(os.path.dirname(__file__),
            'cdparanoia', 'PX-L890SA.cdparanoia-A.stderr')
        analyzer = AnalyzeFileTask(logpath)
        runner.run(analyzer)
        self.failUnless(analyzer.defeatsCache)
+

-- 
morituri packaging



More information about the pkg-multimedia-commits mailing list