[SCM] Debian packaging of libdbix-class-perl branch, master, updated. debian/0.08195-1-6-g30f69dd
Olly Betts
olly at survex.com
Wed Dec 7 01:25:29 UTC 2011
The following commit has been merged in the master branch:
commit 9f8c21002d21b0a4344111d2706e23c0dbf030a6
Author: Olly Betts <olly at survex.com>
Date: Wed Dec 7 13:58:28 2011 +1300
Imported Upstream version 0.08196
diff --git a/Changes b/Changes
index a25af19..1590ce6 100644
--- a/Changes
+++ b/Changes
@@ -1,5 +1,32 @@
Revision history for DBIx::Class
+0.08196 2011-11-29 05:35 (UTC)
+ * Fixes
+ - Fix tests for DBD::SQLite >= 1.34.
+ - Fix test failures with DBICTEST_SQLITE_USE_FILE set
+ - Fix the find() condition heuristics being invoked even when the
+ call defaults to 'primary' (i.e. when invoked with bare values)
+ - Throw much clearer error on incorrect inflation spec
+ - Fix incorrect storage behavior when first call on a fresh schema
+ is with_deferred_fk_checks
+ - Fix incorrect dependency on Test::Simple/Builder (RT#72282)
+ - Fix uninitialized warning in ::Storage::Sybase::ASE
+ - Improve/cache DBD-specific datatype bind checks (also solves a
+ nasty memleak with version.pm on multiple ->VERSION invocations)
+ - The internal carp module now correctly skips CAG frames when
+ reporting a callsite
+ - Fix test failures on perl < 5.8.7 and new Package::Stash::XS
+ - Fix TxnScopeGuard not behaving correctly when $@ is set at the
+ time of $guard instantiation
+ - Fix the join/prefetch resolver when dealing with ''/undef/()
+ relation specifications
+
+ * Misc
+ - No longer depend on Variable::Magic now that a pure-perl
+ namespace::clean is available
+ - Drop Oracle's Math::BigInt req down to 1.80 - no fixes concerning
+ us were made since
+
0.08195 2011-07-27 16:20 (UTC)
* Fixes
- Fix horrible oversight in the Oracle sqlmaker when dealing with
diff --git a/MANIFEST b/MANIFEST
index 1c2e333..b16614a 100644
--- a/MANIFEST
+++ b/MANIFEST
@@ -98,6 +98,7 @@ lib/DBIx/Class/Relationship/ManyToMany.pm
lib/DBIx/Class/Relationship/ProxyMethods.pm
lib/DBIx/Class/ResultClass/HashRefInflator.pm
lib/DBIx/Class/ResultSet.pm
+lib/DBIx/Class/ResultSet/Pager.pm
lib/DBIx/Class/ResultSetColumn.pm
lib/DBIx/Class/ResultSetManager.pm
lib/DBIx/Class/ResultSetProxy.pm
@@ -462,6 +463,7 @@ t/lib/DBICTest/SyntaxErrorComponent3.pm
t/lib/DBICTest/Taint/Classes/Auto.pm
t/lib/DBICTest/Taint/Classes/Manual.pm
t/lib/DBICTest/Taint/Namespaces/Result/Test.pm
+t/lib/DBICTest/Util/OverrideRequire.pm
t/lib/DBICVersion_v1.pm
t/lib/DBICVersion_v2.pm
t/lib/DBICVersion_v3.pm
diff --git a/META.yml b/META.yml
index f284968..1390296 100644
--- a/META.yml
+++ b/META.yml
@@ -6,9 +6,9 @@ build_requires:
DBD::SQLite: 1.29
File::Temp: 0.22
Package::Stash: 0.28
- Test::Builder: 0.33
+ Test::Builder: 0.94
Test::Exception: 0.31
- Test::More: 0.92
+ Test::More: 0.94
Test::Warn: 0.21
configure_requires:
ExtUtils::MakeMaker: 6.42
@@ -31,6 +31,7 @@ no_index:
package:
- DBIx::Class::Storage::DBIHacks
- DBIx::Class::Carp
+ - DBIx::Class::ResultSet::Pager
requires:
Class::Accessor::Grouped: 0.10002
Class::C3::Componentised: 1.0009
@@ -50,7 +51,6 @@ requires:
Scope::Guard: 0.03
Sub::Name: 0.04
Try::Tiny: 0.04
- Variable::Magic: 0.44
namespace::clean: 0.20
perl: 5.8.1
resources:
@@ -60,4 +60,4 @@ resources:
homepage: http://www.dbix-class.org/
license: http://dev.perl.org/licenses/
repository: git://git.shadowcat.co.uk/dbsrgits/DBIx-Class.git
-version: 0.08195
+version: 0.08196
diff --git a/Makefile.PL b/Makefile.PL
index 7fd81e6..b41e281 100644
--- a/Makefile.PL
+++ b/Makefile.PL
@@ -32,10 +32,11 @@ my $build_requires = {
my $test_requires = {
'File::Temp' => '0.22',
- 'Test::Builder' => '0.33',
'Test::Exception' => '0.31',
- 'Test::More' => '0.92',
'Test::Warn' => '0.21',
+ 'Test::More' => '0.94',
+ # not sure if this is necessary at all, ask schwern some day
+ 'Test::Builder' => '0.94',
# this is already a dep of n::c, but just in case - used by t/55namespaces_cleaned.t
# remove and do a manual glob-collection if n::c is no longer a dep
@@ -53,20 +54,21 @@ my $runtime_requires = {
'Hash::Merge' => '0.12',
'MRO::Compat' => '0.09',
'Module::Find' => '0.06',
+ 'namespace::clean' => '0.20',
'Path::Class' => '0.18',
'Scope::Guard' => '0.03',
'SQL::Abstract' => '1.72',
'Try::Tiny' => '0.04',
- 'Data::Compare' => '1.22',
# XS (or XS-dependent) libs
'DBI' => '1.57',
- 'namespace::clean' => '0.20',
'Sub::Name' => '0.04',
- 'Variable::Magic' => '0.44',
# dual-life corelibs needing a specific bugfixed version
'File::Path' => '2.07',
+
+ # FIXME - temporary, needs throwing out for something more efficient
+ 'Data::Compare' => '1.22',
};
@@ -279,6 +281,7 @@ no_index directory => $_ for (qw|
no_index package => $_ for (qw/
DBIx::Class::Storage::DBIHacks
DBIx::Class::Carp
+ DBIx::Class::ResultSet::Pager
/);
WriteAll();
diff --git a/README b/README
index 3360506..f41ed42 100644
--- a/README
+++ b/README
@@ -289,6 +289,8 @@ CONTRIBUTORS
milki: Jonathan Chu <milki at rescomp.berkeley.edu>
+ mstratman: Mark A. Stratman <stratman at gmail.com>
+
ned: Neil de Carteret
nigel: Nigel Metheringham <nigelm at cpan.org>
@@ -315,6 +317,8 @@ CONTRIBUTORS
peter: Peter Collingbourne <peter at pcc.me.uk>
+ Peter Valdemar Mørch <peter at morch.com>
+
phaylon: Robert Sedlacek <phaylon at dunkelheit.at>
plu: Johannes Plunien <plu at cpan.org>
@@ -388,7 +392,7 @@ CONTRIBUTORS
zamolxes: Bogdan Lucaciu <bogdan at wiz.ro>
COPYRIGHT
- Copyright (c) 2005 - 2010 the DBIx::Class "AUTHOR" and "CONTRIBUTORS" as
+ Copyright (c) 2005 - 2011 the DBIx::Class "AUTHOR" and "CONTRIBUTORS" as
listed above.
LICENSE
diff --git a/lib/DBIx/Class.pm b/lib/DBIx/Class.pm
index 772c026..412cb26 100644
--- a/lib/DBIx/Class.pm
+++ b/lib/DBIx/Class.pm
@@ -11,12 +11,13 @@ our $VERSION;
# $VERSION declaration must stay up here, ahead of any other package
# declarations, as to not confuse various modules attempting to determine
# this ones version, whether that be s.c.o. or Module::Metadata, etc
-$VERSION = '0.08195';
+$VERSION = '0.08196';
$VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases
BEGIN {
- package DBIx::Class::_ENV_;
+ package # hide from pause
+ DBIx::Class::_ENV_;
if ($] < 5.009_005) {
require MRO::Compat;
@@ -70,7 +71,7 @@ use base qw/DBIx::Class::Componentised DBIx::Class::AccessorGroup/;
use DBIx::Class::StartupCheck;
__PACKAGE__->mk_group_accessors(inherited => '_skip_namespace_frames');
-__PACKAGE__->_skip_namespace_frames('^DBIx::Class|^SQL::Abstract|^Try::Tiny');
+__PACKAGE__->_skip_namespace_frames('^DBIx::Class|^SQL::Abstract|^Try::Tiny|^Class::Accessor::Grouped$');
sub mk_classdata {
shift->mk_classaccessor(@_);
@@ -405,6 +406,8 @@ michaelr: Michael Reddick <michael.reddick at gmail.com>
milki: Jonathan Chu <milki at rescomp.berkeley.edu>
+mstratman: Mark A. Stratman <stratman at gmail.com>
+
ned: Neil de Carteret
nigel: Nigel Metheringham <nigelm at cpan.org>
@@ -421,7 +424,7 @@ Numa: Dan Sully <daniel at cpan.org>
ovid: Curtis "Ovid" Poe <ovid at cpan.org>
-oyse: Øystein Torget <oystein.torget at dnv.com>
+oyse: E<Oslash>ystein Torget <oystein.torget at dnv.com>
paulm: Paul Makepeace
@@ -431,6 +434,8 @@ perigrin: Chris Prather <chris at prather.org>
peter: Peter Collingbourne <peter at pcc.me.uk>
+Peter Valdemar ME<oslash>rch <peter at morch.com>
+
phaylon: Robert Sedlacek <phaylon at dunkelheit.at>
plu: Johannes Plunien <plu at cpan.org>
@@ -505,7 +510,7 @@ zamolxes: Bogdan Lucaciu <bogdan at wiz.ro>
=head1 COPYRIGHT
-Copyright (c) 2005 - 2010 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
+Copyright (c) 2005 - 2011 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
as listed above.
=head1 LICENSE
diff --git a/lib/DBIx/Class/Carp.pm b/lib/DBIx/Class/Carp.pm
index 5f40094..6bec374 100644
--- a/lib/DBIx/Class/Carp.pm
+++ b/lib/DBIx/Class/Carp.pm
@@ -5,9 +5,9 @@ use warnings;
# This is here instead of DBIx::Class because of load-order issues
BEGIN {
- ## FIXME FIXME FIXME - something is tripping up V::M on 5.8.1, leading
- # to segfaults. When n::c/B::H::EndOfScope is rewritten in terms of tie()
- # see if this starts working
+ # something is tripping up V::M on 5.8.1, leading to segfaults.
+ # A similar test in n::c itself is disabled on 5.8.1 for the same
+ # reason. There isn't much motivation to try to find why it happens
*DBIx::Class::_ENV_::BROKEN_NAMESPACE_CLEAN = ($] < 5.008005)
? sub () { 1 }
: sub () { 0 }
@@ -30,6 +30,14 @@ sub __find_caller {
my @f;
while (@f = caller($fr_num++)) {
last unless $f[0] =~ $skip_pattern;
+
+ if (
+ $f[0]->can('_skip_namespace_frames')
+ and
+ my $extra_skip = $f[0]->_skip_namespace_frames
+ ) {
+ $skip_pattern = qr/$skip_pattern|$extra_skip/;
+ }
}
my ($ln, $calling) = @f # if empty - nothing matched - full stack
@@ -133,7 +141,8 @@ In addition to the classic interface:
this module also supports a class-data based way to specify the exclusion
regex. A message is only carped from a callsite that matches neither the
closed over string, nor the value of L</_skip_namespace_frames> as declared
-on the B<first> callframe origin.
+on any callframe already skipped due to the same mechanism. This is to ensure
+that intermediate callsites can declare their own additional skip-namespaces.
=head1 CLASS ATTRIBUTES
diff --git a/lib/DBIx/Class/Optional/Dependencies.pm b/lib/DBIx/Class/Optional/Dependencies.pm
index 5e63d7b..173ae32 100644
--- a/lib/DBIx/Class/Optional/Dependencies.pm
+++ b/lib/DBIx/Class/Optional/Dependencies.pm
@@ -38,7 +38,7 @@ my $datetime_basic = {
};
my $id_shortener = {
- 'Math::BigInt' => '1.89',
+ 'Math::BigInt' => '1.80',
'Math::Base36' => '0.07',
};
diff --git a/lib/DBIx/Class/Optional/Dependencies.pod b/lib/DBIx/Class/Optional/Dependencies.pod
index a86c8f8..12be715 100644
--- a/lib/DBIx/Class/Optional/Dependencies.pod
+++ b/lib/DBIx/Class/Optional/Dependencies.pod
@@ -18,7 +18,7 @@ Somewhere in your build-file (e.g. L<Module::Install>'s Makefile.PL):
...
- configure_requires 'DBIx::Class' => '0.08195';
+ configure_requires 'DBIx::Class' => '0.08196';
require DBIx::Class::Optional::Dependencies;
@@ -279,7 +279,7 @@ Modules required to connect to Oracle
=item * Math::Base36 >= 0.07
-=item * Math::BigInt >= 1.89
+=item * Math::BigInt >= 1.80
=back
diff --git a/lib/DBIx/Class/Relationship.pm b/lib/DBIx/Class/Relationship.pm
index d4141d1..f2c33f5 100644
--- a/lib/DBIx/Class/Relationship.pm
+++ b/lib/DBIx/Class/Relationship.pm
@@ -159,7 +159,7 @@ OR
=item cond
A hashref, arrayref or coderef specifying a custom join expression. For
-more info see L<DBIx::Class::Relationship/condition>.
+more info see L<DBIx::Class::Relationship::Base/condition>.
=back
@@ -271,7 +271,7 @@ OR
=item cond
A hashref, arrayref or coderef specifying a custom join expression. For
-more info see L<DBIx::Class::Relationship/condition>.
+more info see L<DBIx::Class::Relationship::Base/condition>.
=back
@@ -392,7 +392,7 @@ OR
=item cond
A hashref, arrayref or coderef specifying a custom join expression. For
-more info see L<DBIx::Class::Relationship/condition>.
+more info see L<DBIx::Class::Relationship::Base/condition>.
=back
@@ -487,7 +487,7 @@ OR
=item cond
A hashref, arrayref or coderef specifying a custom join expression. For
-more info see L<DBIx::Class::Relationship/condition>.
+more info see L<DBIx::Class::Relationship::Base/condition>.
=back
diff --git a/lib/DBIx/Class/ResultSet.pm b/lib/DBIx/Class/ResultSet.pm
index aaff394..d883551 100644
--- a/lib/DBIx/Class/ResultSet.pm
+++ b/lib/DBIx/Class/ResultSet.pm
@@ -702,22 +702,33 @@ sub find {
my $rsrc = $self->result_source;
+ my $constraint_name;
+ if (exists $attrs->{key}) {
+ $constraint_name = defined $attrs->{key}
+ ? $attrs->{key}
+ : $self->throw_exception("An undefined 'key' resultset attribute makes no sense")
+ ;
+ }
+
# Parse out the condition from input
my $call_cond;
+
if (ref $_[0] eq 'HASH') {
$call_cond = { %{$_[0]} };
}
else {
- my $constraint = exists $attrs->{key} ? $attrs->{key} : 'primary';
- my @c_cols = $rsrc->unique_constraint_columns($constraint);
+ # if only values are supplied we need to default to 'primary'
+ $constraint_name = 'primary' unless defined $constraint_name;
+
+ my @c_cols = $rsrc->unique_constraint_columns($constraint_name);
$self->throw_exception(
- "No constraint columns, maybe a malformed '$constraint' constraint?"
+ "No constraint columns, maybe a malformed '$constraint_name' constraint?"
) unless @c_cols;
$self->throw_exception (
'find() expects either a column/value hashref, or a list of values '
- . "corresponding to the columns of the specified unique constraint '$constraint'"
+ . "corresponding to the columns of the specified unique constraint '$constraint_name'"
) unless @c_cols == @_;
$call_cond = {};
@@ -748,11 +759,11 @@ sub find {
my $alias = exists $attrs->{alias} ? $attrs->{alias} : $self->{attrs}{alias};
my $final_cond;
- if (exists $attrs->{key}) {
+ if (defined $constraint_name) {
$final_cond = $self->_qualify_cond_columns (
$self->_build_unique_cond (
- $attrs->{key},
+ $constraint_name,
$call_cond,
),
@@ -2115,107 +2126,6 @@ C<total_entries> on the L<Data::Page> object.
=cut
-# make a wizard good for both a scalar and a hashref
-my $mk_lazy_count_wizard = sub {
- require Variable::Magic;
-
- my $stash = { total_rs => shift };
- my $slot = shift; # only used by the hashref magic
-
- my $magic = Variable::Magic::wizard (
- data => sub { $stash },
-
- (!$slot)
- ? (
- # the scalar magic
- get => sub {
- # set value lazily, and dispell for good
- ${$_[0]} = $_[1]{total_rs}->count;
- Variable::Magic::dispell (${$_[0]}, $_[1]{magic_selfref});
- return 1;
- },
- set => sub {
- # an explicit set implies dispell as well
- # the unless() is to work around "fun and giggles" below
- Variable::Magic::dispell (${$_[0]}, $_[1]{magic_selfref})
- unless (caller(2))[3] eq 'DBIx::Class::ResultSet::pager';
- return 1;
- },
- )
- : (
- # the uvar magic
- fetch => sub {
- if ($_[2] eq $slot and !$_[1]{inactive}) {
- my $cnt = $_[1]{total_rs}->count;
- $_[0]->{$slot} = $cnt;
-
- # attempting to dispell in a fetch handle (works in store), seems
- # to invariable segfault on 5.10, 5.12, 5.13 :(
- # so use an inactivator instead
- #Variable::Magic::dispell (%{$_[0]}, $_[1]{magic_selfref});
- $_[1]{inactive}++;
- }
- return 1;
- },
- store => sub {
- if (! $_[1]{inactive} and $_[2] eq $slot) {
- #Variable::Magic::dispell (%{$_[0]}, $_[1]{magic_selfref});
- $_[1]{inactive}++
- unless (caller(2))[3] eq 'DBIx::Class::ResultSet::pager';
- }
- return 1;
- },
- ),
- );
-
- $stash->{magic_selfref} = $magic;
- weaken ($stash->{magic_selfref}); # this fails on 5.8.1
-
- return $magic;
-};
-
-# the tie class for 5.8.1
-{
- package # hide from pause
- DBIx::Class::__DBIC_LAZY_RS_COUNT__;
- use base qw/Tie::Hash/;
-
- sub FIRSTKEY { my $dummy = scalar keys %{$_[0]{data}}; each %{$_[0]{data}} }
- sub NEXTKEY { each %{$_[0]{data}} }
- sub EXISTS { exists $_[0]{data}{$_[1]} }
- sub DELETE { delete $_[0]{data}{$_[1]} }
- sub CLEAR { %{$_[0]{data}} = () }
- sub SCALAR { scalar %{$_[0]{data}} }
-
- sub TIEHASH {
- $_[1]{data} = {%{$_[1]{selfref}}};
- %{$_[1]{selfref}} = ();
- Scalar::Util::weaken ($_[1]{selfref});
- return bless ($_[1], $_[0]);
- };
-
- sub FETCH {
- if ($_[1] eq $_[0]{slot}) {
- my $cnt = $_[0]{data}{$_[1]} = $_[0]{total_rs}->count;
- untie %{$_[0]{selfref}};
- %{$_[0]{selfref}} = %{$_[0]{data}};
- return $cnt;
- }
- else {
- $_[0]{data}{$_[1]};
- }
- }
-
- sub STORE {
- $_[0]{data}{$_[1]} = $_[2];
- if ($_[1] eq $_[0]{slot}) {
- untie %{$_[0]{selfref}};
- %{$_[0]{selfref}} = %{$_[0]{data}};
- }
- $_[2];
- }
-}
-
sub pager {
my ($self) = @_;
@@ -2234,70 +2144,15 @@ sub pager {
# with a subselect) to get the real total count
my $count_attrs = { %$attrs };
delete $count_attrs->{$_} for qw/rows offset page pager/;
- my $total_rs = (ref $self)->new($self->result_source, $count_attrs);
-
-### the following may seem awkward and dirty, but it's a thought-experiment
-### necessary for future development of DBIx::DS. Do *NOT* change this code
-### before talking to ribasushi/mst
+ my $total_rs = (ref $self)->new($self->result_source, $count_attrs);
- require Data::Page;
- my $pager = Data::Page->new(
- 0, #start with an empty set
+ require DBIx::Class::ResultSet::Pager;
+ return $self->{pager} = DBIx::Class::ResultSet::Pager->new(
+ sub { $total_rs->count }, #lazy-get the total
$attrs->{rows},
$self->{attrs}{page},
);
-
- my $data_slot = 'total_entries';
-
- # Since we are interested in a cached value (once it's set - it's set), every
- # technique will detach from the magic-host once the time comes to fire the
- # ->count (or in the segfaulting case of >= 5.10 it will deactivate itself)
-
- if ($] < 5.008003) {
- # 5.8.1 throws 'Modification of a read-only value attempted' when one tries
- # to weakref the magic container :(
- # tested on 5.8.1
- tie (%$pager, 'DBIx::Class::__DBIC_LAZY_RS_COUNT__',
- { slot => $data_slot, total_rs => $total_rs, selfref => $pager }
- );
- }
- elsif ($] < 5.010) {
- # We can use magic on the hash value slot. It's interesting that the magic is
- # attached to the hash-slot, and does *not* stop working once I do the dummy
- # assignments after the cast()
- # tested on 5.8.3 and 5.8.9
- my $magic = $mk_lazy_count_wizard->($total_rs);
- Variable::Magic::cast ( $pager->{$data_slot}, $magic );
-
- # this is for fun and giggles
- $pager->{$data_slot} = -1;
- $pager->{$data_slot} = 0;
-
- # this does not work for scalars, but works with
- # uvar magic below
- #my %vals = %$pager;
- #%$pager = ();
- #%{$pager} = %vals;
- }
- else {
- # And the uvar magic
- # works on 5.10.1, 5.12.1 and 5.13.4 in its current form,
- # however see the wizard maker for more notes
- my $magic = $mk_lazy_count_wizard->($total_rs, $data_slot);
- Variable::Magic::cast ( %$pager, $magic );
-
- # still works
- $pager->{$data_slot} = -1;
- $pager->{$data_slot} = 0;
-
- # this now works
- my %vals = %$pager;
- %$pager = ();
- %{$pager} = %vals;
- }
-
- return $self->{pager} = $pager;
}
=head2 page
@@ -3585,6 +3440,7 @@ sub _merge_joinpref_attr {
$position++;
}
my ($import_key) = ( ref $import_element eq 'HASH' ) ? keys %{$import_element} : ($import_element);
+ $import_key = '' if not defined $import_key;
if ($best_candidate->{score} == 0 || exists $seen_keys->{$import_key}) {
push( @{$orig}, $import_element );
@@ -3699,6 +3555,11 @@ sub STORABLE_freeze {
# A cursor in progress can't be serialized (and would make little sense anyway)
delete $to_serialize->{cursor};
+ # nor is it sensical to store a not-yet-fired-count pager
+ if ($to_serialize->{pager} and ref $to_serialize->{pager}{total_entries} eq 'CODE') {
+ delete $to_serialize->{pager};
+ }
+
Storable::nfreeze($to_serialize);
}
diff --git a/lib/DBIx/Class/ResultSet/Pager.pm b/lib/DBIx/Class/ResultSet/Pager.pm
new file mode 100644
index 0000000..e8510c3
--- /dev/null
+++ b/lib/DBIx/Class/ResultSet/Pager.pm
@@ -0,0 +1,21 @@
+package # hide from pause
+ DBIx::Class::ResultSet::Pager;
+
+use warnings;
+use strict;
+
+use base 'Data::Page';
+use mro 'c3';
+
+# simple support for lazy totals
+sub _total_entries_accessor {
+ if (@_ == 1 and ref $_[0]->{total_entries} eq 'CODE') {
+ return $_[0]->{total_entries} = $_[0]->{total_entries}->();
+ }
+
+ return shift->next::method(@_);
+}
+
+sub _skip_namespace_frames { qr/^Data::Page/ }
+
+1;
diff --git a/lib/DBIx/Class/ResultSource.pm b/lib/DBIx/Class/ResultSource.pm
index 9489f49..c523745 100644
--- a/lib/DBIx/Class/ResultSource.pm
+++ b/lib/DBIx/Class/ResultSource.pm
@@ -1041,6 +1041,20 @@ sub resultset {
);
}
+=head2 name
+
+=over 4
+
+=item Arguments: None
+
+=item Result value: $name
+
+=back
+
+Returns the name of the result source, which will typically be the table
+name. This may be a scalar reference if the result source has a non-standard
+name.
+
=head2 source_name
=over 4
@@ -1447,7 +1461,7 @@ sub _resolve_join {
$jpath = [@$jpath]; # copy
- if (not defined $join) {
+ if (not defined $join or not length $join) {
return ();
}
elsif (ref $join eq 'ARRAY') {
@@ -1714,7 +1728,7 @@ sub _resolve_prefetch {
my ($self, $pre, $alias, $alias_map, $order, $collapse, $pref_path) = @_;
$pref_path ||= [];
- if (not defined $pre) {
+ if (not defined $pre or not length $pre) {
return ();
}
elsif( ref $pre eq 'ARRAY' ) {
diff --git a/lib/DBIx/Class/Row.pm b/lib/DBIx/Class/Row.pm
index 4eaa431..60c854b 100644
--- a/lib/DBIx/Class/Row.pm
+++ b/lib/DBIx/Class/Row.pm
@@ -1139,23 +1139,38 @@ sub inflate_result {
foreach my $pre (keys %{$prefetch||{}}) {
- my $pre_source = $source->related_source($pre)
- or $class->throw_exception("Can't prefetch non-existent relationship ${pre}");
-
- my $accessor = $source->relationship_info($pre)->{attrs}{accessor}
- or $class->throw_exception("No accessor for prefetched $pre");
-
- my @pre_vals;
+ my (@pre_vals, $is_multi);
if (ref $prefetch->{$pre}[0] eq 'ARRAY') {
+ $is_multi = 1;
@pre_vals = @{$prefetch->{$pre}};
}
- elsif ($accessor eq 'multi') {
- $class->throw_exception("Implicit prefetch (via select/columns) not supported with accessor 'multi'");
- }
else {
@pre_vals = $prefetch->{$pre};
}
+ my $pre_source = try {
+ $source->related_source($pre)
+ }
+ catch {
+ $class->throw_exception(sprintf
+
+ "Can't inflate manual prefetch into non-existent relationship '%s' from '%s', "
+ . "check the inflation specification (columns/as) ending in '%s.%s'.",
+
+ $pre,
+ $source->source_name,
+ $pre,
+ (keys %{$pre_vals[0][0]})[0] || 'something.something...',
+ );
+ };
+
+ my $accessor = $source->relationship_info($pre)->{attrs}{accessor}
+ or $class->throw_exception("No accessor type declared for prefetched $pre");
+
+ if (! $is_multi and $accessor eq 'multi') {
+ $class->throw_exception("Manual prefetch (via select/columns) not supported with accessor 'multi'");
+ }
+
my @pre_objects;
for my $me_pref (@pre_vals) {
diff --git a/lib/DBIx/Class/SQLMaker.pm b/lib/DBIx/Class/SQLMaker.pm
index c4bd627..fd42594 100644
--- a/lib/DBIx/Class/SQLMaker.pm
+++ b/lib/DBIx/Class/SQLMaker.pm
@@ -441,15 +441,18 @@ sub _gen_from_blocks {
sub _from_chunk_to_sql {
my ($self, $fromspec) = @_;
- return join (' ', $self->_SWITCH_refkind($fromspec, {
- SCALARREF => sub {
+ return join (' ', do {
+ if (! ref $fromspec) {
+ $self->_quote($fromspec);
+ }
+ elsif (ref $fromspec eq 'SCALAR') {
$$fromspec;
- },
- ARRAYREFREF => sub {
+ }
+ elsif (ref $fromspec eq 'REF' and ref $$fromspec eq 'ARRAY') {
push @{$self->{from_bind}}, @{$$fromspec}[1..$#$$fromspec];
$$fromspec->[0];
- },
- HASHREF => sub {
+ }
+ elsif (ref $fromspec eq 'HASH') {
my ($as, $table, $toomuch) = ( map
{ $_ => $fromspec->{$_} }
( grep { $_ !~ /^\-/ } keys %$fromspec )
@@ -459,11 +462,11 @@ sub _from_chunk_to_sql {
if defined $toomuch;
($self->_from_chunk_to_sql($table), $self->_quote($as) );
- },
- SCALAR => sub {
- $self->_quote($fromspec);
- },
- }));
+ }
+ else {
+ $self->throw_exception('Unsupported from refkind: ' . ref $fromspec );
+ }
+ });
}
sub _join_condition {
diff --git a/lib/DBIx/Class/SQLMaker/Oracle.pm b/lib/DBIx/Class/SQLMaker/Oracle.pm
index d088192..d144113 100644
--- a/lib/DBIx/Class/SQLMaker/Oracle.pm
+++ b/lib/DBIx/Class/SQLMaker/Oracle.pm
@@ -75,13 +75,13 @@ sub _order_siblings_by {
my ( @sql, @bind );
for my $c ( $self->_order_by_chunks($arg) ) {
- $self->_SWITCH_refkind(
- $c,
- {
- SCALAR => sub { push @sql, $c },
- ARRAYREF => sub { push @sql, shift @$c; push @bind, @$c },
- }
- );
+ if (ref $c) {
+ push @sql, shift @$c;
+ push @bind, @$c;
+ }
+ else {
+ push @sql, $c;
+ }
}
my $sql =
@@ -210,20 +210,29 @@ sub _insert_returning {
my $f = $options->{returning};
- my ($f_list, @f_names) = $self->_SWITCH_refkind($f, {
- ARRAYREF => sub {
- (join ', ', map { $self->_quote($_) } @$f),
- @$f
- },
- SCALAR => sub {
- $self->_quote($f),
- $f,
- },
- SCALARREF => sub {
- $$f,
- $$f,
- },
- });
+ my ($f_list, @f_names) = do {
+ if (! ref $f) {
+ (
+ $self->_quote($f),
+ $f,
+ )
+ }
+ elsif (ref $f eq 'ARRAY') {
+ (
+ (join ', ', map { $self->_quote($_) } @$f),
+ @$f,
+ )
+ }
+ elsif (ref $f eq 'SCALAR') {
+ (
+ $$f,
+ $$f,
+ )
+ }
+ else {
+ $self->throw_exception("Unsupported INSERT RETURNING option $f");
+ }
+ };
my $rc_ref = $options->{returning_container}
or $self->throw_exception('No returning container supplied for IR values');
diff --git a/lib/DBIx/Class/SQLMaker/OracleJoins.pm b/lib/DBIx/Class/SQLMaker/OracleJoins.pm
index d2bc160..b95c56e 100644
--- a/lib/DBIx/Class/SQLMaker/OracleJoins.pm
+++ b/lib/DBIx/Class/SQLMaker/OracleJoins.pm
@@ -1,5 +1,4 @@
-package # Hide from PAUSE
- DBIx::Class::SQLMaker::OracleJoins;
+package DBIx::Class::SQLMaker::OracleJoins;
use warnings;
use strict;
diff --git a/lib/DBIx/Class/Schema.pm b/lib/DBIx/Class/Schema.pm
index 1959f40..5b86fec 100644
--- a/lib/DBIx/Class/Schema.pm
+++ b/lib/DBIx/Class/Schema.pm
@@ -196,17 +196,16 @@ sub _map_namespaces {
# returns the result_source_instance for the passed class/object,
# or dies with an informative message (used by load_namespaces)
sub _ns_get_rsrc_instance {
- my $class = shift;
- my $rs = ref ($_[0]) || $_[0];
-
- if ($rs->can ('result_source_instance') ) {
- return $rs->result_source_instance;
- }
- else {
- $class->throw_exception (
- "Attempt to load_namespaces() class $rs failed - are you sure this is a real Result Class?"
+ my $me = shift;
+ my $rs_class = ref ($_[0]) || $_[0];
+
+ return try {
+ $rs_class->result_source_instance
+ } catch {
+ $me->throw_exception (
+ "Attempt to load_namespaces() class $rs_class failed - are you sure this is a real Result Class?: $_"
);
- }
+ };
}
sub load_namespaces {
@@ -400,7 +399,6 @@ sub load_classes {
foreach my $to (@to_register) {
$class->register_class(@$to);
- # if $class->can('result_source_instance');
}
}
@@ -831,7 +829,7 @@ sub connection {
}
catch {
$self->throw_exception(
- "No arguments to load_classes and couldn't load ${storage_class} ($_)"
+ "Unable to load storage class ${storage_class}: $_"
);
};
my $storage = $storage_class->new($self=>$args);
@@ -905,40 +903,51 @@ will produce the output
sub compose_namespace {
my ($self, $target, $base) = @_;
+
my $schema = $self->clone;
+
+ $schema->source_registrations({});
+
+ # the original class-mappings must remain - otherwise
+ # reverse_relationship_info will not work
+ #$schema->class_mappings({});
+
{
no warnings qw/redefine/;
local *Class::C3::reinitialize = sub { } if DBIx::Class::_ENV_::OLD_MRO;
use warnings qw/redefine/;
no strict qw/refs/;
- foreach my $moniker ($schema->sources) {
- my $source = $schema->source($moniker);
+ foreach my $moniker ($self->sources) {
+ my $orig_source = $self->source($moniker);
+
my $target_class = "${target}::${moniker}";
- $self->inject_base(
- $target_class => $source->result_class, ($base ? $base : ())
+ $self->inject_base($target_class, $orig_source->result_class, ($base || ()) );
+
+ # register_source examines result_class, and then returns us a clone
+ my $new_source = $schema->register_source($moniker, bless
+ { %$orig_source, result_class => $target_class },
+ ref $orig_source,
);
- $source->result_class($target_class);
- if ($target_class->can('result_source_instance')) {
- # since the newly created classes are registered only with
- # the instance of $schema, it should be safe to weaken
- # the ref (it will GC when $schema is destroyed)
- $target_class->result_source_instance($source);
- weaken ${"${target_class}::__cag_result_source_instance"};
+ if ($target_class->can('result_source_instance')) {
+ # give the class a schema-less source copy
+ $target_class->result_source_instance( bless
+ { %$new_source, schema => ref $new_source->{schema} || $new_source->{schema} },
+ ref $new_source,
+ );
}
- $schema->register_source($moniker, $source);
}
- }
- Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
- {
- no strict 'refs';
- no warnings 'redefine';
+
foreach my $meth (qw/class source resultset/) {
+ no warnings 'redefine';
*{"${target}::${meth}"} = subname "${target}::${meth}" =>
sub { shift->schema->$meth(@_) };
}
}
+
+ Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
+
return $schema;
}
@@ -1035,6 +1044,7 @@ sub clone {
$clone->register_extra_source($moniker => $new);
}
$clone->storage->set_schema($clone) if $clone->storage;
+
return $clone;
}
@@ -1319,11 +1329,7 @@ moniker.
=cut
-sub register_source {
- my $self = shift;
-
- $self->_register_source(@_);
-}
+sub register_source { shift->_register_source(@_) }
=head2 unregister_source
@@ -1337,11 +1343,7 @@ Removes the L<DBIx::Class::ResultSource> from the schema for the given moniker.
=cut
-sub unregister_source {
- my $self = shift;
-
- $self->_unregister_source(@_);
-}
+sub unregister_source { shift->_unregister_source(@_) }
=head2 register_extra_source
@@ -1356,42 +1358,44 @@ has a source and you want to register an extra one.
=cut
-sub register_extra_source {
- my $self = shift;
-
- $self->_register_source(@_, { extra => 1 });
-}
+sub register_extra_source { shift->_register_source(@_, { extra => 1 }) }
sub _register_source {
my ($self, $moniker, $source, $params) = @_;
- my $orig_source = $source;
-
$source = $source->new({ %$source, source_name => $moniker });
+
$source->schema($self);
weaken $source->{schema} if ref($self);
- my $rs_class = $source->result_class;
-
my %reg = %{$self->source_registrations};
$reg{$moniker} = $source;
$self->source_registrations(\%reg);
- return if ($params->{extra});
- return unless defined($rs_class) && $rs_class->can('result_source_instance');
-
- my %map = %{$self->class_mappings};
- if (
- exists $map{$rs_class}
- and
- $map{$rs_class} ne $moniker
- and
- $rs_class->result_source_instance ne $orig_source
- ) {
- carp "$rs_class already has a source, use register_extra_source for additional sources";
+ return $source if $params->{extra};
+
+ my $rs_class = $source->result_class;
+ if ($rs_class and my $rsrc = try { $rs_class->result_source_instance } ) {
+ my %map = %{$self->class_mappings};
+ if (
+ exists $map{$rs_class}
+ and
+ $map{$rs_class} ne $moniker
+ and
+ $rsrc ne $_[2] # orig_source
+ ) {
+ carp
+ "$rs_class already had a registered source which was replaced by this call. "
+ . 'Perhaps you wanted register_extra_source(), though it is more likely you did '
+ . 'something wrong.'
+ ;
+ }
+
+ $map{$rs_class} = $moniker;
+ $self->class_mappings(\%map);
}
- $map{$rs_class} = $moniker;
- $self->class_mappings(\%map);
+
+ return $source;
}
{
diff --git a/lib/DBIx/Class/Storage/DBI.pm b/lib/DBIx/Class/Storage/DBI.pm
index 6c6efcc..847c8a1 100644
--- a/lib/DBIx/Class/Storage/DBI.pm
+++ b/lib/DBIx/Class/Storage/DBI.pm
@@ -92,6 +92,7 @@ my @rdbms_specific_methods = qw/
delete
select
select_single
+ with_deferred_fk_checks
get_use_dbms_capability
get_dbms_capability
@@ -1528,7 +1529,13 @@ sub _dbi_attrs_for_bind {
$_->{dbd_attrs}
}
elsif($_->{sqlt_datatype}) {
- $self->bind_attribute_by_data_type($_->{sqlt_datatype}) || undef;
+ # cache the result in the dbh_details hash, as it can not change unless
+ # we connect to something else
+ my $cache = $self->_dbh_details->{_datatype_map_cache} ||= {};
+ if (not exists $cache->{$_->{sqlt_datatype}}) {
+ $cache->{$_->{sqlt_datatype}} = $self->bind_attribute_by_data_type($_->{sqlt_datatype}) || undef;
+ }
+ $cache->{$_->{sqlt_datatype}};
}
elsif ($sba_attrs and $_->{dbic_colname}) {
$sba_attrs->{$_->{dbic_colname}} || undef;
diff --git a/lib/DBIx/Class/Storage/DBI/ADO.pm b/lib/DBIx/Class/Storage/DBI/ADO.pm
index 36423c4..577d2d3 100644
--- a/lib/DBIx/Class/Storage/DBI/ADO.pm
+++ b/lib/DBIx/Class/Storage/DBI/ADO.pm
@@ -4,6 +4,7 @@ use base 'DBIx::Class::Storage::DBI';
use mro 'c3';
use Sub::Name;
+use Try::Tiny;
use namespace::clean;
=head1 NAME
@@ -60,23 +61,28 @@ sub _dbh_get_info {
}
# Monkeypatch out the horrible warnings during global destruction.
-# A patch to DBD::ADO has been submitted as well.
+# A patch to DBD::ADO has been submitted as well, and it was fixed
+# as of 2.99
# https://rt.cpan.org/Ticket/Display.html?id=65563
sub _init {
- no warnings 'redefine';
- require DBD::ADO;
-
- if (DBD::ADO->VERSION <= 2.98) {
- my $disconnect = *DBD::ADO::db::disconnect{CODE};
-
- *DBD::ADO::db::disconnect = subname 'DBD::ADO::db::disconnect' => sub {
- my $warn_handler = $SIG{__WARN__} || sub { warn @_ };
- local $SIG{__WARN__} = sub {
- $warn_handler->(@_)
- unless $_[0] =~ /Not a Win32::OLE object|uninitialized value/;
+ unless ($DBD::ADO::__DBIC_MONKEYPATCH_CHECKED__) {
+ require DBD::ADO;
+
+ unless (try { DBD::ADO->VERSION('2.99'); 1 }) {
+ no warnings 'redefine';
+ my $disconnect = *DBD::ADO::db::disconnect{CODE};
+
+ *DBD::ADO::db::disconnect = subname 'DBD::ADO::db::disconnect' => sub {
+ my $warn_handler = $SIG{__WARN__} || sub { warn @_ };
+ local $SIG{__WARN__} = sub {
+ $warn_handler->(@_)
+ unless $_[0] =~ /Not a Win32::OLE object|uninitialized value/;
+ };
+ $disconnect->(@_);
};
- $disconnect->(@_);
- };
+ }
+
+ $DBD::ADO::__DBIC_MONKEYPATCH_CHECKED__ = 1;
}
}
diff --git a/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm b/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
index f582b94..df7053a 100644
--- a/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
+++ b/lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm
@@ -433,28 +433,35 @@ sub _dbi_attrs_for_bind {
$attrs;
}
-my $dbd_loaded;
sub bind_attribute_by_data_type {
my ($self, $dt) = @_;
- $dbd_loaded ||= do {
- require DBD::Oracle;
- if ($DBD::Oracle::VERSION eq '1.23') {
- $self->throw_exception(
- "BLOB/CLOB support in DBD::Oracle == 1.23 is broken, use an earlier or later ".
- "version.\n\nSee: https://rt.cpan.org/Public/Bug/Display.html?id=46016\n"
- );
+ if ($self->_is_lob_type($dt)) {
+
+ # this is a hot-ish codepath, store an escape-flag in the DBD namespace, so that
+ # things like Class::Unload work (unlikely but possible)
+ unless ($DBD::Oracle::__DBIC_DBD_VERSION_CHECK_OK__) {
+
+ # no earlier - no later
+ if ($DBD::Oracle::VERSION eq '1.23') {
+ $self->throw_exception(
+ "BLOB/CLOB support in DBD::Oracle == 1.23 is broken, use an earlier or later ".
+ "version (https://rt.cpan.org/Public/Bug/Display.html?id=46016)"
+ );
+ }
+
+ $DBD::Oracle::__DBIC_DBD_VERSION_CHECK_OK__ = 1;
}
- 1;
- };
- if ($self->_is_lob_type($dt)) {
return {
ora_type => $self->_is_text_lob_type($dt)
? DBD::Oracle::ORA_CLOB()
: DBD::Oracle::ORA_BLOB()
};
}
+ else {
+ return undef;
+ }
}
# Handle blob columns in WHERE.
diff --git a/lib/DBIx/Class/Storage/DBI/Pg.pm b/lib/DBIx/Class/Storage/DBI/Pg.pm
index 0dc7ea8..371f185 100644
--- a/lib/DBIx/Class/Storage/DBI/Pg.pm
+++ b/lib/DBIx/Class/Storage/DBI/Pg.pm
@@ -164,33 +164,34 @@ sub sqlt_type {
return 'PostgreSQL';
}
-my $type_cache;
sub bind_attribute_by_data_type {
my ($self,$data_type) = @_;
- # Ask for a DBD::Pg with array support
- # pg uses (used?) version::qv()
- require DBD::Pg;
-
- if ($self->_server_info->{normalized_dbms_version} >= 9.0) {
- if (not try { DBD::Pg->VERSION('2.17.2') }) {
- carp_once( __PACKAGE__.': BYTEA columns are known to not work on Pg >='
- . " 9.0 with DBD::Pg < 2.17.2\n" );
+ if ($self->_is_binary_lob_type($data_type)) {
+ # this is a hot-ish codepath, use an escape flag to minimize
+ # amount of function/method calls
+ # additionally version.pm is buggy, and leaks memory on multiple
+ # ->VERSION calls
+ # the flag is stored in the DBD namespace, so that Class::Unload
+ # will work (unlikely, but still)
+ unless ($DBD::Pg::__DBIC_DBD_VERSION_CHECK_DONE__) {
+ if ($self->_server_info->{normalized_dbms_version} >= 9.0) {
+ try { DBD::Pg->VERSION('2.17.2'); 1 } or carp (
+ __PACKAGE__.': BYTEA columns are known to not work on Pg >= 9.0 with DBD::Pg < 2.17.2'
+ );
+ }
+ elsif (not try { DBD::Pg->VERSION('2.9.2'); 1 } ) { carp (
+ __PACKAGE__.': DBD::Pg 2.9.2 or greater is strongly recommended for BYTEA column support'
+ )}
+
+ $DBD::Pg::__DBIC_DBD_VERSION_CHECK_DONE__ = 1;
}
- }
- elsif (not try { DBD::Pg->VERSION('2.9.2') }) {
- carp_once( __PACKAGE__.': DBD::Pg 2.9.2 or greater is strongly recommended'
- . "for BYTEA column support.\n" );
- }
- # cache the result of _is_binary_lob_type
- if (!exists $type_cache->{$data_type}) {
- $type_cache->{$data_type} = $self->_is_binary_lob_type($data_type)
- ? +{ pg_type => DBD::Pg::PG_BYTEA() }
- : undef
+ return { pg_type => DBD::Pg::PG_BYTEA() };
+ }
+ else {
+ return undef;
}
-
- $type_cache->{$data_type};
}
sub _exec_svp_begin {
diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm b/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
index 94239c2..352386e 100644
--- a/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
+++ b/lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
@@ -263,8 +263,17 @@ sub _prep_for_execute {
keys %$columns_info
;
- if (($op eq 'insert' && $bound_identity_col) ||
- ($op eq 'update' && exists $args->[0]{$identity_col})) {
+ if (
+ ($bound_identity_col and $op eq 'insert')
+ or
+ (
+ $op eq 'update'
+ and
+ defined $identity_col
+ and
+ exists $args->[0]{$identity_col}
+ )
+ ) {
$sql = join ("\n",
$self->_set_table_identity_sql($op => $table, 'on'),
$sql,
@@ -272,8 +281,15 @@ sub _prep_for_execute {
);
}
- if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
- (not $self->{insert_bulk})) {
+ if (
+ (not $bound_identity_col)
+ and
+ $identity_col
+ and
+ (not $self->{insert_bulk})
+ and
+ $op eq 'insert'
+ ) {
$sql =
"$sql\n" .
$self->_fetch_identity_sql($ident, $identity_col);
diff --git a/lib/DBIx/Class/Storage/DBI/Sybase/FreeTDS.pm b/lib/DBIx/Class/Storage/DBI/Sybase/FreeTDS.pm
index 9f5ba5d..341c1e3 100644
--- a/lib/DBIx/Class/Storage/DBI/Sybase/FreeTDS.pm
+++ b/lib/DBIx/Class/Storage/DBI/Sybase/FreeTDS.pm
@@ -9,8 +9,8 @@ use namespace::clean;
=head1 NAME
-DBIx::Class::Storage::DBI::Sybase - Base class for drivers using L<DBD::Sybase>
-over FreeTDS.
+DBIx::Class::Storage::DBI::Sybase::FreeTDS - Base class for drivers using
+DBD::Sybase over FreeTDS.
=head1 DESCRIPTION
diff --git a/lib/DBIx/Class/Storage/TxnScopeGuard.pm b/lib/DBIx/Class/Storage/TxnScopeGuard.pm
index 0979a26..3263096 100644
--- a/lib/DBIx/Class/Storage/TxnScopeGuard.pm
+++ b/lib/DBIx/Class/Storage/TxnScopeGuard.pm
@@ -3,7 +3,7 @@ package DBIx::Class::Storage::TxnScopeGuard;
use strict;
use warnings;
use Try::Tiny;
-use Scalar::Util qw/weaken blessed/;
+use Scalar::Util qw/weaken blessed refaddr/;
use DBIx::Class;
use DBIx::Class::Exception;
use DBIx::Class::Carp;
@@ -14,9 +14,25 @@ my ($guards_count, $compat_handler, $foreign_handler);
sub new {
my ($class, $storage) = @_;
+ my $guard = {
+ inactivated => 0,
+ storage => $storage,
+ };
+
+ # we are starting with an already set $@ - in order for things to work we need to
+ # be able to recognize it upon destruction - store its weakref
+ # recording it before doing the txn_begin stuff
+ if (defined $@ and $@ ne '') {
+ $guard->{existing_exception_ref} = (ref $@ ne '') ? $@ : \$@;
+ weaken $guard->{existing_exception_ref};
+ }
+
$storage->txn_begin;
- my $guard = bless [ 0, $storage, $storage->_dbh ], ref $class || $class;
+ $guard->{dbh} = $storage->_dbh;
+ weaken $guard->{dbh};
+
+ bless $guard, ref $class || $class;
# install a callback carefully
if (DBIx::Class::_ENV_::INVISIBLE_DOLLAR_AT and !$guards_count) {
@@ -53,19 +69,21 @@ sub new {
$guards_count++;
- weaken ($guard->[2]);
$guard;
}
sub commit {
my $self = shift;
- $self->[1]->txn_commit;
- $self->[0] = 1;
+ $self->{storage}->throw_exception("Refusing to execute multiple commits on scope guard $self")
+ if $self->{inactivated};
+
+ $self->{storage}->txn_commit;
+ $self->{inactivated} = 1;
}
sub DESTROY {
- my ($dismiss, $storage) = @{$_[0]};
+ my $self = shift;
$guards_count--;
@@ -90,24 +108,34 @@ sub DESTROY {
undef $foreign_handler;
}
- return if $dismiss;
+ return if $self->{inactivated};
- # if our dbh is not ours anymore, the weakref will go undef
- $storage->_verify_pid;
- return unless $_[0]->[2];
+ # if our dbh is not ours anymore, the $dbh weakref will go undef
+ $self->{storage}->_verify_pid;
+ return unless $self->{dbh};
- my $exception = $@;
+ my $exception = $@ if (
+ defined $@
+ and
+ $@ ne ''
+ and
+ (
+ ! defined $self->{existing_exception_ref}
+ or
+ refaddr( ref $@ eq '' ? \$@ : $@ ) != refaddr($self->{existing_exception_ref})
+ )
+ );
{
local $@;
carp 'A DBIx::Class::Storage::TxnScopeGuard went out of scope without explicit commit or error. Rolling back.'
- unless $exception;
+ unless defined $exception;
my $rollback_exception;
# do minimal connectivity check due to weird shit like
# https://rt.cpan.org/Public/Bug/Display.html?id=62370
- try { $storage->_seems_connected && $storage->txn_rollback }
+ try { $self->{storage}->_seems_connected && $self->{storage}->txn_rollback }
catch { $rollback_exception = shift };
if ( $rollback_exception and (
diff --git a/t/100extra_source.t b/t/100extra_source.t
index b917958..490bbec 100644
--- a/t/100extra_source.t
+++ b/t/100extra_source.t
@@ -55,7 +55,7 @@ warnings_like (
isa_ok ($schema->resultset('Artist'), 'DBIx::Class::ResultSet');
},
[
- qr/DBICTest::Artist already has a source, use register_extra_source for additional sources/
+ qr/DBICTest::Artist already had a registered source which was replaced by this call/
],
'registering source to an existing result warns'
);
diff --git a/t/100populate.t b/t/100populate.t
index 9588f4e..f8c6f10 100644
--- a/t/100populate.t
+++ b/t/100populate.t
@@ -45,7 +45,7 @@ throws_ok ( sub {
}
} ('Huey', 'Dewey', $ex_title, 'Louie')
])
-}, qr/\Qexecute_array() aborted with 'constraint failed\E.+ at populate slice.+$ex_title/ms, 'Readable exception thrown for failed populate');
+}, qr/\Qexecute_array() aborted with '\E.+ at populate slice.+$ex_title/ms, 'Readable exception thrown for failed populate');
## make sure populate honors fields/orders in list context
## schema order
@@ -171,7 +171,7 @@ throws_ok {
name => 'foo3',
},
]);
-} qr/\Qexecute_array() aborted with 'datatype mismatch'/, 'bad slice';
+} qr/\Qexecute_array() aborted with 'datatype mismatch\E\b/, 'bad slice';
is($rs->count, 0, 'populate is atomic');
diff --git a/t/51threadtxn.t b/t/51threadtxn.t
index 7b11a7b..1245fab 100644
--- a/t/51threadtxn.t
+++ b/t/51threadtxn.t
@@ -21,13 +21,13 @@ use threads;
use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
. ' (note: creates and drops a table named artist!)' unless ($dsn && $user);
+plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
+ unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
my $num_children = $ENV{DBICTEST_THREAD_STRESS} || 1;
if($num_children !~ /^[0-9]+$/ || $num_children < 10) {
diff --git a/t/52leaks.t b/t/52leaks.t
index 5111b67..5614252 100644
--- a/t/52leaks.t
+++ b/t/52leaks.t
@@ -36,9 +36,10 @@ if ($ENV{DBICTEST_IN_PERSISTENT_ENV}) {
use lib qw(t/lib);
use DBICTest::RunMode;
use DBIx::Class;
+use B 'svref_2object';
BEGIN {
plan skip_all => "Your perl version $] appears to leak like a sieve - skipping test"
- if DBIx::Class::_ENV_::PEEPEENESS();
+ if DBIx::Class::_ENV_::PEEPEENESS;
}
use Scalar::Util qw/refaddr reftype weaken/;
@@ -121,6 +122,7 @@ unless (DBICTest::RunMode->is_plain) {
%$weak_registry = ();
}
+my @compose_ns_classes;
{
use_ok ('DBICTest');
@@ -128,6 +130,8 @@ unless (DBICTest::RunMode->is_plain) {
my $rs = $schema->resultset ('Artist');
my $storage = $schema->storage;
+ @compose_ns_classes = map { "DBICTest::${_}" } keys %{$schema->source_registrations};
+
ok ($storage->connected, 'we are connected');
my $row_obj = $rs->search({}, { rows => 1})->next; # so that commits/rollbacks work
@@ -188,8 +192,6 @@ unless (DBICTest::RunMode->is_plain) {
result_source_handle => $rs->result_source->handle,
- fresh_pager => $rs->page(5)->pager,
- pager => $pager,
pager_explicit_count => $pager_explicit_count,
};
@@ -203,6 +205,8 @@ unless (DBICTest::RunMode->is_plain) {
storage => $storage,
sql_maker => $storage->sql_maker,
dbh => $storage->_dbh,
+ fresh_pager => $rs->page(5)->pager,
+ pager => $pager,
);
if ($has_dt) {
@@ -267,6 +271,7 @@ unless (DBICTest::RunMode->is_plain) {
reftype $phantom,
refaddr $phantom,
);
+
$weak_registry->{$slot} = $phantom;
weaken $weak_registry->{$slot};
}
@@ -300,25 +305,32 @@ for my $slot (keys %$weak_registry) {
}
}
-
-# FIXME
-# For reasons I can not yet fully understand the table() god-method (located in
-# ::ResultSourceProxy::Table) attaches an actual source instance to each class
-# as virtually *immortal* class-data.
-# For now just ignore these instances manually but there got to be a saner way
-for ( map { $_->result_source_instance } (
+# every result class has a result source instance as classdata
+# make sure these are all present and distinct before ignoring
+# (distinct means only 1 reference)
+for my $rs_class (
'DBICTest::BaseResult',
+ @compose_ns_classes,
map { DBICTest::Schema->class ($_) } DBICTest::Schema->sources
-)) {
- delete $weak_registry->{$_};
+) {
+ # need to store the SVref and examine it separately, to push the rsrc instance off the pad
+ my $SV = svref_2object($rs_class->result_source_instance);
+ is( $SV->REFCNT, 1, "Source instance of $rs_class referenced exactly once" );
+
+ # ignore it
+ delete $weak_registry->{$rs_class->result_source_instance};
}
-# FIXME
-# same problem goes for the schema - its classdata contains live result source
-# objects, which to add insult to the injury are *different* instances from the
-# ones we ignored above
-for ( values %{DBICTest::Schema->source_registrations || {}} ) {
- delete $weak_registry->{$_};
+# Schema classes also hold sources, but these are clones, since
+# each source contains the schema (or schema class name in this case)
+# Hence the clone so that the same source can be registered with
+# multiple schemas
+for my $moniker ( keys %{DBICTest::Schema->source_registrations || {}} ) {
+
+ my $SV = svref_2object(DBICTest::Schema->source($moniker));
+ is( $SV->REFCNT, 1, "Source instance registered under DBICTest::Schema as $moniker referenced exactly once" );
+
+ delete $weak_registry->{DBICTest::Schema->source($moniker)};
}
for my $slot (sort keys %$weak_registry) {
@@ -337,7 +349,6 @@ for my $slot (sort keys %$weak_registry) {
};
}
-
# we got so far without a failure - this is a good thing
# now let's try to rerun this script under a "persistent" environment
# this is ugly and dirty but we do not yet have a Test::Embedded or
diff --git a/t/53lean_startup.t b/t/53lean_startup.t
index 0054f03..072eac3 100644
--- a/t/53lean_startup.t
+++ b/t/53lean_startup.t
@@ -1,14 +1,17 @@
# Use a require override instead of @INC munging (less common)
# Do the override as early as possible so that CORE::require doesn't get compiled away
-# We will replace $req_override in a bit
+# We will add the hook in a bit, got to load some regular stuff
my $test_hook;
BEGIN {
- $test_hook = sub {}; # noop at first
- *CORE::GLOBAL::require = sub {
- $test_hook->(@_);
- CORE::require($_[0]);
- };
+ unshift @INC, 't/lib';
+ require DBICTest::Util::OverrideRequire;
+
+ DBICTest::Util::OverrideRequire::override_global_require( sub {
+ my $res = $_[0]->();
+ $test_hook->($_[1]) if $test_hook;
+ return $res;
+ });
}
use strict;
@@ -16,8 +19,16 @@ use warnings;
use Test::More;
use Data::Dumper;
+# Package::Stash::XS is silly and fails if a require hook contains regular
+# expressions on perl < 5.8.7. Load the damned thing if the case
+BEGIN {
+ require Package::Stash if $] < 5.008007;
+}
+
+my $expected_core_modules;
+
BEGIN {
- my $core_modules = { map { $_ => 1 } qw/
+ $expected_core_modules = { map { $_ => 1 } qw/
strict
warnings
@@ -70,7 +81,7 @@ BEGIN {
# exclude everything where the current namespace does not match the called function
# (this works around very weird XS-induced require callstack corruption)
if (
- !$core_modules->{$req}
+ !$expected_core_modules->{$req}
and
@caller
and
@@ -103,4 +114,21 @@ delete $ENV{$_} for qw/
my $schema = DBICTest->init_schema;
is ($schema->resultset('Artist')->next->name, 'Caterwauler McCrae');
+# check if anything we were expecting didn't actually load
+my $nl;
+for (keys %$expected_core_modules) {
+ my $mod = "$_.pm";
+ $mod =~ s/::/\//g;
+ unless ($INC{$mod}) {
+ my $err = sprintf "Expected DBIC core module %s never loaded - %s needs adjustment", $_, __FILE__;
+ if (DBICTest::RunMode->is_smoker or DBICTest::RunMode->is_author) {
+ fail ($err)
+ }
+ else {
+ diag "\n" unless $nl++;
+ diag $err;
+ }
+ }
+}
+
done_testing;
diff --git a/t/55namespaces_cleaned.t b/t/55namespaces_cleaned.t
index f11fa51..6706966 100644
--- a/t/55namespaces_cleaned.t
+++ b/t/55namespaces_cleaned.t
@@ -1,20 +1,32 @@
-# Pre-5.10 perls pollute %INC on unsuccesfull module
-# require, making it appear as if the module is already
-# loaded on subsequent require()s
-# Can't seem to find the exact RT/perldelta entry
BEGIN {
if ($] < 5.010) {
- # shut up spurious warnings without loading warnings.pm
- *CORE::GLOBAL::require = sub {};
-
- *CORE::GLOBAL::require = sub {
- my $res = eval { CORE::require($_[0]) };
- if ($@) {
- delete $INC{$_[0]};
- die
+
+ # Pre-5.10 perls pollute %INC on unsuccessful module
+ # require, making it appear as if the module is already
+ # loaded on subsequent require()s
+ # Can't seem to find the exact RT/perldelta entry
+ #
+ # The reason we can't just use a sane, clean loader, is because
+ # if a Module require()s another module the %INC will still
+ # get filled with crap and we are back to square one. A global
+ # fix is really the only way for this test, as we try to load
+ # each available module separately, and have no control (nor
+ # knowledge) over their common dependencies.
+ #
+ # we want to do this here, in the very beginning, before even
+ # warnings/strict are loaded
+
+ unshift @INC, 't/lib';
+ require DBICTest::Util::OverrideRequire;
+
+ DBICTest::Util::OverrideRequire::override_global_require( sub {
+ my $res = eval { $_[0]->() };
+ if ($@ ne '') {
+ delete $INC{$_[1]};
+ die $@;
}
- $res;
- }
+ return $res;
+ } );
}
}
@@ -60,6 +72,10 @@ my $skip_idx = { map { $_ => 1 } (
# G::L::D is unclean, but we never inherit from it
'DBIx::Class::Admin::Descriptive',
'DBIx::Class::Admin::Usage',
+
+ # this subclass is expected to inherit whatever crap comes
+ # from the parent
+ 'DBIx::Class::ResultSet::Pager',
) };
my $has_cmop = eval { require Class::MOP };
@@ -92,7 +108,7 @@ for my $mod (@modules) {
next if ( DBIx::Class::_ENV_::BROKEN_NAMESPACE_CLEAN() and $name =~ /^carp(?:_unique|_once)?$/ );
- # overload is a funky thing - it is neither cleaned, and its imports are named funny
+ # overload is a funky thing - it is not cleaned, and its imports are named funny
next if $name =~ /^\(/;
my $gv = svref_2object($all_method_like{$name})->GV;
@@ -170,5 +186,4 @@ sub find_modules {
return sort @modules;
}
-
done_testing;
diff --git a/t/67pager.t b/t/67pager.t
index b7eb2ca..eb17faa 100644
--- a/t/67pager.t
+++ b/t/67pager.t
@@ -5,6 +5,7 @@ use Test::More;
use Test::Exception;
use lib qw(t/lib);
use DBICTest;
+use Storable qw/dclone/;
my $schema = DBICTest->init_schema();
@@ -186,5 +187,32 @@ $schema->default_resultset_attributes({ rows => 5 });
is($p->(), 5, 'default rows is 5');
+# does serialization work (preserve laziness, while preserving state if it exists)
+$qcnt = 0;
+$it = $rs->search(
+ {},
+ { order_by => 'title',
+ rows => 5,
+ page => 2 }
+);
+$pager = $it->pager;
+is ($qcnt, 0, 'No queries on rs/pager creation');
+
+$it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) };
+is ($qcnt, 0, 'No queries on rs/pager freeze/thaw');
+
+is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2" );
+
+is ($qcnt, 1, 'Count fired to get pager page entries');
+
+$rs->create({ title => 'bah', artist => 1, year => 2011 });
+
+$qcnt = 0;
+$it = do { local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema; dclone ($it) };
+is ($qcnt, 0, 'No queries on rs/pager freeze/thaw');
+
+is( $it->pager->entries_on_this_page, 1, "entries_on_this_page ok for page 2, even though underlying count changed" );
+
+is ($qcnt, 0, 'No count fired on pre-existing total count');
done_testing;
diff --git a/t/746mssql.t b/t/746mssql.t
index 36a405f..620258e 100644
--- a/t/746mssql.t
+++ b/t/746mssql.t
@@ -534,10 +534,15 @@ SQL
});
TODO: {
- local $TODO =
+ my $freetds_and_dynamic_cursors = 1
+ if $opts_name eq 'use_dynamic_cursors' &&
+ $schema->storage->using_freetds;
+
+ local $TODO =
'these tests fail on freetds with dynamic cursors for some reason'
- if $opts_name eq 'use_dynamic_cursors'
- && $schema->storage->using_freetds;
+ if $freetds_and_dynamic_cursors;
+ local $ENV{DBIC_NULLABLE_KEY_NOWARN} = 1
+ if $freetds_and_dynamic_cursors;
my $rs = $schema->resultset('Money');
my $row;
diff --git a/t/74mssql.t b/t/74mssql.t
index 2470c7f..0ccaa23 100644
--- a/t/74mssql.t
+++ b/t/74mssql.t
@@ -14,14 +14,15 @@ use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
-plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mssql_sybase')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mssql_sybase');
-
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/};
plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn);
+
+plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mssql_sybase')
+ unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mssql_sybase');
+
{
my $srv_ver = DBICTest::Schema->connect($dsn, $user, $pass)->storage->_server_info->{dbms_version};
ok ($srv_ver, 'Got a test server version on fresh schema: ' . ($srv_ver||'???') );
diff --git a/t/750firebird.t b/t/750firebird.t
index 1b4166d..ec45419 100644
--- a/t/750firebird.t
+++ b/t/750firebird.t
@@ -8,23 +8,21 @@ use lib qw(t/lib);
use DBICTest;
use Scope::Guard ();
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_FIREBIRD_${_}" } qw/DSN USER PASS/};
-my ($dsn2, $user2, $pass2) = @ENV{map { "DBICTEST_FIREBIRD_INTERBASE_${_}" } qw/DSN USER PASS/};
-my ($dsn3, $user3, $pass3) = @ENV{map { "DBICTEST_FIREBIRD_ODBC_${_}" } qw/DSN USER PASS/};
-
-plan skip_all => 'Test needs ' .
- (join ' or ', map { $_ ? $_ : () }
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_firebird'),
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_firebird_interbase'),
- DBIx::Class::Optional::Dependencies->req_missing_for('test_rdbms_firebird_odbc'))
- unless
- $dsn && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_firebird')
- or
- $dsn2 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_firebird_interbase')
- or
- $dsn3 && DBIx::Class::Optional::Dependencies->req_ok_for('test_rdbms_firebird_odbc')
- or
- (not $dsn || $dsn2 || $dsn3);
+my $env2optdep = {
+ DBICTEST_FIREBIRD => 'test_rdbms_firebird',
+ DBICTEST_FIREBIRD_INTERBASE => 'test_rdbms_firebird_interbase',
+ DBICTEST_FIREBIRD_ODBC => 'test_rdbms_firebird_odbc',
+};
+
+plan skip_all => join (' ',
+ 'Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_INTERBASE_DSN}',
+ 'and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},',
+ '_USER and _PASS to run these tests.',
+
+ 'WARNING: this test creates and drops the tables "artist", "bindtype_test" and',
+ '"sequence_test"; the generators "gen_artist_artistid", "pkid1_seq", "pkid2_seq"',
+ 'and "nonpkid_seq" and the trigger "artist_bi".',
+) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep;
# tests stolen from 749sybase_asa.t
@@ -35,29 +33,17 @@ plan skip_all => 'Test needs ' .
# Example ODBC DSN:
# dbi:ODBC:Driver=Firebird;Dbname=/var/lib/firebird/2.5/data/hlaghdb.fdb
-plan skip_all => <<'EOF' unless $dsn || $dsn2 || $dsn3;
-Set $ENV{DBICTEST_FIREBIRD_DSN} and/or $ENV{DBICTEST_FIREBIRD_INTERBASE_DSN}
-and/or $ENV{DBICTEST_FIREBIRD_ODBC_DSN},
-_USER and _PASS to run these tests.
-
-WARNING: this test creates and drops the tables "artist", "bindtype_test" and
-"sequence_test"; the generators "gen_artist_artistid", "pkid1_seq", "pkid2_seq"
-and "nonpkid_seq" and the trigger "artist_bi".
-EOF
-
-my @info = (
- [ $dsn, $user, $pass ],
- [ $dsn2, $user2, $pass2 ],
- [ $dsn3, $user3, $pass3 ],
-);
-
my $schema;
-foreach my $conn_idx (0..$#info) {
- my ($dsn, $user, $pass) = @{ $info[$conn_idx] || [] };
+for my $prefix (keys %$env2optdep) { SKIP: {
+
+ my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/;
next unless $dsn;
+ skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1)
+ unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix});
+
$schema = DBICTest::Schema->connect($dsn, $user, $pass, {
auto_savepoint => 1,
quote_char => q["],
@@ -294,7 +280,7 @@ EOF
};
}
}
-}
+}}
done_testing;
diff --git a/t/84serialize.t b/t/84serialize.t
index dedf8da..04e16cb 100644
--- a/t/84serialize.t
+++ b/t/84serialize.t
@@ -55,23 +55,36 @@ my %stores = (
return $fire;
},
- ($ENV{DBICTEST_MEMCACHED})
- ? do {
- require Cache::Memcached;
- my $memcached = Cache::Memcached->new(
- { servers => [ $ENV{DBICTEST_MEMCACHED} ] } );
-
- my $key = 'tmp_dbic_84serialize_memcached_test';
-
- ( memcached => sub {
- $memcached->set( $key, $_[0], 60 );
- local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema;
- return $memcached->get($key);
- });
- } : ()
- ,
);
+if ($ENV{DBICTEST_MEMCACHED}) {
+ if (DBIx::Class::Optional::Dependencies->req_ok_for ('test_memcached')) {
+ my $memcached = Cache::Memcached->new(
+ { servers => [ $ENV{DBICTEST_MEMCACHED} ] }
+ );
+
+ my $key = 'tmp_dbic_84serialize_memcached_test';
+
+ $stores{memcached} = sub {
+ $memcached->set( $key, $_[0], 60 );
+ local $DBIx::Class::ResultSourceHandle::thaw_schema = $schema;
+ return $memcached->get($key);
+ };
+ }
+ else {
+ SKIP: {
+ skip 'Memcached tests need ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_memcached'), 1;
+ }
+ }
+}
+else {
+ SKIP: {
+ skip 'Set $ENV{DBICTEST_MEMCACHED} to run the memcached serialization tests', 1;
+ }
+}
+
+
+
for my $name (keys %stores) {
my $store = $stores{$name};
diff --git a/t/86sqlt.t b/t/86sqlt.t
index 961ca27..76e759d 100644
--- a/t/86sqlt.t
+++ b/t/86sqlt.t
@@ -22,11 +22,10 @@ sub DBICTest::Schema::deployment_statements {
return $self->next::method(@_);
}
-my $schema = DBICTest->init_schema (no_deploy => 1);
-
# Check deployment statements ctx sensitivity
{
+ my $schema = DBICTest->init_schema (no_deploy => 1);
my $not_first_table_creation_re = qr/CREATE TABLE fourkeys_to_twokeys/;
my $statements = $schema->deployment_statements;
@@ -84,6 +83,8 @@ my $schema = DBICTest->init_schema (no_deploy => 1);
ok($component_deploy_hook_called, 'component deploy hook got called');
}
+my $schema = DBICTest->init_schema (no_deploy => 1);
+
{
my $deploy_hook_called = 0;
$custom_deployment_statements_called = 0;
diff --git a/t/88result_set_column.t b/t/88result_set_column.t
index 847483a..044e71a 100644
--- a/t/88result_set_column.t
+++ b/t/88result_set_column.t
@@ -47,8 +47,8 @@ warnings_exist (sub {
# test distinct propagation
is_deeply (
- [$rs->search ({}, { distinct => 1 })->get_column ('year')->all],
- [$rs_year->func('distinct')],
+ [sort $rs->search ({}, { distinct => 1 })->get_column ('year')->all],
+ [sort $rs_year->func('distinct')],
'distinct => 1 is passed through properly',
);
diff --git a/t/94versioning.t b/t/94versioning.t
index e6eb0e2..09ce788 100644
--- a/t/94versioning.t
+++ b/t/94versioning.t
@@ -23,7 +23,11 @@ BEGIN {
require DBIx::Class;
plan skip_all =>
'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')
+ unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy');
+
+ plan skip_all =>
+ 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
+ unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql');
}
use_ok('DBICVersion_v1');
diff --git a/t/98savepoints.t b/t/98savepoints.t
index 7bc6d64..03365ec 100644
--- a/t/98savepoints.t
+++ b/t/98savepoints.t
@@ -4,142 +4,144 @@ use warnings;
use Test::More;
use DBIx::Class::Optional::Dependencies ();
-my ($create_sql, $dsn, $user, $pass);
+my $env2optdep = {
+ DBICTEST_PG => 'rdbms_pg',
+ DBICTEST_MYSQL => 'test_rdbms_mysql',
+};
-if ($ENV{DBICTEST_PG_DSN}) {
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
-
- ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
-
- $create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))";
-} elsif ($ENV{DBICTEST_MYSQL_DSN}) {
- plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
- unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql');
-
- ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
-
- $create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB";
-} else {
- plan skip_all => 'Set DBICTEST_(PG|MYSQL)_DSN _USER and _PASS if you want to run savepoint tests';
-}
+plan skip_all => join (' ',
+ 'Set $ENV{DBICTEST_PG_DSN} and/or $ENV{DBICTEST_MYSQL_DSN} _USER and _PASS to run these tests.',
+) unless grep { $ENV{"${_}_DSN"} } keys %$env2optdep;
use lib qw(t/lib);
use DBICTest;
use DBICTest::Stats;
-plan tests => 16;
+my $schema;
-my $schema = DBICTest::Schema->connect ($dsn,$user,$pass,{ auto_savepoint => 1 });
+for my $prefix (keys %$env2optdep) { SKIP: {
+ my ($dsn, $user, $pass) = map { $ENV{"${prefix}_$_"} } qw/DSN USER PASS/;
-my $stats = DBICTest::Stats->new;
+ skip ("Skipping tests with $prefix: set \$ENV{${prefix}_DSN} _USER and _PASS", 1)
+ unless $dsn;
-$schema->storage->debugobj($stats);
+ skip ("Testing with ${prefix}_DSN needs " . DBIx::Class::Optional::Dependencies->req_missing_for( $env2optdep->{$prefix} ), 1)
+ unless DBIx::Class::Optional::Dependencies->req_ok_for($env2optdep->{$prefix});
-$schema->storage->debug(1);
+ $schema = DBICTest::Schema->connect ($dsn,$user,$pass,{ auto_savepoint => 1 });
-{
- local $SIG{__WARN__} = sub {};
- $schema->storage->dbh->do ('DROP TABLE IF EXISTS artist');
- $schema->storage->dbh->do ($create_sql);
-}
+ my $create_sql;
+ $schema->storage->ensure_connected;
+ if ($schema->storage->isa('DBIx::Class::Storage::DBI::Pg')) {
+ $create_sql = "CREATE TABLE artist (artistid serial PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10))";
+ $schema->storage->dbh->do('SET client_min_messages=WARNING');
+ }
+ elsif ($schema->storage->isa('DBIx::Class::Storage::DBI::mysql')) {
+ $create_sql = "CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10)) ENGINE=InnoDB";
+ }
+ else {
+ skip( 'Untested driver ' . $schema->storage, 1 );
+ }
-$schema->resultset('Artist')->create({ name => 'foo' });
+ note "Testing $prefix";
-$schema->txn_begin;
+ my $stats = DBICTest::Stats->new;
+ $schema->storage->debugobj($stats);
+ $schema->storage->debug(1);
-my $arty = $schema->resultset('Artist')->find(1);
+ $schema->storage->dbh->do ('DROP TABLE IF EXISTS artist');
+ $schema->storage->dbh->do ($create_sql);
-my $name = $arty->name;
+ $schema->resultset('Artist')->create({ name => 'foo' });
-# First off, test a generated savepoint name
-$schema->svp_begin;
+ $schema->txn_begin;
-cmp_ok($stats->{'SVP_BEGIN'}, '==', 1, 'Statistics svp_begin tickled');
+ my $arty = $schema->resultset('Artist')->find(1);
-$arty->update({ name => 'Jheephizzy' });
+ my $name = $arty->name;
-$arty->discard_changes;
+ # First off, test a generated savepoint name
+ $schema->svp_begin;
-cmp_ok($arty->name, 'eq', 'Jheephizzy', 'Name changed');
+ cmp_ok($stats->{'SVP_BEGIN'}, '==', 1, 'Statistics svp_begin tickled');
-# Rollback the generated name
-# Active: 0
-$schema->svp_rollback;
+ $arty->update({ name => 'Jheephizzy' });
-cmp_ok($stats->{'SVP_ROLLBACK'}, '==', 1, 'Statistics svp_rollback tickled');
+ $arty->discard_changes;
-$arty->discard_changes;
+ cmp_ok($arty->name, 'eq', 'Jheephizzy', 'Name changed');
-cmp_ok($arty->name, 'eq', $name, 'Name rolled back');
+ # Rollback the generated name
+ # Active: 0
+ $schema->svp_rollback;
-$arty->update({ name => 'Jheephizzy'});
+ cmp_ok($stats->{'SVP_ROLLBACK'}, '==', 1, 'Statistics svp_rollback tickled');
-# Active: 0 1
-$schema->svp_begin('testing1');
+ $arty->discard_changes;
-$arty->update({ name => 'yourmom' });
+ cmp_ok($arty->name, 'eq', $name, 'Name rolled back');
-# Active: 0 1 2
-$schema->svp_begin('testing2');
+ $arty->update({ name => 'Jheephizzy'});
-$arty->update({ name => 'gphat' });
-$arty->discard_changes;
-cmp_ok($arty->name, 'eq', 'gphat', 'name changed');
-# Active: 0 1 2
-# Rollback doesn't DESTROY the savepoint, it just rolls back to the value
-# at it's conception
-$schema->svp_rollback('testing2');
-$arty->discard_changes;
-cmp_ok($arty->name, 'eq', 'yourmom', 'testing2 reverted');
+ # Active: 0 1
+ $schema->svp_begin('testing1');
-# Active: 0 1 2 3
-$schema->svp_begin('testing3');
-$arty->update({ name => 'coryg' });
-# Active: 0 1 2 3 4
-$schema->svp_begin('testing4');
-$arty->update({ name => 'watson' });
+ $arty->update({ name => 'yourmom' });
-# Release 3, which implicitly releases 4
-# Active: 0 1 2
-$schema->svp_release('testing3');
-$arty->discard_changes;
-cmp_ok($arty->name, 'eq', 'watson', 'release left data');
-# This rolls back savepoint 2
-# Active: 0 1 2
-$schema->svp_rollback;
-$arty->discard_changes;
-cmp_ok($arty->name, 'eq', 'yourmom', 'rolled back to 2');
+ # Active: 0 1 2
+ $schema->svp_begin('testing2');
-# Rollback the original savepoint, taking us back to the beginning, implicitly
-# rolling back savepoint 1 and 2
-$schema->svp_rollback('savepoint_0');
-$arty->discard_changes;
-cmp_ok($arty->name, 'eq', 'foo', 'rolled back to start');
+ $arty->update({ name => 'gphat' });
+ $arty->discard_changes;
+ cmp_ok($arty->name, 'eq', 'gphat', 'name changed');
+ # Active: 0 1 2
+ # Rollback doesn't DESTROY the savepoint, it just rolls back to the value
+ # at its conception
+ $schema->svp_rollback('testing2');
+ $arty->discard_changes;
+ cmp_ok($arty->name, 'eq', 'yourmom', 'testing2 reverted');
-$schema->txn_commit;
+ # Active: 0 1 2 3
+ $schema->svp_begin('testing3');
+ $arty->update({ name => 'coryg' });
+ # Active: 0 1 2 3 4
+ $schema->svp_begin('testing4');
+ $arty->update({ name => 'watson' });
-# And now to see if txn_do will behave correctly
+ # Release 3, which implicitly releases 4
+ # Active: 0 1 2
+ $schema->svp_release('testing3');
+ $arty->discard_changes;
+ cmp_ok($arty->name, 'eq', 'watson', 'release left data');
+ # This rolls back savepoint 2
+ # Active: 0 1 2
+ $schema->svp_rollback;
+ $arty->discard_changes;
+ cmp_ok($arty->name, 'eq', 'yourmom', 'rolled back to 2');
-$schema->txn_do (sub {
- $schema->txn_do (sub {
- $arty->name ('Muff');
+ # Rollback the original savepoint, taking us back to the beginning, implicitly
+ # rolling back savepoint 1 and 2
+ $schema->svp_rollback('savepoint_0');
+ $arty->discard_changes;
+ cmp_ok($arty->name, 'eq', 'foo', 'rolled back to start');
- $arty->update;
- });
+ $schema->txn_commit;
+
+ # And now to see if txn_do will behave correctly
+ $schema->txn_do (sub {
+ $schema->txn_do (sub {
+ $arty->name ('Muff');
+ $arty->update;
+ });
eval {
$schema->txn_do (sub {
- $arty->name ('Moff');
-
- $arty->update;
-
- $arty->discard_changes;
-
- is($arty->name,'Moff','Value updated in nested transaction');
-
- $schema->storage->dbh->do ("GUARANTEED TO PHAIL");
- });
+ $arty->name ('Moff');
+ $arty->update;
+ $arty->discard_changes;
+ is($arty->name,'Moff','Value updated in nested transaction');
+ $schema->storage->dbh->do ("GUARANTEED TO PHAIL");
+ });
};
ok ($@,'Nested transaction failed (good)');
@@ -153,15 +155,20 @@ $schema->txn_do (sub {
$arty->update;
});
-$arty->discard_changes;
+ $arty->discard_changes;
+
+ is($arty->name,'Miff','auto_savepoint worked');
+
+ cmp_ok($stats->{'SVP_BEGIN'},'==',7,'Correct number of savepoints created');
-is($arty->name,'Miff','auto_savepoint worked');
+ cmp_ok($stats->{'SVP_RELEASE'},'==',3,'Correct number of savepoints released');
-cmp_ok($stats->{'SVP_BEGIN'},'==',7,'Correct number of savepoints created');
+ cmp_ok($stats->{'SVP_ROLLBACK'},'==',5,'Correct number of savepoint rollbacks');
-cmp_ok($stats->{'SVP_RELEASE'},'==',3,'Correct number of savepoints released');
+ $schema->storage->dbh->do ("DROP TABLE artist");
+}}
-cmp_ok($stats->{'SVP_ROLLBACK'},'==',5,'Correct number of savepoint rollbacks');
+done_testing;
-END { $schema->storage->dbh->do ("DROP TABLE artist") if defined $schema }
+END { eval { $schema->storage->dbh->do ("DROP TABLE artist") } if defined $schema }
diff --git a/t/99dbic_sqlt_parser.t b/t/99dbic_sqlt_parser.t
index ce103d1..87e4d68 100644
--- a/t/99dbic_sqlt_parser.t
+++ b/t/99dbic_sqlt_parser.t
@@ -30,7 +30,8 @@ BEGIN {
lives_ok { isa_ok (create_schema ({ schema => 'DBICTest::Schema' }), 'SQL::Translator::Schema', 'SQLT schema object produced') };
-my $schema = DBICTest->init_schema();
+my $schema = DBICTest->init_schema( no_deploy => 1 );
+
# Dummy was yanked out by the sqlt hook test
# CustomSql tests the horrific/deprecated ->name(\$sql) hack
# YearXXXXCDs are views
diff --git a/t/cdbi/22-deflate_order.t b/t/cdbi/22-deflate_order.t
index 1de3f87..cdb8c0d 100644
--- a/t/cdbi/22-deflate_order.t
+++ b/t/cdbi/22-deflate_order.t
@@ -8,11 +8,6 @@ if ($@) {
plan (skip_all => "Time::Piece::MySQL, Class::Trigger and DBIx::ContextualFetch required: $@");
}
-plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
- unless ($ENV{DBICTEST_MYSQL_DSN} && $ENV{DBICTEST_MYSQL_USER});
-
-plan tests => 3;
-
use lib 't/cdbi/testlib';
use_ok ('Log');
@@ -27,3 +22,4 @@ $log->update;
ok eval { $log->datetime_stamp }, "Have datetime after update";
diag $@ if $@;
+done_testing;
diff --git a/t/cdbi/columns_as_hashes.t b/t/cdbi/columns_as_hashes.t
index 6a9b1bc..06fc149 100644
--- a/t/cdbi/columns_as_hashes.t
+++ b/t/cdbi/columns_as_hashes.t
@@ -90,8 +90,11 @@ warning_is {
# Emulate that Class::DBI inflates immediately
SKIP: {
- skip "Need MySQL to run this test", 3 unless eval { require MyFoo };
-
+ unless (eval { require MyFoo }) {
+ my ($err) = $@ =~ /([^\n]+)/;
+ skip $err, 3
+ }
+
my $foo = MyFoo->insert({
name => 'Whatever',
tdate => '1949-02-01',
diff --git a/t/cdbi/testlib/MyBase.pm b/t/cdbi/testlib/MyBase.pm
index 6821a39..7885ed5 100644
--- a/t/cdbi/testlib/MyBase.pm
+++ b/t/cdbi/testlib/MyBase.pm
@@ -8,6 +8,24 @@ use DBI;
our $dbh;
+my $err;
+if (! $ENV{DBICTEST_MYSQL_DSN} ) {
+ $err = 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test';
+}
+elsif ( ! DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql') ) {
+ $err = 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
+}
+
+if ($err) {
+ my $t = eval { Test::Builder->new };
+ if ($t and ! $t->current_test) {
+ $t->skip_all ($err);
+ }
+ else {
+ die "$err\n";
+ }
+}
+
my @connect = (@ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/}, { PrintError => 0});
$dbh = DBI->connect(@connect) or die DBI->errstr;
my @table;
diff --git a/t/lib/DBICTest/Util/OverrideRequire.pm b/t/lib/DBICTest/Util/OverrideRequire.pm
new file mode 100644
index 0000000..ffae8bf
--- /dev/null
+++ b/t/lib/DBICTest/Util/OverrideRequire.pm
@@ -0,0 +1,134 @@
+package DBICTest::Util::OverrideRequire;
+
+# no use/require of any kind - work bare
+
+BEGIN {
+ # Neat STDERR require call tracer
+ #
+ # 0 - no trace
+ # 1 - just requires and return values
+ # 2 - neat stacktrace (assumes that the supplied $override_cref does *not* (ab)use goto)
+ # 3 - full stacktrace
+ *TRACE = sub () { 0 };
+}
+
+# Takes a single coderef and replaces CORE::GLOBAL::require with it.
+#
+# On subsequent require() calls, the coderef will be invoked with
+# two arguments - ($next_require, $module_name_copy)
+#
+# $next_require is a coderef closing over the module name. It needs
+# to be invoked at some point without arguments for the actual
+# require to take place (this way your coderef in essence becomes an
+# around modifier)
+#
+# $module_name_copy is a string-copy of what $next_require is closing
+# over. The reason for the copy is that you may trigger a side effect
+# on magical values, and subsequently abort the require (e.g.
+# require v.5.8.8 magic)
+#
+# All of this almost verbatim copied from Lexical::SealRequireHints
+# Zefram++
+sub override_global_require (&) {
+ my $override_cref = shift;
+
+ our $next_require = defined(&CORE::GLOBAL::require)
+ ? \&CORE::GLOBAL::require
+ : sub {
+
+ my ($arg) = @_;
+
+ # The shenanigans with $CORE::GLOBAL::{require}
+ # are required because if there's a
+ # &CORE::GLOBAL::require when the eval is
+ # executed then the CORE::require in there is
+ # interpreted as plain require on some Perl
+ # versions, leading to recursion.
+ my $grequire = delete $CORE::GLOBAL::{require};
+
+ my $res = eval sprintf '
+ local $SIG{__DIE__};
+ $CORE::GLOBAL::{require} = $grequire;
+ package %s;
+ CORE::require($arg);
+ ', scalar caller(0); # the caller already had its package replaced
+
+ my $err = $@ if $@ ne '';
+
+ if( TRACE ) {
+ if (TRACE == 1) {
+ printf STDERR "Require of '%s' (returned: '%s')\n",
+ (my $m_copy = $arg),
+ (my $r_copy = $res),
+ ;
+ }
+ else {
+ my ($fr_num, @fr, @tr, $excise);
+ while (@fr = caller($fr_num++)) {
+
+ # Package::Stash::XS is a cock and gets mightily confused if one
+ # uses a regex in the require hook. Even though it happens only
+ # on < 5.8.7 it's still rather embarrassing (also wtf does P::S::XS
+ # even need to regex its own module name?!). So we do not use re :)
+ if (TRACE == 3 or (index($fr[1], '(eval ') != 0 and index($fr[1], __FILE__) != 0) ) {
+ push @tr, [@fr]
+ }
+
+ # the caller before this would be the override site - kill it away
+ # if the cref writer uses goto - well tough, tracer won't work
+ if ($fr[3] eq 'DBICTest::Util::OverrideRequire::__ANON__') {
+ $excise ||= $tr[-2]
+ if TRACE == 2;
+ }
+ }
+
+ my @stack =
+ map { "$_->[1], line $_->[2]" }
+ grep { ! $excise or $_->[1] ne $excise->[1] or $_->[2] ne $excise->[2] }
+ @tr
+ ;
+
+ printf STDERR "Require of '%s' (returned: '%s')\n%s\n\n",
+ (my $m_copy = $arg),
+ (my $r_copy = $res||''),
+ join "\n", (map { " $_" } @stack)
+ ;
+ }
+ }
+
+ die $err if defined $err;
+
+ return $res;
+ }
+ ;
+
+ # Need to suppress the redefinition warning, without
+ # invoking warnings.pm.
+ BEGIN { ${^WARNING_BITS} = ""; }
+
+ *CORE::GLOBAL::require = sub {
+ die "wrong number of arguments to require\n"
+ unless @_ == 1;
+
+ # the copy is to prevent accidental overload firing (e.g. require v5.8.8)
+ my ($arg_copy) = our ($arg) = @_;
+
+ return $override_cref->(sub {
+ die "The require delegate takes no arguments\n"
+ if @_;
+
+ my $res = eval sprintf '
+ local $SIG{__DIE__};
+ package %s;
+ $next_require->($arg);
+ ', scalar caller(2); # 2 for the indirection of the $override_cref around
+
+ die $@ if $@ ne '';
+
+ return $res;
+
+ }, $arg_copy);
+ }
+}
+
+1;
diff --git a/t/prefetch/diamond.t b/t/prefetch/diamond.t
index 0de8009..9dbea1a 100644
--- a/t/prefetch/diamond.t
+++ b/t/prefetch/diamond.t
@@ -38,11 +38,21 @@ my $ars = $schema->resultset ('Artwork');
my $cd_paths = {
'no cd' => [],
+ 'no cd empty' => [ '' ],
+ 'no cd undef' => [ undef ],
+ 'no cd href' => [ {} ],
+ 'no cd aoh' => [ [{}] ],
+ 'no cd complex' => [ [ [ undef ] ] ],
'cd' => ['cd'],
'cd->artist1' => [{'cd' => 'artist'}]
};
my $a2a_paths = {
'no a2a' => [],
+ 'no a2a empty ' => [ '' ],
+ 'no a2a undef' => [ undef ],
+ 'no a2a href' => [ {} ],
+ 'no a2a aoh' => [ [{}] ],
+ 'no a2a complex' => [ [ '' ] ],
'a2a' => ['artwork_to_artist'],
'a2a->artist2' => [{'artwork_to_artist' => 'artist'}]
};
diff --git a/t/prefetch/incomplete.t b/t/prefetch/incomplete.t
index a93e693..8682ba7 100644
--- a/t/prefetch/incomplete.t
+++ b/t/prefetch/incomplete.t
@@ -6,8 +6,6 @@ use Test::Exception;
use lib qw(t/lib);
use DBICTest;
-plan tests => 9;
-
my $schema = DBICTest->init_schema();
lives_ok(sub {
@@ -51,3 +49,14 @@ lives_ok(sub {
is ($cd->artist->name, 'Random Boy Band', 'Artist object has correct name');
}, 'implicit keyless prefetch works');
+
+# sane error
+throws_ok(
+ sub {
+ $schema->resultset('Track')->search({}, { join => { cd => 'artist' }, '+columns' => 'artist.name' } )->next;
+ },
+ qr|\QCan't inflate manual prefetch into non-existent relationship 'artist' from 'Track', check the inflation specification (columns/as) ending in 'artist.name'|,
+ 'Sensible error message on mis-specified "as"',
+);
+
+done_testing;
diff --git a/t/search/preserve_original_rs.t b/t/search/preserve_original_rs.t
index 525efd4..8896b48 100644
--- a/t/search/preserve_original_rs.t
+++ b/t/search/preserve_original_rs.t
@@ -9,14 +9,10 @@ use DBICTest;
use DBIC::SqlMakerTest;
use DBIC::DebugObj;
-# use Data::Dumper comparisons to avoid mesing with coderefs
-use Data::Dumper;
-$Data::Dumper::Sortkeys = 1;
+use Storable qw/dclone/;
my $schema = DBICTest->init_schema();
-plan tests => 22;
-
# A search() with prefetch seems to pollute an already joined resultset
# in a way that offsets future joins (adapted from a test case by Debolaz)
{
@@ -24,38 +20,38 @@ plan tests => 22;
# test a real-life case - rs is obtained by an implicit m2m join
$cd_rs = $schema->resultset ('Producer')->first->cds;
- $attrs = Dumper $cd_rs->{attrs};
+ $attrs = dclone( $cd_rs->{attrs} );
$cd_rs->search ({})->all;
- is (Dumper ($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after a simple search');
+ is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after a simple search');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is (Dumper ($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after search with prefetch');
+ is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after search with prefetch');
}, 'first prefetching search ok');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is (Dumper ($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after another search with prefetch')
+ is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after another search with prefetch')
}, 'second prefetching search ok');
# test a regular rs with an empty seen_join injected - it should still work!
$cd_rs = $schema->resultset ('CD');
$cd_rs->{attrs}{seen_join} = {};
- $attrs = Dumper $cd_rs->{attrs};
+ $attrs = dclone( $cd_rs->{attrs} );
$cd_rs->search ({})->all;
- is (Dumper ($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after a simple search');
+ is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after a simple search');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is (Dumper ($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after search with prefetch');
+ is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after search with prefetch');
}, 'first prefetching search ok');
lives_ok (sub {
$cd_rs->search ({'artist.artistid' => 1}, { prefetch => 'artist' })->all;
- is (Dumper ($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after another search with prefetch')
+ is_deeply (dclone($cd_rs->{attrs}), $attrs, 'Resultset attributes preserved after another search with prefetch')
}, 'second prefetching search ok');
}
@@ -89,3 +85,5 @@ for my $s (qw/a2a artw cd artw_back/) {
is_same_sql_bind ($rs->as_query, $q{$s}{query}, "$s resultset unmodified (as_query matches)" );
}
+
+done_testing;
diff --git a/t/storage/debug.t b/t/storage/debug.t
index ab70122..632f370 100644
--- a/t/storage/debug.t
+++ b/t/storage/debug.t
@@ -10,7 +10,7 @@ use DBIC::DebugObj;
use DBIC::SqlMakerTest;
use Path::Class qw/file/;
-BEGIN { delete @ENV{qw(DBIC_TRACE_PROFILE)} }
+BEGIN { delete @ENV{qw(DBIC_TRACE DBIC_TRACE_PROFILE DBICTEST_SQLITE_USE_FILE)} }
my $schema = DBICTest->init_schema();
diff --git a/t/storage/error.t b/t/storage/error.t
index b72b0fe..44cc1c9 100644
--- a/t/storage/error.t
+++ b/t/storage/error.t
@@ -16,7 +16,7 @@ warnings_are ( sub {
sub {
$schema->resultset('CD')->create({ title => 'vacation in antarctica' })
},
- qr/DBI Exception.+constraint failed.+cd\.artist.+NULL/s
+ qr/DBI Exception.+cd\.artist.+NULL/s
); # as opposed to some other error
}, [], 'No warnings besides exception' );
diff --git a/t/storage/nobindvars.t b/t/storage/nobindvars.t
index a2e0cba..d2dd840 100644
--- a/t/storage/nobindvars.t
+++ b/t/storage/nobindvars.t
@@ -4,39 +4,20 @@ use warnings;
use Test::More;
use lib qw(t/lib);
use DBICTest;
+use DBIC::DebugObj;
+use DBIC::SqlMakerTest;
use DBI::Const::GetInfoType;
-my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
-
-#warn "$dsn $user $pass";
-
-plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test'
- unless ($dsn && $user);
-
-{ # Fake storage driver for mysql + no bind variables
- package DBIx::Class::Storage::DBI::MySQLNoBindVars;
+{ # Fake storage driver for SQLite + no bind variables
+ package DBICTest::SQLite::NoBindVars;
use Class::C3;
use base qw/
DBIx::Class::Storage::DBI::NoBindVars
- DBIx::Class::Storage::DBI::mysql
+ DBIx::Class::Storage::DBI::SQLite
/;
- $INC{'DBIx/Class/Storage/DBI/MySQLNoBindVars.pm'} = 1;
}
-# XXX Class::C3 doesn't like some of the Storage stuff happening late...
-Class::C3::reinitialize();
-
-my $schema = DBICTest::Schema->clone;
-$schema->storage_type('::DBI::MySQLNoBindVars');
-$schema->connection($dsn, $user, $pass);
-
-my $dbh = $schema->storage->dbh;
-
-$dbh->do("DROP TABLE IF EXISTS artist;");
-
-$dbh->do("CREATE TABLE artist (artistid INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100), rank INTEGER NOT NULL DEFAULT '13', charfield CHAR(10));");
-
-$schema->class('Artist')->load_components('PK::Auto');
+my $schema = DBICTest->init_schema (storage_type => 'DBICTest::SQLite::NoBindVars', no_populate => 1);
# test primary key handling
my $new = $schema->resultset('Artist')->create({ name => 'foo' });
@@ -51,16 +32,29 @@ my $it = $schema->resultset('Artist')->search( {},
offset => 2,
order_by => 'artistid' }
);
+
is( $it->count, 3, "LIMIT count ok" ); # ask for 3 rows out of 7 artists
+
+my ($sql, @bind);
+my $orig_debugobj = $schema->storage->debugobj;
+my $orig_debug = $schema->storage->debug;
+$schema->storage->debugobj (DBIC::DebugObj->new (\$sql, \@bind) );
+$schema->storage->debug (1);
+
is( $it->next->name, "Artist 2", "iterator->next ok" );
$it->next;
$it->next;
is( $it->next, undef, "next past end of resultset ok" );
-# clean up our mess
-END {
- my $dbh = eval { $schema->storage->_dbh };
- $dbh->do("DROP TABLE artist") if $dbh;
-}
+$schema->storage->debugobj ($orig_debugobj);
+$schema->storage->debug ($orig_debug);
+
+is_same_sql_bind (
+ $sql,
+ \@bind,
+ 'SELECT me.artistid, me.name, me.rank, me.charfield FROM artist me ORDER BY artistid LIMIT 3 OFFSET 2',
+ [],
+ 'Correctly interpolated SQL'
+);
done_testing;
diff --git a/t/storage/quote_names.t b/t/storage/quote_names.t
index 4d768cf..6bb8bfd 100644
--- a/t/storage/quote_names.t
+++ b/t/storage/quote_names.t
@@ -71,21 +71,11 @@ for my $class (keys %expected) { SKIP: {
# Try quote_names with available DBs.
-# SQLite first.
-
-my $schema = DBICTest->init_schema(quote_names => 1);
-
-is $schema->storage->sql_maker->quote_char, '"',
- q{quote_names => 1 sets correct quote_char for SQLite ('"')};
-
-is $schema->storage->sql_maker->name_sep, '.',
- q{quote_names => 1 sets correct name_sep for SQLite (".")};
-
-# Now the others.
-
# Env var to base class mapping, these are the DBs I actually have.
-# -- Caelum
+# the SQLITE is a fake memory dsn
+local $ENV{DBICTEST_SQLITE_DSN} = 'dbi:SQLite::memory:';
my %dbs = (
+ SQLITE => 'DBIx::Class::Storage::DBI::SQLite',
ORA => 'DBIx::Class::Storage::DBI::Oracle::Generic',
PG => 'DBIx::Class::Storage::DBI::Pg',
MYSQL => 'DBIx::Class::Storage::DBI::mysql',
@@ -99,7 +89,13 @@ my %dbs = (
MSSQL_ODBC => 'DBIx::Class::Storage::DBI::MSSQL',
);
-while (my ($db, $base_class) = each %dbs) {
+# Make sure oracle is tried last - some clients (e.g. 10.2) have symbol
+# clashes with libssl, and will segfault everything coming after them
+for my $db (sort {
+ $a eq 'ORA' ? 1
+ : $b eq 'ORA' ? -1
+ : $a cmp $b
+} keys %dbs) {
my ($dsn, $user, $pass) = map $ENV{"DBICTEST_${db}_$_"}, qw/DSN USER PASS/;
next unless $dsn;
@@ -114,28 +110,30 @@ while (my ($db, $base_class) = each %dbs) {
1;
} || next;
- my $expected_quote_char = $expected{$base_class}{quote_char};
- my $quote_char_text = dumper($expected_quote_char);
+ my ($exp_quote_char, $exp_name_sep) =
+ @{$expected{$dbs{$db}}}{qw/quote_char name_sep/};
+
+ my ($quote_char_text, $name_sep_text) = map { dumper($_) }
+ ($exp_quote_char, $exp_name_sep);
is_deeply $schema->storage->sql_maker->quote_char,
- $expected_quote_char,
+ $exp_quote_char,
"$db quote_char with quote_names => 1 is $quote_char_text";
- my $expected_name_sep = $expected{$base_class}{name_sep};
is $schema->storage->sql_maker->name_sep,
- $expected_name_sep,
- "$db name_sep with quote_names => 1 is '$expected_name_sep'";
+ $exp_name_sep,
+ "$db name_sep with quote_names => 1 is $name_sep_text";
}
done_testing;
sub dumper {
- my $val = shift;
+ my $val = shift;
- my $dd = DumperObject;
- $dd->Indent(0);
- return $dd->Values([ $val ])->Dump;
+ my $dd = DumperObject;
+ $dd->Indent(0);
+ return $dd->Values([ $val ])->Dump;
}
1;
diff --git a/t/storage/txn_scope_guard.t b/t/storage/txn_scope_guard.t
index 03985b3..739ed6c 100644
--- a/t/storage/txn_scope_guard.t
+++ b/t/storage/txn_scope_guard.t
@@ -116,7 +116,9 @@ use DBICTest;
}
# make sure it warns *big* on failed rollbacks
-{
+# test with and without a poisoned $@
+for my $poison (0,1) {
+
my $schema = DBICTest->init_schema();
no strict 'refs';
@@ -160,11 +162,12 @@ use DBICTest;
}
};
{
+ eval { die 'GIFT!' if $poison };
my $guard = $schema->txn_scope_guard;
$schema->resultset ('Artist')->create ({ name => 'bohhoo'});
}
- is (@w, 2, 'Both expected warnings found');
+ is (@w, 2, 'Both expected warnings found' . ($poison ? ' (after $@ poisoning)' : '') );
# just to mask off warning since we could not disconnect above
$schema->storage->_dbh->disconnect;
diff --git a/xt/podcoverage.t b/xt/podcoverage.t
index be4bbbb..f67a2f0 100644
--- a/xt/podcoverage.t
+++ b/xt/podcoverage.t
@@ -149,6 +149,9 @@ my $exceptions = {
# skipped because the synopsis covers it clearly
'DBIx::Class::InflateColumn::File' => { skip => 1 },
+
+# internal subclass, nothing to POD
+ 'DBIx::Class::ResultSet::Pager' => { skip => 1 },
};
my $ex_lookup = {};
--
Debian packaging of libdbix-class-perl
More information about the Pkg-perl-cvs-commits
mailing list