List: Commits
From: Serge Kozlov
Date: August 17 2010 8:35pm
Subject: bzr push into nuts branch (Serge.Kozlov:382 to 383)
  383 Serge Kozlov	2010-08-18
      Improvements to DataSource:
      1. DataSource files moved to My::Nuts::Library::DataSource
      2. Renamed nuts.pl options (see the example invocation below):
        --data-source -> --ds
        --data-source-options -> --ds-options
      3. New nuts.pl options:
        --ds-load-dump - load queries from a file
        --ds-save-dump - store queries into a file
      4. Updated test cases that use DataSource
      5. New default DataSource: SimpleRandom (instead of Simple).
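
      The renaming is a one-to-one mapping of the old option names to the new
      short forms. A hypothetical before/after invocation (the data source name
      and its arguments are illustrative, and the build/suite arguments are
      omitted):

        old:  perl nuts.pl ... --data-source=RQG --data-source-option="--queries=100"
        new:  perl nuts.pl ... --ds=RQG --ds-options="--queries=100"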

    removed:
      lib/DataSource/Simple.pm
    added:
      lib/My/Nuts/Library/DataSource/DataTypes/
      lib/My/Nuts/Library/DataSource/DataTypes/MySQL.pm
      lib/My/Nuts/Library/DataSource/SimpleRandom.pm
    renamed:
      lib/DataSource/ => lib/My/Nuts/Library/DataSource/
      lib/DataSource.pm => lib/My/Nuts/Library/DataSource.pm
    modified:
      bin/Driver.pm
      bin/nuts.pl
      lib/My/Nuts/Library/DataSource/Instance/DataSource.pm
      lib/My/Nuts/Library/DataSource/RQG.pm
      lib/My/Nuts/Library/DataSource/Sample.pm
      suites/rep/chained_diff_engine.pm
      suites/rep/client_redirect/client_redirect.pm
      suites/rep/hot_standby.pm
      suites/rep/semisync.pm
      suites/rep/stress_mixed/stress_mixed.pm
      suites/samples/data_source.pm
      suites/updown/rep_basic_downgrade.pm
      suites/updown/rep_basic_upgrade.pm
      lib/My/Nuts/Library/DataSource.pm
  382 Serge Kozlov	2010-07-30
      The patch fixes an issue where Nuts used binary files from .libs directories.

    modified:
      lib/My/Nuts/util/mysql.pm
=== modified file 'bin/Driver.pm'
--- a/bin/Driver.pm	2010-03-31 20:40:41 +0000
+++ b/bin/Driver.pm	2010-08-17 20:30:52 +0000
@@ -86,8 +86,10 @@ sub opt_configure
                        "version!"         => \$Parameters::version,
                        "env-setup-file=s"   => \$Parameters::env_setup_file,
                        "env-cleanup-file=s" => \$Parameters::env_cleanup_file,
-                       "data-source=s" => \$Parameters::data_source,
-                       "data-source-option=s" => \@Parameters::data_source_options,
+                       "ds=s" 			=> \$Parameters::data_source,
+                       "ds-save-dump=s" 	=> \$Parameters::data_source_save_dump,
+                       "ds-load-dump=s" 	=> \$Parameters::data_source_load_dump,
+                       "ds-options=s" 		=> \@Parameters::data_source_options,
                        "upgrade-server-order=s" => \$upgrade_server_order
                        
           ) or pod2usage (2) && exit 1;
@@ -172,7 +174,7 @@ sub opt_configure
     }
     if (! defined($Parameters::data_source))
     {
-	$Parameters::data_source = 'Simple';
+	$Parameters::data_source = 'SimpleRandom';
     }
     if (defined($Parameters::formatter))
     {

=== modified file 'bin/nuts.pl'
--- a/bin/nuts.pl	2010-03-31 20:40:41 +0000
+++ b/bin/nuts.pl	2010-08-17 20:30:52 +0000
@@ -135,11 +135,31 @@ nuts.pl - script that starts the Nuts te
                     test case, then build allocation would restart
                     from start of buildorder, meaning that fourth
                     server would get b1.
+                    
   --upgrade-server-order
 		    Must be used together with multiple builds. This allow user
 		    to set the number of build (number from command line like 
 		    --buildorder) that will be used in test case for command
 		    updown_server().
+		    
+  --ds=<file>	    Name of the library that should be used for test cases with 
+		    included external source of queries:
+		    use My::Nuts::Library::DataSource
+		    Nuts seeks <file> in <NUTS_HOME>/lib/My/Nuts/Library/DataSource,
+		    load it as class and try to get list of queries via subroutine
+		    get_data_from_source();		    
+		    
+  --ds-save-dump=<file>
+		    Store queries generated by DataSource library into <file>.
+		
+  --ds-load-dump=<file>
+		    Load queries from <file>.
+		
+  --ds-options=<parameters>
+		    Specify parameters for DataSource library. Each DataSource
+		    library can have own <parameters>.
+		
+		
 
 . To run the self tests using an azalea building in the directory mysql-azalea:
 

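The --ds-save-dump and --ds-load-dump options documented above allow a
capture-and-replay workflow: generate queries once, store them, and reuse the
identical statements in later runs. A hypothetical sketch (the dump file path
is arbitrary, other nuts.pl arguments omitted):

  # First run: let the SimpleRandom data source generate queries and save them.
  perl nuts.pl ... --ds=SimpleRandom --ds-save-dump=/tmp/queries.dump

  # Later runs: replay exactly the same queries instead of generating new ones.
  perl nuts.pl ... --ds-load-dump=/tmp/queries.dump
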
=== removed file 'lib/DataSource/Simple.pm'
--- a/lib/DataSource/Simple.pm	2009-09-25 11:24:12 +0000
+++ b/lib/DataSource/Simple.pm	1970-01-01 00:00:00 +0000
@@ -1,32 +0,0 @@
-package DataSource::Simple;
-use Exporter;
-our @ISA = qw(Exporter DataSource::Instance::DataSource);
-our @IMPORT = qw(get_data_from_source);
-use Cwd;
-use strict;
-use warnings;
-use Class::Std::Utils;
-use DataSource::Instance::DataSource;
-
-my @words= (
-    'Beijing', 'Berlin', 'Buenos Aires', 'Cairo', 'Delphi', 
-    'Jakarta', 'Lisbon', 'London', 'Manila', 'Moscow', 
-    'Mexico', 'New York', 'Oslo', 'Paris', 'Prague', 
-    'Rio de Janeiro', 'Rome', 'Seoul', 'Stockholm', 'Tokyo');
-
-sub get_data_from_source
-{
-  my ( $self ) = @_;
-  my $ident = ident ($self);
-  my $queries = [];
-  my $w_size = scalar(@words);
-  push(@{$queries}, "CREATE TABLE t1 (a INT NOT NULL PRIMARY KEY, b VARCHAR(10) NOT NULL, c CHAR(200) NULL);"); 
-  for (my $i= 1; $i <= $w_size; $i++)
-  {
-    push(@{$queries}, "INSERT INTO t1 (a, b) VALUES ($i, '$words[$i-1]');");
-    push(@{$queries}, "INSERT INTO t1 (a, b) VALUES (" . (100*$i) . ", '$words[$i-1]');");
-    push(@{$queries}, "UPDATE t1 SET c='$words[$w_size-$i]'");
-  }
-  push(@{$queries}, "DELETE FROM t1 WHERE a > 100;");
-  return $queries;
-}

=== renamed directory 'lib/DataSource' => 'lib/My/Nuts/Library/DataSource'
=== renamed file 'lib/DataSource.pm' => 'lib/My/Nuts/Library/DataSource.pm'
--- a/lib/DataSource.pm	2009-09-17 09:26:13 +0000
+++ b/lib/My/Nuts/Library/DataSource.pm	2010-08-17 20:30:52 +0000
@@ -1,22 +1,53 @@
-package DataSource;
+package My::Nuts::Library::DataSource;
 use Exporter;
 our @ISA = qw(Exporter);
 our @EXPORT = qw(get_data_from_source);
 use Cwd;
+use IO::File;
+use My;
 use strict;
 use warnings;
 
+my $queries= [];
+
 sub get_data_from_source
 {
-  my $class= 'DataSource::' . $Parameters::data_source;
-  eval "require $class";
-  if ($@)
-  {
-    print STDERR $@;    
-  }
-  my $data_source= $class->new();
-  my $data= $data_source->get_data_from_source();
-  return $data;
+    my $dumpfile;
+    # Load queries from a dump file
+    if (defined $Parameters::data_source_load_dump)
+    {
+	$queries= [];
+	$dumpfile= new IO::File($Parameters::data_source_load_dump, "r")
+	    or die("Cannot open dump file " . $Parameters::data_source_load_dump);
+	my @lines= $dumpfile->getlines();
+	$dumpfile= undef;
+	foreach (@lines)
+	{
+	    s/\n//g;
+	    push(@$queries, $_);
+	}
+    }
+    elsif (scalar(@$queries) == 0)
+    {
+	# Generate queries only if array is empty (just re-use queries from a recent processed test case)
+	my $class= 'My::Nuts::Library::DataSource::' . $Parameters::data_source;
+	eval "require $class";
+	if ($@)
+	{
+	    print STDERR $@;    
+	}
+	my $data_source= $class->new();
+	$queries= $data_source->get_data_from_source();
+    }
+    # Store generated queries to a dump file
+    if (defined $Parameters::data_source_save_dump)
+    {
+	$dumpfile= new IO::File($Parameters::data_source_save_dump, "w") 
+	    or die("Cannot open dump file " . $Parameters::data_source_save_dump);
+	print $dumpfile join("\n", @$queries);
+	$dumpfile= undef;
+    }
+    return $queries;
 }
 
 1;
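
Judging from the save and load branches above, the dump file is plain text with
one SQL statement per line (statements are joined with "\n" on save and read
back line by line on load, so blank lines would come back as empty query
strings). A minimal Perl sketch that writes such a file by hand, with a
hypothetical file name and illustrative statements:

    use strict;
    use warnings;
    use IO::File;

    # One SQL statement per line, no blank lines.
    my $fh = IO::File->new("queries.dump", "w")
        or die "Cannot open queries.dump";
    print $fh join("\n",
        "CREATE TABLE t1 (a INT NOT NULL PRIMARY KEY, b VARCHAR(10) NOT NULL)",
        "INSERT INTO t1 (a, b) VALUES (1, 'Moscow')",
        "UPDATE t1 SET b = 'Berlin' WHERE a = 1");
    $fh->close();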

=== added directory 'lib/My/Nuts/Library/DataSource/DataTypes'
=== added file 'lib/My/Nuts/Library/DataSource/DataTypes/MySQL.pm'
--- a/lib/My/Nuts/Library/DataSource/DataTypes/MySQL.pm	1970-01-01 00:00:00 +0000
+++ b/lib/My/Nuts/Library/DataSource/DataTypes/MySQL.pm	2010-08-17 20:30:52 +0000
@@ -0,0 +1,507 @@
+package My::Nuts::Library::DataSource::DataTypes::MySQL;
+use Exporter;
+our @ISA = qw(Exporter);
+our @EXPORT = qw(get_data_types get_data_type_predefined_values get_data_type_random_value get_data_type_unique_random_values);
+use strict;
+use warnings;
+use Class::Std::Utils;
+
+my $data_types = {
+    "BIT(64)" => {
+	"group" => "numeric",
+	"ranges" => [["0b0000000000000000000000000000000000000000000000000000000000000000", "0b1111111111111111111111111111111111111111111111111111111111111111"]],
+    },
+    "TINYINT SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[-128, 127]],
+	"values" => [0]
+    },
+    "TINYINT UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[0, 255]]
+    },
+    "SMALLINT SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[ -32768, 32767]],
+	"values" => [0]
+    },
+    "SMALLINT UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[0, 65535]]
+    },
+    "MEDIUMINT SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[-8388608, 8388607]],
+	"values" => [0]
+    },
+    "MEDIUMINT UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[0, 16777215]]
+    },
+    "INT SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[-2147483648, 2147483647]],
+	"values" => [0]
+    },
+    "INT UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[0, 4294967295]]
+    },
+    "BIGINT SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[-9223372036854775808, 9223372036854775807]],
+	"values" => [0]
+    },
+    "BIGINT UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [[0, 18446744073709551615]]
+    },
+    "FLOAT SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [["-3.402823466E+38", "-1.175494351E-38"], 0, ["1.175494351E-38", "3.402823466E+38"]]
+    },
+    "FLOAT UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [0, ["1.175494351E-38","3.402823466E+38"]]
+    },
+    "DOUBLE SIGNED" => {
+	"group" => "numeric",
+	"ranges" => [["-1.7976931348623157E+308", "-2.2250738585072014E-308"], 0, ["2.2250738585072014E-308", "1.7976931348623157e+308"]]
+    },
+    "DOUBLE UNSIGNED" => {
+	"group" => "numeric",
+	"ranges" => [0, ["2.2250738585072014E-308", "1.7976931348623157e+308"]]
+    },
+    "DATETIME" => {
+	"group" => "datetime",
+	"ranges" => [["'1000-01-01 00:00:00'", "'9999-12-31 23:59:59'"]]
+    },
+    "DATE" => {
+	"group" => "datetime",
+	"ranges" => [["'1000-01-01'", "'9999-12-31'"]]
+    },
+    "TIME" => {
+	"group" => "datetime",
+	"ranges" => [["'-838:59:59'", "'838:59:59'"]]
+    },
+    "TIMESTAMP" => {
+	"group" => "datetime",
+	"ranges" => [["'1970-01-01 00:00:01'", "'2038-01-09 03:14:07'"]]
+    },
+    "YEAR(2)" => {
+	"group" => "datetime",
+	"ranges" => [["'00'", "'99'"]],
+	"values" => ["'69'", "'70'"]
+    },
+    "YEAR(4)" => {
+	"group" => "datetime",
+	"ranges" => [["'1901'", "'2155'"]]
+    },
+    "CHAR(254)" => {
+	"group" => "string",
+	"ranges" => []
+    },
+    "VARCHAR(254)" => {
+	"group" => "string",
+	"ranges" => []
+    },
+    "BINARY(254)" => {
+	"group" => "binary",
+	"ranges" => []
+    },
+    "VARBINARY(254)" => {
+	"group" => "binary",
+	"ranges" => []
+    },
+    "TINYBLOB" => {
+	"group" => "binary",
+	"ranges" => []
+    },
+    "BLOB" => {
+	"group" => "binary",
+	"ranges" => []
+    },
+    "MEDIUMBLOB" => {
+	"group" => "binary",
+	"ranges" => []
+    },
+    "LONGBLOB" => {
+	"group" => "string",
+	"ranges" => []
+    },
+    "TINYTEXT" => {
+	"group" => "string",
+	"ranges" => []
+    },
+    "TEXT" => {
+	"group" => "string",
+	"ranges" => []
+    },
+    "MEDIUMTEXT" => {
+	"group" => "string",
+	"ranges" => []
+    },
+    "LONGTEXT" => {
+	"group" => "string",
+	"ranges" => []
+    }
+};
+
+sub get_data_type_predefined_values
+{
+    my $type = shift;
+    my $values = [];
+    foreach my $range (@{$data_types->{$type}->{"ranges"}})
+    {
+	if (ref($range) eq "ARRAY")
+	{
+	    push(@$values, @$range);
+	}
+	else
+	{
+	    push(@$values, $range);
+	}
+    }    
+    foreach my $value (@{$data_types->{$type}->{"values"}})
+    {
+	push(@$values, $value);
+    }
+    return $values;
+}
+
+sub get_data_type_unique_random_values
+{
+    my ($type, $num) = @_;
+    my $used = {};
+    my $values = [];
+    my $value;
+    my $range_num = 0;
+    $range_num = scalar($data_types->{$type}->{"ranges"}) if (defined scalar($data_types->{$type}->{"ranges"}));
+    my $val_num = 0;
+    $val_num = scalar($data_types->{$type}->{"values"}) if (defined scalar($data_types->{$type}->{"values"}));
+    if ($range_num > 0 || $val_num >= $num)
+    {
+	while (scalar(@$values) < $num)
+	{
+	    $value = get_data_type_random_value($type);
+	    if (!defined($used->{$value}))
+	    {
+		push(@$values, $value);  
+		$used->{$value} = 1;
+	    }
+	}
+    }
+    return $values;
+}
+
+sub get_data_type_random_value
+{
+    my $type = shift;
+    my @pre_values = ();
+    if ( defined($data_types->{$type}) )
+    {
+	foreach my $range (@{$data_types->{$type}->{"ranges"}})
+	{
+	    if (ref($range) eq "ARRAY")
+	    {
+		push(@pre_values, get_random_value_from_range($type, $range));
+	    }
+	    else
+	    {
+		push(@pre_values, $range);
+	    }
+	}
+    }    
+    foreach my $value (@{$data_types->{$type}->{"values"}})
+    {
+	push(@pre_values, $value);
+    }
+    if ($type =~ m/(CHAR|BLOB|TEXT|BINARY)/i)
+    {
+	push(@pre_values, get_random_value_from_range($type, undef));
+    }
+    elsif ($type =~ m/^DECIMAL\(([0-9]+)\,([0-9]+)\)/i)
+    {
+	push(@pre_values, get_random_value_from_range($type, [$1, $2]));
+    }
+    return $pre_values[int(rand(scalar(@pre_values)))];
+}
+
+sub get_random_value_from_range
+{
+    my ($type, $range)= @_;
+    my $limit;
+    my $min;
+    my $max;
+    my $value= $range->[0];
+    if ($type =~ m/^(tinyint|smallint|mediumint|int|bigint).+/i)
+    {	
+	$limit= $range->[1];
+	if ($range->[0] =~ m/^\-(.+)$/)
+	{
+	    $limit= $limit + $1;
+	}
+	$value= int(rand($limit));
+	if ($range->[0] =~ m/^\-(.+)$/)
+	{
+	    $value= $value - $1;
+	}
+    }
+    elsif ($type =~ m/^bit.*/i)
+    {
+	if (length($range->[0]) == length($range->[1]) && $range->[0] =~ m/^0b/ && $range->[1] =~ m/^0b(.+)$/)
+	{
+	    $value= "0b";
+	    foreach my $idx (2..(length($range->[0])-1))
+	    {
+		if (substr($range->[0], $idx, 1) eq substr($range->[1], $idx, 1))
+		{
+		    $value .= $range->[0];
+		}
+		elsif (rand(2) >= 1) 
+		{
+		    $value .= "1";
+		}
+		else
+		{
+		    $value .= "0";
+		}
+	    }
+	}
+    }
+    elsif ($type =~ m/(char|text|blob|binary)/i)
+    {
+	my $limit = 50;
+	if ($type =~ m/(char|binary)\(([0-9]+)\)$/i)
+	{
+	    $limit = $2;
+	}
+	elsif ($type =~ m/(tiny)/i)
+	{
+	    $limit = 254;
+	}
+	$limit= int(rand($limit - 1)) + 1;
+	my @values = ();
+	for (1..$limit)
+	{
+	    my $cur_value= int(rand(96)) + 32;
+	    if ($cur_value == 34 || $cur_value == 39 || $cur_value == 44 || $cur_value == 92 || $cur_value == 96)
+	    {
+		$cur_value= 32;
+	    }	    
+	    push(@values, chr($cur_value));
+	}
+	$value= "'" . join("", @values) . "'";
+	if ($type =~ m/(blob|text)/i)
+	{
+	    $limit = 65433;
+	    if ($type =~ m/medium/i)
+	    {
+		$limit = 16777112;
+	    }
+	    elsif ($type =~ m/long/i)
+	    {
+		$limit = 4294967191;
+	    }	    
+	    $limit= 65433;
+	    $limit= int(rand($limit - 1)) + 1;
+	    my $char_list= " 01234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
+	    $value= substr($char_list, int(rand(length($char_list))), 1);
+	    $value = "REPEAT('" . $value . "', $limit)";
+	}
+    }
+    elsif ($type =~ m/(binary|blob)/i)
+    {
+	$min = $range->[0];
+	$max = $range->[1];
+	$value = "";
+	$min =~ s/(\'|\")//g;
+	$max =~ s/(\'|\")//g;
+	my $limit = 10;
+	for (1..$limit)
+	{
+	    $value .= chr(92) . (int(rand($max-$min))+$min);
+	}
+	$value = "'" . $value . "'";
+    }
+    elsif ($type =~ m/^(date|datetime|timestamp) *$/i)
+    {
+	$value = 
+	"-" . sprintf("%02d", int(rand(12))+1)
+	. "-" . sprintf("%02d", int(rand(28))+1)
+	. " " . sprintf("%02d", int(rand(24)))
+	. ":" . sprintf("%02d", int(rand(60)))
+	. ":" . sprintf("%02d", int(rand(60)));
+	if (($range->[0] . $range->[1]) =~ m/^\'([0-9]{4})\-.+\'([0-9]{4})\-/i)
+	{
+	    $min= $1;
+	    $max= $2-1;
+	    $value= sprintf("%04d", int(rand($max-$min))+$min) . $value;
+	}
+	if ($type =~ m/^date$/i)
+	{
+	    $value= substr($value, 0, 10);
+	}
+	$value= "'" . $value . "'";		
+    }
+    elsif ($type =~ m/^time$/i)
+    {
+	$value = 
+	":" . sprintf("%02d", int(rand(60)))
+	. ":" . sprintf("%02d", int(rand(60)));
+	if (($range->[0] . $range->[1]) =~ m/^\'\-([0-9]+).+\'([0-9]+)/i)
+	{
+	    $min= $1;
+	    $max= $2-1;
+	    $value= (int(rand($max+$min))-$min) . $value;
+	}
+	$value= "'" . $value . "'";		
+    }
+    elsif ($type =~ m/^(float|double).*/i)
+    {
+	my $minus= "";
+	if ($range->[0] =~ /^[0-9]+/ && $range->[1] =~ /^[0-9]+/)
+	{
+	    if ($range->[0] =~ m/e\-(.+)$/i)
+	    {
+		$min= $1 + 2;
+	    }
+	    if ($range->[1] =~ m/e\+(.+)$/i)
+	    {
+		$max= $1 - 2;
+	    }
+	    $value = int(rand($min+$max)) - $min;
+	    if ($value >= 0)
+	    {
+		$value= "+" . $value;
+	    }
+	}
+	elsif ($range->[0] =~ /^\-/ && $range->[1] =~ /^\-/)
+	{
+	    if ($range->[0] =~ m/e\+(.+)$/i)
+	    {
+		$max= $1 - 2;
+	    }
+	    if ($range->[1] =~ m/e\-(.+)$/i)
+	    {
+		$min= $1 + 2;
+	    }
+	    $value = int(rand($min+$max)) - $min;
+	    if ($value >= 0)
+	    {
+		$value= "+" . $value;
+	    }
+	    $minus= "-";
+	}
+	my $r_value= rand();
+	if ($r_value < 0.01)
+	{
+	    $r_value= 100*$r_value;
+	}
+	elsif ($r_value < 0.1)
+	{
+	    $r_value= 10*$r_value;
+	}
+	$value = $minus . sprintf("%1.2f", $r_value) . "E" . $value;	
+    }    
+    elsif ($type =~ m/^decimal.+/i)
+    {
+	$value = "";
+	for (my $i = $range->[0]; $i > 0; $i--)
+	{
+	    $value .= "." if ( $range->[1] == $i);
+	    if ( $i == $range->[0] || $i == 1)
+	    {
+		$value .= int(rand(10));	    
+	    }
+	    else
+	    {
+		$value .= int(rand(9)) + 1;	    
+	    }
+	} 
+    }
+    elsif ($type =~ m/^year.*/i)
+    {
+	$min= $range->[0];
+	$max= $range->[1];
+	$min =~ s/\'//g;
+	$max =~ s/\'//g;
+	$limit= $max - $min;
+	$value= int(rand($limit)) + $min;
+	if ($value < 10)
+	{
+	    $value= "0" . $value;
+	}
+	$value= "'" . $value . "'";	
+    }
+    return $value;
+}
+
+sub get_data_types
+{
+    return $data_types;
+}
+
+1;
+__END__
+
+=over
+
+=back
+
+=head1 My::Nuts::Library::Kernel::ServerDataTypes
+
+=over
+
+=back
+
+=head2 SYNOPSYS
+
+The interface is used to access to ServerDataTypes object that provides data types list and values for them.
+
+=over
+
+=back
+
+=head2 METHODS
+
+=over
+
+=back
+
+=head3 get_data_types()
+
+Description: return the reference to hash with data types.
+
+=over
+
+=back
+
+=head3 get_data_type_random_value(type, range);
+
+Description: get a random value for given data type for selected range
+
+Parameters: 
+
+type - data type, must be a key from get_data_types()
+
+range - reference to range for given data type
+
+=over
+
+=back
+
+=head3 get_data_type_unique_random_values(type, number);
+
+Description: get unique random values for given data type
+
+Parameters: 
+
+type - data type, must be a key from get_data_types()
+
+number - number of values 
+
+=over
+
+=back
+
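
The POD above lists the helpers this module exports. A minimal usage sketch
(it assumes <NUTS_HOME>/lib is on @INC so the module can be loaded; the type
name and the count of values are arbitrary):

    use strict;
    use warnings;
    use My::Nuts::Library::DataSource::DataTypes::MySQL;

    # Known type names are the keys of the hash returned by get_data_types().
    my $types = get_data_types();
    print join(", ", sort keys %$types), "\n";

    # One random literal for a given type ...
    my $one = get_data_type_random_value("INT SIGNED");

    # ... and five distinct random literals for the same type.
    my $five = get_data_type_unique_random_values("INT SIGNED", 5);
    print "$one\n", join("\n", @$five), "\n";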

=== modified file 'lib/My/Nuts/Library/DataSource/Instance/DataSource.pm'
--- a/lib/DataSource/Instance/DataSource.pm	2009-09-10 20:03:21 +0000
+++ b/lib/My/Nuts/Library/DataSource/Instance/DataSource.pm	2010-08-17 20:30:52 +0000
@@ -1,7 +1,8 @@
-package DataSource::Instance::DataSource;
+package My::Nuts::Library::DataSource::Instance::DataSource;
 use Exporter;
 our @ISA = qw(Exporter);
 use Cwd;
+use My;
 use strict;
 use warnings;
 use Class::Std::Utils;

=== modified file 'lib/My/Nuts/Library/DataSource/RQG.pm'
--- a/lib/DataSource/RQG.pm	2009-09-10 20:03:21 +0000
+++ b/lib/My/Nuts/Library/DataSource/RQG.pm	2010-08-17 20:30:52 +0000
@@ -1,18 +1,19 @@
-package DataSource::RQG;
+package My::Nuts::Library::DataSource::RQG;
 use Exporter;
-our @ISA = qw(Exporter DataSource::Instance::DataSource);
+our @ISA = qw(Exporter My::Nuts::Library::DataSource::Instance::DataSource);
 our @EXPORT = qw(get_data_from_source);
 use Cwd;
+use My;
 use strict;
 use warnings;
-use DataSource::Instance::DataSource;
+use My::Nuts::Library::DataSource::Instance::DataSource;
 
 sub get_data_from_source
 {
   my $queries = [];
   my $gentest_args= join(' ', @Parameters::data_source_options);
   my $cur_dir= cwd();
-  chdir('lib/DataSource/RQG');
+  chdir('lib/My/Nuts/Library/DataSource/RQG');
   my $output= `./gentest.pl $gentest_args`;
   $output=~ s/\#.*?\n//gim;
   $output=~ s/\n//gim;

=== modified file 'lib/My/Nuts/Library/DataSource/Sample.pm'
--- a/lib/DataSource/Sample.pm	2009-09-10 20:03:21 +0000
+++ b/lib/My/Nuts/Library/DataSource/Sample.pm	2010-08-17 20:30:52 +0000
@@ -1,12 +1,13 @@
-package DataSource::Sample;
+package My::Nuts::Library::DataSource::Sample;
 use Exporter;
-our @ISA = qw(Exporter DataSource::Instance::DataSource);
+our @ISA = qw(Exporter My::Nuts::Library::DataSource::Instance::DataSource);
 our @IMPORT = qw(get_data_from_source);
 use Cwd;
+use My;
 use strict;
 use warnings;
 use Class::Std::Utils;
-use DataSource::Instance::DataSource;
+use My::Nuts::Library::DataSource::Instance::DataSource;
 
 sub get_data_from_source
 {

=== added file 'lib/My/Nuts/Library/DataSource/SimpleRandom.pm'
--- a/lib/My/Nuts/Library/DataSource/SimpleRandom.pm	1970-01-01 00:00:00 +0000
+++ b/lib/My/Nuts/Library/DataSource/SimpleRandom.pm	2010-08-17 20:30:52 +0000
@@ -0,0 +1,369 @@
+package My::Nuts::Library::DataSource::SimpleRandom;
+use Exporter;
+our @ISA = qw(Exporter My::Nuts::Library::DataSource::Instance::DataSource);
+our @IMPORT = qw(get_data_from_source);
+use Cwd;
+use IO::File;
+use strict;
+use warnings;
+use Class::Std::Utils;
+use My::Nuts::Library::DataSource::Instance::DataSource;
+use My::Nuts::Library::DataSource::DataTypes::MySQL;
+
+my @nulls = ("", " NULL", " NOT NULL");
+my %used_pk_values;
+my %used_values;
+my %tables;
+my %sr_parameters = (
+    "table_num"		=> 20,
+    "min_col_num"	=> 2,
+    "max_col_num"	=> 16,    
+    "query_num" 	=> 1000,
+    "dml" 		=> "insert:40,update:25,delete:25,replace:10",
+);
+my $queries= [];
+
+sub get_data_from_source
+{
+    my ( $self ) = @_;
+    my $ident = ident ($self);
+    # Use queries generated by a previos test case
+    return $queries if (scalar(@$queries) > 0);
+    # Read external parameters
+    foreach my $param_group (@Parameters::data_source_options)
+    {
+	foreach my $param ( split("//", $param_group) )
+	{
+	    if ($param =~ m/([a-zA-Z0-9\_]+)=([a-zA-Z0-9\_\:\.\,\/]+)/i)
+	    {
+		$sr_parameters{$1}= $2;
+	    }
+	    else
+	    {
+		$sr_parameters{$param}= 1;
+	    }
+	}
+    }
+    # Header
+    push(@$queries, "# Simple Random Data Source. Current time is " . localtime);
+    push(@$queries, "# Parameters: " . join("; ", map($_."=". $sr_parameters{$_} , keys(%sr_parameters))));
+    # Generate tables
+    do_create_table();
+    # Queries
+    my $total_weight= 0;
+    my @st_info= ();
+    foreach my $st (split(/\,/, $sr_parameters{"dml"}))
+    {
+        if ($st =~ m/(.+)\:(\d+)/i)
+        {
+    	    $total_weight= $total_weight + $2;
+	    push (@st_info, {"st_group" => $1, "weight" => $total_weight});
+    	}
+    }
+    for (my $i = 0; $i < $sr_parameters{"query_num"}; $i++)
+    {
+        my $op= rand(100);
+        my $query= "";
+        my $num= int(rand($sr_parameters{"table_num"})) + 1;
+	my $values= {};
+	foreach my $col ( sort keys %{$tables{$num}} )
+	{
+	    if ($col =~ /^c\_[0-9]+$/)
+	    {
+	        my $cur_type= $tables{$num}{$col}{"type"};
+	        $values->{$col}= get_data_type_random_value($cur_type, 1);
+	    }
+	}
+    	my $pk_exists= check_pk($num, $values);
+	my $cur_rand= rand($total_weight);
+	foreach my $st (@st_info)
+	{
+	    if ($cur_rand < $st->{"weight"})
+	    {
+	        do_dml($st->{"st_group"}, $num, $pk_exists, $values);
+	        last;
+	    }
+	}		
+    }
+    # Check debug options
+    if (defined $sr_parameters{"debug"})
+    {
+	my $idx= 1;
+	my @lines= @$queries;
+	$queries= [];
+	foreach (@lines)
+	{
+	    push(@$queries, "# query $idx. " . localtime, $_);
+	    $idx++;
+	}
+    }
+    # Clean up
+    %used_pk_values= ();
+    %used_values= ();
+    %tables= ();
+    return $queries;
+}
+
+sub do_dml
+{
+    my ($st_type, $num, $pk_exists, $values)= @_;
+    my $query;
+    my @cur_values= ();
+    my @raw_values= ();
+    my %where_values = ();
+    # Insert/Replace
+    if ($st_type =~ m/(insert|replace)/)
+    {
+	if ($st_type eq "insert")
+	{
+	    # Do insert if PK does not exists for these values
+	    if (! defined $pk_exists)
+	    {
+		# Add Nulls
+		for my $col ( sort keys %$values )
+		{
+		    if ( ($tables{$num}{$col}{"null"} !~ m/NOT/) && (! defined $tables{$num}{$col}{"pk"}) && (rand(5) > 4) )
+		    {
+			$values->{$col}= "NULL";
+		    }
+		    $used_values{$num}{$col}{$values->{$col}}= 1;
+		}
+		if (defined ($tables{$num}{"auto_inc_pk"}))
+		{
+		    $used_values{$num}{$tables{$num}{"auto_inc_pk"}}{$values->{$tables{$num}{"auto_inc_pk"}}}= 1;
+		    $values->{$tables{$num}{"auto_inc_pk"}}= "NULL";
+		}
+		$query= "INSERT INTO t$num VALUES (" . join(", ", map($_= $values->{$_}, sort keys %$values)) . ")";	    
+	    }
+	}
+	elsif ($st_type eq "replace")
+	{
+	    $query= "REPLACE INTO t$num VALUES (" . join(", ", map($_= $values->{$_}, sort keys %$values)) . ")";	    
+	}
+    }
+    # Update/Delete
+    elsif ($st_type =~ m/(update|delete)/)
+    {
+	if (defined $used_pk_values{$num})
+	{
+	    my @raw_values= keys %{$used_pk_values{$num}};	    
+	    $cur_values[0]= $raw_values[int(rand(scalar(@raw_values)))];
+	    if ($cur_values[0] =~ m/\n/)
+	    {
+		@cur_values= split(/\n/, $cur_values[0]);
+	    }
+	    my $i= 0;
+	    foreach my $col (keys %{$tables{$num}})
+	    {
+		if ($col =~ m/^c\_[0-9]+/)
+		{
+		    if (defined ($tables{$num}{$col}{"pk"}))
+		    {
+			$cur_values[$i]= $col . " = " . $cur_values[$i];
+			$where_values{$col}= $cur_values[$i];	    
+			$i++;
+		    }
+		}
+	    }
+	    $query= " WHERE " . join(" AND ", @cur_values);
+	}
+	else
+	{
+	    foreach my $col (keys %{$tables{$num}})
+	    {
+		if ($col =~ m/^c\_[0-9]+/)
+		{
+		    my $cur_value= undef;
+		    my @raw_values= keys %{$used_values{$num}{$col}};
+		    if (rand(2) > 1 && scalar(@raw_values) > 0)
+		    {			
+			$cur_value= $raw_values[int(rand(scalar(@raw_values)))];
+		    }
+		    else
+		    {
+			$cur_value= $values->{$col};
+		    }	
+		    $where_values{$col}= $cur_value;	    
+		    push(@cur_values, $col . " = " . $cur_value);
+		}
+	    }	    
+	    $query= " WHERE " . join(" OR ", @cur_values);
+	}
+	if ($st_type eq "update")
+	{
+	    foreach my $col (keys %{$tables{$num}})
+	    {
+		if ($col =~ m/^c\_[0-9]+/)
+		{
+	    	    if (! defined ($tables{$num}{$col}{"pk"}) && rand(3) > 2)
+		    {
+			push(@cur_values, $col . "= " . $values->{$col});
+		    }
+		}
+	    }
+	    if (scalar(@cur_values) > 0)
+	    {
+		$query= "UPDATE t$num SET " . join(", ", @cur_values) . $query;
+	    }
+	    else
+	    {
+		$query= undef;
+	    }
+	}
+	elsif  ($st_type eq "delete")
+	{
+	    $query= "DELETE FROM t$num" . $query;
+	}
+    }
+    push(@$queries, $query) if (defined $query);
+}
+
+sub do_create_table
+{
+    # Read available data types
+    my $data_types = get_data_types();
+    my @data_type_list= keys %$data_types;
+    my $orig_list_num= scalar(@data_type_list);
+    # Set different weights for data types
+    for (my $i= 0; $i < $orig_list_num; $i++)
+    {
+	if ($data_type_list[$i] =~ /int/i)
+	{
+	    push(@data_type_list, $data_type_list[$i], $data_type_list[$i], $data_type_list[$i]);
+	}
+	elsif ($data_type_list[$i] =~ /^(varchar|date|time)/i)
+	{
+	    push(@data_type_list, $data_type_list[$i], $data_type_list[$i]);
+	}
+    }
+    # DDL
+    for (my $num= 1; $num <= $sr_parameters{"table_num"}; $num++)
+    {
+	my $create_table= "CREATE TABLE t$num ("; 
+	if (scalar(keys %tables) > 1 && rand(10) > 9)
+	{
+	    # Create table from an another one
+	    my $base_num= int(rand($num-1)) + 1;
+	    $create_table= "CREATE TABLE t$num LIKE t" . $base_num; 
+	    %{$tables{$num}}= %{$tables{$base_num}};	    
+	}
+	else
+	{
+	    # Generate columns for table
+	    my $col_limit= int(rand($sr_parameters{"max_col_num"} - $sr_parameters{"min_col_num"})) + $sr_parameters{"min_col_num"};
+	    my $zero= "0";
+	    my $auto_inc_candidate= undef;
+	    for (my $i = 1; $i <= $col_limit; $i++)
+	    {
+		my $cur_type= $data_type_list[int(rand(scalar(@data_type_list)))];
+		#next if ($cur_type =~ m/(TEXT|BLOB|BINARY)/);	    
+		$zero= "" if ($i > 9);
+		$tables{$num}{"c_$zero$i"}{"type"}= $cur_type;
+		my $cur_null= $nulls[int(rand(3))];
+		$tables{$num}{"c_$zero$i"}{"null"}= $cur_null;
+		$tables{$num}{"c_$zero$i"}{"column_desc"}= "c_$zero$i " . $cur_type . $cur_null;
+		if ( $tables{$num}{"c_$zero$i"}{"column_desc"} =~ m/int unsigned not null/i && ! defined($auto_inc_candidate) )
+		{
+		    $auto_inc_candidate= "c_$zero$i";
+		}
+		if ( ($cur_null =~ m/NOT NULL/) && ($cur_type !~ m/(TEXT|BLOB|BINARY|FLOAT|DOUBLE|TIMESTAMP)/i) && rand(3) > 2)
+		{
+		    $tables{$num}{"c_$zero$i"}{"pk"}= 1;
+		}
+	    }
+	    # Generate PK
+	    my @pk_cols= ();
+	    if (rand(5) > 4 && defined($auto_inc_candidate))
+	    {
+		$tables{$num}{$auto_inc_candidate}{"column_desc"} .= " AUTO_INCREMENT";
+		$tables{$num}{"auto_inc_pk"}= $auto_inc_candidate;
+		push(@pk_cols, $auto_inc_candidate);
+		foreach ( sort keys (%{$tables{$num}}) )
+		{
+		    if (m/^c\_[0-9]+/i)
+		    {
+			$tables{$num}{$_}{"pk"}= undef
+		    }
+		}
+		$tables{$num}{$auto_inc_candidate}{"pk"}= 1
+	    }
+	    else
+	    {
+		foreach my $col (keys(%{$tables{$num}}))
+		{
+		    if ($col =~ /^c\_[0-9]+$/)
+		    {
+			push(@pk_cols, $col) if (defined $tables{$num}{$col}{"pk"});
+		    }
+		}
+	    }
+	    my @cols= ();
+	    foreach ( sort keys (%{$tables{$num}}) )
+	    {
+		if (m/^c\_[0-9]+/i)
+		{
+		    push(@cols, $tables{$num}{$_}{"column_desc"});
+		}
+	    }
+	    $create_table.= join (", ", sort @cols);
+	    $tables{$num}{"create_table"}= $create_table;
+	    if (scalar(@pk_cols) > 0 )
+	    {
+		$create_table.= ", PRIMARY KEY (" . join(", ", @pk_cols) . ")";
+	    }
+    	    $create_table.= ")";
+    	    if (defined $sr_parameters{"engines"})
+    	    {
+    		my @engines= split(",", $sr_parameters{"engines"});
+    		$create_table.= " ENGINE=" .  uc $engines[int(rand(scalar(@engines)))];
+    	    }    	
+    	    $tables{$num}{"create_table"}= $create_table;
+    	}
+    	push(@{$queries}, $create_table);     	    	
+    }    
+}
+
+
+
+sub check_pk
+{
+    my ($num, $values)= @_;
+    my @pk_values;
+    if (defined $tables{$num}{"auto_inc_pk"})
+    {
+	push(@pk_values, $tables{$num}{"auto_inc_pk"});
+    }
+    else
+    {
+	foreach my $col (sort keys(%{$tables{$num}}))
+	{
+	    if ($col =~ /^c\_[0-9]+$/)
+	    {
+		if (defined $tables{$num}{$col}{"pk"})
+		{
+		    push(@pk_values, $values->{$col});
+		}
+	    }
+	}
+    }
+    if (scalar(@pk_values) > 0)
+    {
+	my $sign= join("\n", @pk_values);
+	if (defined $used_pk_values{$num}{$sign})
+	{
+	    return 1;
+	}
+	else
+	{
+	    $used_pk_values{$num}{$sign}= 1;
+	    return undef;
+	}
+    }
+    else
+    {
+	return undef;
+    }
+    
+}
+
+1;
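
SimpleRandom reads its tuning knobs from --ds-options: settings are separated
by "//", written as name=value, and a bare name acts as a flag (for example
debug). A hypothetical invocation that overrides some of the defaults in
%sr_parameters (other nuts.pl arguments omitted):

    perl nuts.pl ... --ds=SimpleRandom \
        --ds-options="table_num=5//query_num=200//dml=insert:60,update:20,delete:20//engines=innodb,myisam//debug"

The dml weights control the mix of generated DML statements, and engines picks
a random storage engine per table from the given list.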

=== modified file 'suites/rep/chained_diff_engine.pm'
--- a/suites/rep/chained_diff_engine.pm	2009-11-23 20:24:20 +0000
+++ b/suites/rep/chained_diff_engine.pm	2010-08-17 20:30:52 +0000
@@ -10,8 +10,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 my @combinations = (
     "row", "mixed", "stmt"
 );

=== modified file 'suites/rep/client_redirect/client_redirect.pm'
--- a/suites/rep/client_redirect/client_redirect.pm	2009-11-23 20:24:20 +0000
+++ b/suites/rep/client_redirect/client_redirect.pm	2010-08-17 20:30:52 +0000
@@ -11,8 +11,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 my @combinations = (
     "row", "mixed", "stmt"

=== modified file 'suites/rep/hot_standby.pm'
--- a/suites/rep/hot_standby.pm	2009-11-23 20:24:20 +0000
+++ b/suites/rep/hot_standby.pm	2010-08-17 20:30:52 +0000
@@ -10,8 +10,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 my @combinations = (
     "row", "mixed", "stmt" 

=== modified file 'suites/rep/semisync.pm'
--- a/suites/rep/semisync.pm	2009-11-23 20:24:20 +0000
+++ b/suites/rep/semisync.pm	2010-08-17 20:30:52 +0000
@@ -10,8 +10,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::WithPlugin;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 my %combinations = (
     "row" 	=> "ROW",

=== modified file 'suites/rep/stress_mixed/stress_mixed.pm'
--- a/suites/rep/stress_mixed/stress_mixed.pm	2009-11-25 19:05:08 +0000
+++ b/suites/rep/stress_mixed/stress_mixed.pm	2010-08-17 20:30:52 +0000
@@ -11,8 +11,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 
 my @combinations = (

=== modified file 'suites/samples/data_source.pm'
--- a/suites/samples/data_source.pm	2009-09-10 20:03:21 +0000
+++ b/suites/samples/data_source.pm	2010-08-17 20:30:52 +0000
@@ -8,8 +8,8 @@ use My::Nuts::Library::Kernel::Server;
 use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 sub prepare
 {
@@ -47,10 +47,10 @@ __END__;
 
 =head1 NAME
 
-rep::hot_standby - Replication for 2 master and 1 slave.
+samples::data_source - Test with queries from DataSource library
 
 =head1 SYNOPSIS
 
-Replication for 2 master and 1 slave with hot stansby mode.
+Test starts one MySQL server, created test database and run queries from DataSource library. 
 
 =back

=== modified file 'suites/updown/rep_basic_downgrade.pm'
--- a/suites/updown/rep_basic_downgrade.pm	2010-03-31 20:40:41 +0000
+++ b/suites/updown/rep_basic_downgrade.pm	2010-08-17 20:30:52 +0000
@@ -10,8 +10,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 
 sub prepare

=== modified file 'suites/updown/rep_basic_upgrade.pm'
--- a/suites/updown/rep_basic_upgrade.pm	2010-03-31 20:40:41 +0000
+++ b/suites/updown/rep_basic_upgrade.pm	2010-08-17 20:30:52 +0000
@@ -10,8 +10,8 @@ use My::Nuts::Library::Kernel::Manager;
 use My::Nuts::Library::Kernel::Result;
 use My::Nuts::Library::Tests::SimpleTest;
 use My::Nuts::Library::Kernel::Replication;
+use My::Nuts::Library::DataSource;
 use Test::More;
-use DataSource;
 
 
 sub prepare


Attachment: [text/bzr-bundle] bzr/serge.kozlov@sun.com-20100817203052-24954o2821meo296.bundle