=== removed file 'COPYING.innodb-deadlock-count-patch'
--- COPYING.innodb-deadlock-count-patch	2013-05-27 12:16:36 +0000
+++ COPYING.innodb-deadlock-count-patch	1970-01-01 00:00:00 +0000
@@ -1,30 +0,0 @@
-Portions of this software contain modifications contributed by Eric Bergen.
-These contributions are used with the following license:
-
-Copyright (c) 2010, Eric Bergen. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-      * Redistributions of source code must retain the above copyright
-        notice, this list of conditions and the following disclaimer.
-      * Redistributions in binary form must reproduce the above
-        copyright notice, this list of conditions and the following
-        disclaimer in the documentation and/or other materials
-        provided with the distribution.
-      * Neither the name of the Eric Bergen. nor the names of its
-        contributors may be used to endorse or promote products
-        derived from this software without specific prior written
-        permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

=== added file 'COPYING.show_temp_51'
--- COPYING.show_temp_51	1970-01-01 00:00:00 +0000
+++ COPYING.show_temp_51	2013-05-27 12:16:38 +0000
@@ -0,0 +1,13 @@
+Portions of this software contain modifications contributed by Venu Anuganti.
+These contributions are used with the following license:
+
+Copyright (c) 2010, Venu Anuganti, http://venublog.com/
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+    * Neither the name of the <ORGANIZATION> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

=== modified file 'Makefile'
--- Makefile	2013-05-10 08:51:53 +0000
+++ Makefile	2013-05-27 12:16:38 +0000
@@ -1,5 +1,5 @@
-MYSQL_VERSION=5.6.10
-PERCONA_SERVER_VERSION=alpha60.2
+MYSQL_VERSION=5.6.11
+PERCONA_SERVER_VERSION=alpha60.3
 PERCONA_SERVER         ?=Percona-Server-$(MYSQL_VERSION)-$(PERCONA_SERVER_VERSION)
 PERCONA_SERVER_SHORT_1 ?=Percona-Server-$(MYSQL_VERSION)
 PERCONA_SERVER_SHORT_2 ?=Percona-Server

=== modified file 'Percona-Server/client/mysqldump.c'
--- Percona-Server/client/mysqldump.c	2013-05-13 04:25:56 +0000
+++ Percona-Server/client/mysqldump.c	2013-05-27 12:16:38 +0000
@@ -2482,6 +2482,48 @@
 }
 
 /*
+  Find the first occurrence of a quoted identifier in a given string. Returns
+  the pointer to the opening quote, and stores the pointer to the closing quote
+  in the memory location pointed to by the 'end' argument.
+
+  If no quoted identifiers are found, returns NULL (and the value pointed to by
+  'end' is undefined in this case).
+*/
+
+static const char *parse_quoted_identifier(const char *str,
+                                            const char **end)
+{
+  const char *from;
+  const char *to;
+
+  if (!(from= strchr(str, '`')))
+    return NULL;
+
+  to= from;
+
+  while ((to= strchr(to + 1, '`'))) {
+    /*
+      Double backticks represent a backtick inside an identifier, rather than
+      a quote character.
+    */
+    if (to[1] == '`')
+    {
+      to++;
+      continue;
+    }
+
+    break;
+  }
+
+  if (to <= from + 1)
+    return NULL;                                /* Empty identifier */
+
+  *end= to;
+
+  return from;
+}
+
+/*
   Parse the specified key definition string and check if the key contains an
   AUTO_INCREMENT column as the first key part. We only check for the first key
   part, because unlike MyISAM, InnoDB does not allow the AUTO_INCREMENT column
@@ -2492,32 +2534,22 @@
                                        const char *keydef,
                                        key_type_t type)
 {
-  char *from, *to;
+  const char *from, *to;
   uint idnum;
 
   DBUG_ASSERT(type != KEY_TYPE_NONE);
 
-  if (autoinc_column == NULL || !(from= strchr(keydef, '`')))
+  if (autoinc_column == NULL)
     return FALSE;
 
-  to= from;
   idnum= 0;
 
-  while ((to= strchr(to + 1, '`')))
+  /*
+    There is only 1 iteration of the following loop for type == KEY_TYPE_PRIMARY
+    and 2 iterations for type == KEY_TYPE_UNIQUE / KEY_TYPE_NON_UNIQUE.
+  */
+  while ((from= parse_quoted_identifier(keydef, &to)))
   {
-    /*
-      Double backticks represent a backtick in identifier, rather than a quote
-      character.
-    */
-    if (to[1] == '`')
-    {
-      to++;
-      continue;
-    }
-
-    if (to <= from + 1)
-      break;                                    /* Broken key definition */
-
     idnum++;
 
     /*
@@ -2532,11 +2564,10 @@
       Check only the first (for PRIMARY KEY) or the second (for secondary keys)
       quoted identifier.
     */
-    if ((idnum == 1 + test(type != KEY_TYPE_PRIMARY)) ||
-        !(from= strchr(to + 1, '`')))
+    if ((idnum == 1 + test(type != KEY_TYPE_PRIMARY)))
       break;
 
-    to= from;
+    keydef= to + 1;
   }
 
   return FALSE;
@@ -2544,6 +2575,42 @@
 
 
 /*
+  Find a node in the skipped keys list whose name matches a quoted
+  identifier specified by the 'id_from' and 'id_to' arguments.
+*/
+
+static LIST *find_matching_skipped_key(const char *id_from,
+                                       const char *id_to)
+{
+  LIST *list;
+  size_t id_len;
+
+  id_len= id_to - id_from + 1;
+  DBUG_ASSERT(id_len > 2);
+
+  for (list= skipped_keys_list; list; list= list_rest(list))
+  {
+    const char *keydef;
+    const char *keyname_from;
+    const char *keyname_to;
+    size_t keyname_len;
+
+    keydef= list->data;
+
+    if ((keyname_from= parse_quoted_identifier(keydef, &keyname_to)))
+    {
+      keyname_len= keyname_to - keyname_from + 1;
+
+      if (id_len == keyname_len &&
+          !strncmp(keyname_from, id_from, id_len))
+        return list;
+    }
+  }
+
+  return NULL;
+}
+
+/*
   Remove secondary/foreign key definitions from a given SHOW CREATE TABLE string
   and store them into a temporary list to be used later.
 
@@ -2570,6 +2637,9 @@
   char *autoinc_column= NULL;
   my_bool has_autoinc= FALSE;
   key_type_t type;
+  const char *constr_from;
+  const char *constr_to;
+  LIST *keydef_node;
 
   strend= create_str + strlen(create_str);
 
@@ -2589,7 +2659,37 @@
     c= *tmp;
     *tmp= '\0'; /* so strstr() only processes the current line */
 
-    if (!strncmp(ptr, "UNIQUE KEY ", sizeof("UNIQUE KEY ") - 1))
+    if (!strncmp(ptr, "CONSTRAINT ", sizeof("CONSTRAINT ") - 1) &&
+        (constr_from= parse_quoted_identifier(ptr, &constr_to)) &&
+        (keydef_node= find_matching_skipped_key(constr_from, constr_to)))
+    {
+      char *keydef;
+      size_t keydef_len;
+
+      /*
+        There's a skipped key with the same name as the constraint name.  Let's
+        put it back before the current constraint definition and remove it from
+        the skipped keys list.
+      */
+      keydef= keydef_node->data;
+      keydef_len= strlen(keydef) + 5;           /* ", \n  " */
+
+      memmove(orig_ptr + keydef_len, orig_ptr, strend - orig_ptr + 1);
+      memcpy(ptr, keydef, keydef_len - 5);
+      memcpy(ptr + keydef_len - 5, ", \n  ", 5);
+
+      skipped_keys_list= list_delete(skipped_keys_list, keydef_node);
+      my_free(keydef);
+      my_free(keydef_node);
+
+      strend+= keydef_len;
+      orig_ptr+= keydef_len;
+      ptr+= keydef_len;
+      tmp+= keydef_len;
+
+      type= KEY_TYPE_NONE;
+    }
+    else if (!strncmp(ptr, "UNIQUE KEY ", sizeof("UNIQUE KEY ") - 1))
       type= KEY_TYPE_UNIQUE;
     else if (!strncmp(ptr, "KEY ", sizeof("KEY ") - 1))
       type= KEY_TYPE_NON_UNIQUE;

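Note (not part of the patch): the backtick-scanning behaviour added above can be checked in isolation. The following is a minimal standalone C sketch that mirrors parse_quoted_identifier() — the function name find_quoted_identifier and the sample key definition are made up for illustration, and doubled backticks are treated as an escaped backtick rather than a closing quote, just as described in the comments in the hunk above.

#include <stdio.h>
#include <string.h>

/*
  Find the first `...` quoted identifier in 'str'. Returns a pointer to the
  opening backtick and stores a pointer to the closing backtick in *end, or
  returns NULL if there is no non-empty, properly terminated identifier.
*/
static const char *find_quoted_identifier(const char *str, const char **end)
{
  const char *from;
  const char *to;

  if (!(from = strchr(str, '`')))
    return NULL;                          /* no opening quote at all */

  to = from;
  while ((to = strchr(to + 1, '`')))
  {
    if (to[1] == '`')                     /* `` is a literal backtick */
    {
      to++;
      continue;
    }
    break;                                /* real closing quote */
  }

  if (to == NULL || to <= from + 1)       /* unterminated or empty identifier */
    return NULL;

  *end = to;
  return from;
}

int main(void)
{
  const char *keydef =
      "CONSTRAINT `fk``name` FOREIGN KEY (`a`) REFERENCES `t1` (`id`)";
  const char *end;
  const char *start = find_quoted_identifier(keydef, &end);

  if (start)
    printf("identifier: %.*s\n", (int)(end - start + 1), start);
  return 0;
}

Compiled and run, this prints the full quoted span `fk``name`, which is the same start/end pair that find_matching_skipped_key() above compares against the names stored in the skipped keys list.
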
=== modified file 'Percona-Server/include/errmsg.h'
--- Percona-Server/include/errmsg.h	2013-05-12 06:24:46 +0000
+++ Percona-Server/include/errmsg.h	2013-05-27 12:16:38 +0000
@@ -25,6 +25,7 @@
 void	init_client_errs(void);
 void	finish_client_errs(void);
 extern const char *client_errors[];	/* Error messages */
+extern const char **mysql_client_errors;	/* Error messages */
 #ifdef	__cplusplus
 }
 #endif

=== modified file 'Percona-Server/libmysql/errmsg.c'
--- Percona-Server/libmysql/errmsg.c	2013-05-12 06:24:46 +0000
+++ Percona-Server/libmysql/errmsg.c	2013-05-27 12:16:38 +0000
@@ -23,6 +23,7 @@
 #include <my_sys.h>
 #include "errmsg.h"
 
+const char **mysql_client_errors = client_errors;
 const char *client_errors[]=
 {
   "Unknown MySQL error",

=== removed file 'Percona-Server/mysql-test/include/have_pool_of_threads.inc'
--- Percona-Server/mysql-test/include/have_pool_of_threads.inc	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/include/have_pool_of_threads.inc	1970-01-01 00:00:00 +0000
@@ -1,4 +0,0 @@
--- require r/have_pool_of_threads.require
-disable_query_log;
-show variables like 'thread_handling';
-enable_query_log;

=== removed file 'Percona-Server/mysql-test/include/log_grep.inc'
--- Percona-Server/mysql-test/include/log_grep.inc	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/include/log_grep.inc	1970-01-01 00:00:00 +0000
@@ -1,41 +0,0 @@
---echo [log_grep.inc] file: $log_file pattern: $grep_pattern
-perl;
-  $log_file=           $ENV{'log_file'};
-  $log_file_full_path= $ENV{'log_file_full_path'};
-  $log_slow_rate_test= $ENV{'log_slow_rate_test'};
-  open(FILE, "$log_file_full_path") 
-    or die("Cannot open file $log_file_full_path: $!\n");
-
-  if ($log_slow_rate_test) {
-    $one= 0;
-    $two= 0;
-    $three= 0;
-    while(<FILE>) {
-      $one++   if(/'connection_one'/);
-      $two++   if(/'connection_two'/);
-      $three++ if(/'connection_three'/);
-    }
-    $sum= $one + $two + $three;
-    $zero= 0;
-    if ($one == 0) {
-      $zero++;
-    }
-    if ($two == 0) {
-      $zero++;
-    }
-    if ($three == 0) {
-      $zero++;
-    }
-    print "[log_grep.inc] sum:  $sum\n";
-    print "[log_grep.inc] zero: $zero\n";
-  }
-  else {
-    $grep_pattern=       $ENV{'grep_pattern'};
-    $lines= 0;
-    while(<FILE>) {
-      $lines++ if (/$grep_pattern/);
-    }
-    print "[log_grep.inc] lines:   $lines\n";
-  }
-  close(FILE);
-EOF

=== added file 'Percona-Server/mysql-test/include/percona_query_cache_with_comments.inc'
--- Percona-Server/mysql-test/include/percona_query_cache_with_comments.inc	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/include/percona_query_cache_with_comments.inc	2013-05-27 12:16:38 +0000
@@ -0,0 +1,95 @@
+--source include/percona_query_cache_with_comments_clear.inc
+let $query=/* with comment first */select * from t1;
+eval $query;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=# with comment first
+select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=-- with comment first
+select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=/* with comment first and "quote" */select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=# with comment first and "quote"
+select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=-- with comment first and "quote" 
+select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=
+    /* with comment and whitespaces first */select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query= 
+    # with comment and whitespaces first
+select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=
+    -- with comment and whitespaces first
+select * from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $internal=* internal comment *;
+
+let $query=select * /$internal/ from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+let $query=select */$internal/ from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+let $query=select */$internal/from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $internal=* internal comment with "quote" *;
+
+let $query=select * /$internal/ from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+let $query=select */$internal/ from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+let $query=select */$internal/from t1;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1
+;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1 ;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1	;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1
+/* comment in the end */;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1
+/* *\/ */;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1
+/* comment in the end */
+;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1 #comment in the end;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1 #comment in the end
+;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1 -- comment in the end;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select * from t1 -- comment in the end
+;
+--source include/percona_query_cache_with_comments_eval.inc
+
+let $query=select ' \'  ' from t1;
+--source include/percona_query_cache_with_comments_eval.inc

=== removed file 'Percona-Server/mysql-test/include/percona_query_cache_with_comments_begin.inc'
--- Percona-Server/mysql-test/include/percona_query_cache_with_comments_begin.inc	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/include/percona_query_cache_with_comments_begin.inc	1970-01-01 00:00:00 +0000
@@ -1,12 +0,0 @@
--- source include/have_query_cache.inc
-
-set GLOBAL query_cache_size=1355776;
-
---disable_warnings
-drop table if exists t1;
---enable_warnings
-
-create table t1 (a int not null);
-insert into t1 values (1),(2),(3);
-
---source include/percona_query_cache_with_comments_clear.inc

=== added file 'Percona-Server/mysql-test/include/percona_query_cache_with_comments_clear.inc'
--- Percona-Server/mysql-test/include/percona_query_cache_with_comments_clear.inc	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/include/percona_query_cache_with_comments_clear.inc	2013-05-27 12:16:38 +0000
@@ -0,0 +1,5 @@
+# Reset query cache variables.
+flush query cache; # This crashed in some versions
+flush query cache; # This crashed in some versions
+reset query cache;
+flush status;

=== added file 'Percona-Server/mysql-test/include/percona_query_cache_with_comments_eval.inc'
--- Percona-Server/mysql-test/include/percona_query_cache_with_comments_eval.inc	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/include/percona_query_cache_with_comments_eval.inc	2013-05-27 12:16:38 +0000
@@ -0,0 +1,7 @@
+echo -----------------------------------------------------;
+echo $query;
+echo -----------------------------------------------------;
+--source include/percona_query_cache_with_comments_show.inc
+eval $query;
+eval $query;
+--source include/percona_query_cache_with_comments_show.inc

=== removed file 'Percona-Server/mysql-test/include/percona_show_slave_status_nolock.inc'
--- Percona-Server/mysql-test/include/percona_show_slave_status_nolock.inc	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/include/percona_show_slave_status_nolock.inc	1970-01-01 00:00:00 +0000
@@ -1,56 +0,0 @@
---echo
---disable_result_log
-connection slave_lock;
---echo [slave_lock]
-send SHOW SLAVE STATUS;
-
-connection slave;
---let $condition= 'SHOW SLAVE STATUS'
---source include/wait_show_condition.inc
-
---disable_warnings
-SET DEBUG_SYNC='now WAIT_FOR signal.after_show_slave_status TIMEOUT 1';
---enable_warnings
-
---let current=`SELECT SUBSTR(Variable_value FROM 22) FROM INFORMATION_SCHEMA.SESSION_VARIABLES WHERE Variable_name = 'DEBUG_SYNC'`
---echo SIGNAL after SHOW SLAVE STATUS is $current
-
-connection slave;
---echo [slave]
-SET DEBUG_SYNC='now SIGNAL signal.empty';
-
-connection slave_nolock;
---echo [slave_nolock]
-send SHOW SLAVE STATUS NOLOCK;
-
-connection slave;
---let $condition= 'SHOW SLAVE STATUS NOLOCK'
---source include/wait_show_condition.inc
-
---disable_warnings
-SET DEBUG_SYNC='now WAIT_FOR signal.after_show_slave_status TIMEOUT 1';
---enable_warnings
-
---echo # should be 'signal.after_show_slave_status'
---let current=`SELECT SUBSTR(Variable_value FROM 22) FROM INFORMATION_SCHEMA.SESSION_VARIABLES WHERE Variable_name = 'DEBUG_SYNC'`
---echo SIGNAL after SHOW SLAVE STATUS NOLOCK is $current
-
-connection slave;
---echo [slave]
-SET DEBUG_SYNC='now SIGNAL signal.continue';
-
-connection slave_lock;
---disable_result_log
-reap;
---enable_result_log
-
-connection slave_nolock;
---disable_result_log
-reap;
---enable_result_log
-
-connection slave;
---echo [slave]
-SET DEBUG_SYNC='now SIGNAL signal.empty';
---enable_result_log
---echo

=== added file 'Percona-Server/mysql-test/r/have_pool_of_threads.require'
--- Percona-Server/mysql-test/r/have_pool_of_threads.require	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/have_pool_of_threads.require	2013-05-27 12:16:38 +0000
@@ -0,0 +1,2 @@
+Variable_name	Value
+thread_handling	pool-of-threads

=== added file 'Percona-Server/mysql-test/r/percona_bug1170103.result'
--- Percona-Server/mysql-test/r/percona_bug1170103.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_bug1170103.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,19 @@
+CREATE TABLE t(a INT) ENGINE=InnoDB;
+INSERT INTO t VALUES (1), (2), (3);
+SET @old_query_cache_size=@@query_cache_size;
+SET @old_query_cache_type=@@query_cache_type;
+SET GLOBAL query_cache_size=1024*1024;
+SET GLOBAL query_cache_type=1;
+SELECT * FROM t;
+a
+1
+2
+3
+SELECT * FROM t;
+a
+1
+2
+3
+SET GLOBAL query_cache_size=@old_query_cache_size;
+SET GLOBAL query_cache_type=@old_query_cache_type;
+DROP TABLE t;

=== removed file 'Percona-Server/mysql-test/r/percona_bug933969.result'
--- Percona-Server/mysql-test/r/percona_bug933969.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/r/percona_bug933969.result	1970-01-01 00:00:00 +0000
@@ -1,16 +0,0 @@
-RESET MASTER;
-DROP TABLE IF EXISTS t1;
-CREATE TABLE t1 (word VARCHAR(20));
-INSERT INTO t1 VALUES ("hamite");
-INSERT INTO t1 VALUES ("hoho");
-INSERT INTO t1 VALUES ("znamenito");
-INSERT INTO t1 VALUES ("mrachny");
-INSERT INTO t1 VALUES ("mrak");
-INSERT INTO t1 VALUES ("zhut");
-INSERT INTO t1 VALUES ("parnisha");
-INSERT INTO t1 VALUES ("krrasota!");
-INSERT INTO t1 VALUES ("podumayesh");
-INSERT INTO t1 VALUES ("ogo!");
-FLUSH LOGS;
-DROP TABLE t1;
-RESET MASTER;

=== added file 'Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result'
--- Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,28 @@
+# Establish connection con1 (user=root)
+# Establish connection con2 (user=root)
+# Establish connection con3 (user=root)
+# Drop test table
+drop table if exists t;
+# Create test table
+create table t(a INT PRIMARY KEY, b INT) engine=InnoDB;
+# Insert two rows to test table
+insert into t values(2,1);
+insert into t values(1,2);
+# Switch to connection con1
+BEGIN;
+SELECT b FROM t WHERE a=1 FOR UPDATE;
+# Switch to connection con2
+BEGIN;
+SELECT b FROM t WHERE a=2 FOR UPDATE;
+# Switch to connection con1
+SELECT b FROM t WHERE a=2 FOR UPDATE;
+# Switch to connection con2
+SELECT b FROM t WHERE a=1 FOR UPDATE;
+# Switch to connection con1
+ROLLBACK;
+# Switch to connection con2
+ROLLBACK;
+# Switch to connection con3
+Deadlocks: 1
+# Drop test table
+drop table t;

=== removed file 'Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config.result'
--- Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config.result	1970-01-01 00:00:00 +0000
@@ -1,6 +0,0 @@
-SHOW GLOBAL VARIABLES like 'log_slow_admin_statements';
-Variable_name	Value
-log_slow_admin_statements	ON
-SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES WHERE VARIABLE_NAME='log_slow_admin_statements';
-VARIABLE_NAME	VARIABLE_VALUE
-LOG_SLOW_ADMIN_STATEMENTS	ON

=== added file 'Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result'
--- Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,7 @@
+call mtr.add_suppression("option 'log_slow_admin_statements': boolean value 'foo' wasn't recognized. Set to OFF.");
+SHOW GLOBAL VARIABLES like 'log_slow_admin_statements';
+Variable_name	Value
+log_slow_admin_statements	OFF
+SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES WHERE VARIABLE_NAME='log_slow_admin_statements';
+VARIABLE_NAME	VARIABLE_VALUE
+LOG_SLOW_ADMIN_STATEMENTS	OFF

=== removed file 'Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_true.result'
--- Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_true.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_true.result	1970-01-01 00:00:00 +0000
@@ -1,6 +0,0 @@
-SHOW GLOBAL VARIABLES like 'log_slow_admin_statements';
-Variable_name	Value
-log_slow_admin_statements	ON
-SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES WHERE VARIABLE_NAME='log_slow_admin_statements';
-VARIABLE_NAME	VARIABLE_VALUE
-LOG_SLOW_ADMIN_STATEMENTS	ON

=== removed file 'Percona-Server/mysql-test/r/percona_log_slow_filter-cl.result'
--- Percona-Server/mysql-test/r/percona_log_slow_filter-cl.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/r/percona_log_slow_filter-cl.result	1970-01-01 00:00:00 +0000
@@ -1,6 +0,0 @@
-SHOW VARIABLES LIKE 'log_slow_filter';
-Variable_name	Value
-log_slow_filter	full_join
-SHOW GLOBAL VARIABLES LIKE 'log_slow_filter';
-Variable_name	Value
-log_slow_filter	full_join

=== removed file 'Percona-Server/mysql-test/r/percona_min_examined_row_limit.result'
--- Percona-Server/mysql-test/r/percona_min_examined_row_limit.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/r/percona_min_examined_row_limit.result	1970-01-01 00:00:00 +0000
@@ -1,35 +0,0 @@
-drop table if exists t;
-create table t(id INT PRIMARY KEY);
-insert into t values(1);
-insert into t values(2);
-insert into t values(3);
-SET GLOBAL long_query_time=2;
-SET GLOBAL slow_query_log_use_global_control='long_query_time,min_examined_row_limit';
-[log_start.inc] percona.slow_extended.min_examined_row_limit
-SET SESSION query_exec_time=2.1;
-SELECT 1;
-1
-1
-SET GLOBAL min_examined_row_limit=5;
-SELECT * FROM t AS t1, t AS t2;
-id	id
-1	1
-2	1
-3	1
-1	2
-2	2
-3	2
-1	3
-2	3
-3	3
-SELECT 1;
-1
-1
-SET SESSION query_exec_time=default;
-[log_stop.inc] percona.slow_extended.min_examined_row_limit
-SET GLOBAL min_examined_row_limit=default;
-SET GLOBAL slow_query_log_use_global_control=default;
-SET GLOBAL long_query_time=default;
-[log_grep.inc] file: percona.slow_extended.min_examined_row_limit pattern: Query_time
-[log_grep.inc] lines:   1
-DROP TABLE t;

=== modified file 'Percona-Server/mysql-test/r/percona_mysqldump_innodb_optimize_keys.result'
--- Percona-Server/mysql-test/r/percona_mysqldump_innodb_optimize_keys.result	2013-05-13 04:25:56 +0000
+++ Percona-Server/mysql-test/r/percona_mysqldump_innodb_optimize_keys.result	2013-05-27 12:16:38 +0000
@@ -477,3 +477,50 @@
 
 ######################################
 DROP TABLE t1;
+CREATE TABLE `t1` (
+`id` int(11) NOT NULL AUTO_INCREMENT,
+`a` int(11) NOT NULL,
+PRIMARY KEY (`id`),
+KEY `a` (`a`),
+CONSTRAINT `a` FOREIGN KEY (`a`) REFERENCES `t1` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+######################################
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+DROP TABLE IF EXISTS `t1`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `t1` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `a` int(11) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `a` (`a`), 
+  CONSTRAINT `a` FOREIGN KEY (`a`) REFERENCES `t1` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+LOCK TABLES `t1` WRITE;
+/*!40000 ALTER TABLE `t1` DISABLE KEYS */;
+/*!40000 ALTER TABLE `t1` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+######################################
+DROP TABLE t1;

=== added file 'Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result'
--- Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,21 @@
+set GLOBAL query_cache_size=1355776;
+drop table if exists t1;
+create table t1 (a int not null);
+insert into t1 values (1),(2),(3);
+flush query cache;
+flush query cache;
+reset query cache;
+flush status;
+( select * from t1 );
+a
+1
+2
+3
+/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, @@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), @OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, @@SQL_QUOTE_SHOW_CREATE := 1 */;
+/* only comment */;
+# only comment
+;
+-- only comment
+;
+DROP TABLE t1;
+SET GLOBAL query_cache_size= default;

=== removed file 'Percona-Server/mysql-test/r/percona_query_cache_with_comments_prepared_statements.result'
--- Percona-Server/mysql-test/r/percona_query_cache_with_comments_prepared_statements.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/r/percona_query_cache_with_comments_prepared_statements.result	1970-01-01 00:00:00 +0000
@@ -1,396 +0,0 @@
-set GLOBAL query_cache_size=1355776;
-flush query cache;
-flush query cache;
-reset query cache;
-flush status;
-drop table if exists t1;
-create table t1 (a int not null);
-insert into t1 values (1),(2),(3);
-set global query_cache_strip_comments=ON;
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	0
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	0
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	0
-prepare stmt from '/* with comment */ select * from t1';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	0
-execute stmt;
-a
-1
-2
-3
-execute stmt;
-a
-1
-2
-3
-execute stmt;
-a
-1
-2
-3
-execute stmt;
-a
-1
-2
-3
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	5
-prepare stmt from 'select * from t1';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	6
-prepare stmt from 'select * /*internal comment*/from t1';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	7
-prepare stmt from 'select * /*internal comment*/ from t1';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	8
-prepare stmt from 'select * from t1 /* at the end */';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	9
-prepare stmt from 'select * from t1 /* with "quote" */';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	10
-prepare stmt from 'select * from t1 /* with \'quote\' */';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	11
-prepare stmt from 'select * from t1 # 123
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	12
-prepare stmt from 'select * from t1 # 123 with "quote"
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	13
-prepare stmt from 'select * from t1 # 123 with \'quote\'
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	14
-prepare stmt from 'select * from t1
-# 123
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	15
-prepare stmt from '#456
-select * from t1
-# 123
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	16
-prepare stmt from 'select * from t1 -- 123
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	17
-prepare stmt from 'select * from t1
--- 123
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	18
-prepare stmt from '-- comment in first
-select * from t1
-# 123
-';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	1
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	1
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	19
-prepare stmt from '(#456(
-select * from t1
-# 123(
-)';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	2
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	2
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	19
-prepare stmt from '/*test*/(-- comment in first(
-select * from t1
--- 123 asdasd
-/* test */)';
-execute stmt;
-a
-1
-2
-3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	2
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	2
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	20
-prepare stmt from 'select "test",a from t1';
-execute stmt;
-test	a
-test	1
-test	2
-test	3
-execute stmt;
-test	a
-test	1
-test	2
-test	3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	3
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	3
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	21
-prepare stmt from 'select "test /* internal \'comment\' */",a from t1';
-execute stmt;
-test /* internal 'comment' */	a
-test /* internal 'comment' */	1
-test /* internal 'comment' */	2
-test /* internal 'comment' */	3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	4
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	4
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	21
-prepare stmt from 'select "test #internal comment" ,a from t1';
-execute stmt;
-test #internal comment	a
-test #internal comment	1
-test #internal comment	2
-test #internal comment	3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	5
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	5
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	21
-prepare stmt from 'select "test #internal comment" #external comment
-,a from t1';
-execute stmt;
-test #internal comment	a
-test #internal comment	1
-test #internal comment	2
-test #internal comment	3
-show status like "Qcache_queries_in_cache";
-Variable_name	Value
-Qcache_queries_in_cache	5
-show status like "Qcache_inserts";
-Variable_name	Value
-Qcache_inserts	5
-show status like "Qcache_hits";
-Variable_name	Value
-Qcache_hits	22
-DROP TABLE t1;
-SET GLOBAL query_cache_size= default;
-set global query_cache_strip_comments=OFF;

=== added file 'Percona-Server/mysql-test/r/percona_status_wait_query_cache_mutex.result'
--- Percona-Server/mysql-test/r/percona_status_wait_query_cache_mutex.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_status_wait_query_cache_mutex.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,20 @@
+SET GLOBAL query_cache_size=1355776;
+flush query cache;
+flush query cache;
+reset query cache;
+flush status;
+SET DEBUG_SYNC='after_query_cache_mutex SIGNAL mutex_locked WAIT_FOR unlock_mutex';
+SELECT "mutex_locked_query" as action;
+SET DEBUG_SYNC='now WAIT_FOR mutex_locked';
+SET DEBUG_SYNC='before_query_cache_mutex SIGNAL try_lock_mutex';
+SELECT "try_lock_mutex_query" as action;
+SET DEBUG_SYNC='now WAIT_FOR try_lock_mutex';
+SELECT SQL_NO_CACHE state FROM INFORMATION_SCHEMA.PROCESSLIST WHERE info='SELECT "try_lock_mutex_query" as action';
+state
+Waiting on query cache mutex
+SET DEBUG_SYNC='now SIGNAL unlock_mutex';
+action
+mutex_locked_query
+action
+try_lock_mutex_query
+SET GLOBAL query_cache_size=default;

=== added file 'Percona-Server/mysql-test/r/pool_of_threads.result'
--- Percona-Server/mysql-test/r/pool_of_threads.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/pool_of_threads.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,2169 @@
+drop table if exists t1,t2,t3,t4;
+CREATE TABLE t1 (
+Period smallint(4) unsigned zerofill DEFAULT '0000' NOT NULL,
+Varor_period smallint(4) unsigned DEFAULT '0' NOT NULL
+);
+INSERT INTO t1 VALUES (9410,9412);
+select period from t1;
+period
+9410
+select * from t1;
+Period	Varor_period
+9410	9412
+select t1.* from t1;
+Period	Varor_period
+9410	9412
+CREATE TABLE t2 (
+auto int not null auto_increment,
+fld1 int(6) unsigned zerofill DEFAULT '000000' NOT NULL,
+companynr tinyint(2) unsigned zerofill DEFAULT '00' NOT NULL,
+fld3 char(30) DEFAULT '' NOT NULL,
+fld4 char(35) DEFAULT '' NOT NULL,
+fld5 char(35) DEFAULT '' NOT NULL,
+fld6 char(4) DEFAULT '' NOT NULL,
+UNIQUE fld1 (fld1),
+KEY fld3 (fld3),
+PRIMARY KEY (auto)
+);
+select t2.fld3 from t2 where companynr = 58 and fld3 like "%imaginable%";
+fld3
+imaginable
+select fld3 from t2 where fld3 like "%cultivation" ;
+fld3
+cultivation
+select t2.fld3,companynr from t2 where companynr = 57+1 order by fld3;
+fld3	companynr
+concoct	58
+druggists	58
+engrossing	58
+Eurydice	58
+exclaimers	58
+ferociousness	58
+hopelessness	58
+Huey	58
+imaginable	58
+judges	58
+merging	58
+ostrich	58
+peering	58
+Phelps	58
+presumes	58
+Ruth	58
+sentences	58
+Shylock	58
+straggled	58
+synergy	58
+thanking	58
+tying	58
+unlocks	58
+select fld3,companynr from t2 where companynr = 58 order by fld3;
+fld3	companynr
+concoct	58
+druggists	58
+engrossing	58
+Eurydice	58
+exclaimers	58
+ferociousness	58
+hopelessness	58
+Huey	58
+imaginable	58
+judges	58
+merging	58
+ostrich	58
+peering	58
+Phelps	58
+presumes	58
+Ruth	58
+sentences	58
+Shylock	58
+straggled	58
+synergy	58
+thanking	58
+tying	58
+unlocks	58
+select fld3 from t2 order by fld3 desc limit 10;
+fld3
+youthfulness
+yelped
+Wotan
+workers
+Witt
+witchcraft
+Winsett
+Willy
+willed
+wildcats
+select fld3 from t2 order by fld3 desc limit 5;
+fld3
+youthfulness
+yelped
+Wotan
+workers
+Witt
+select fld3 from t2 order by fld3 desc limit 5,5;
+fld3
+witchcraft
+Winsett
+Willy
+willed
+wildcats
+select t2.fld3 from t2 where fld3 = 'honeysuckle';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'honeysuckl_';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'hon_ysuckl_';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'honeysuckle%';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'h%le';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'honeysuckle_';
+fld3
+select t2.fld3 from t2 where fld3 LIKE 'don_t_find_me_please%';
+fld3
+explain select t2.fld3 from t2 where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ref	fld3	fld3	30	const	1	Using where; Using index
+explain select fld3 from t2 ignore index (fld3) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+explain select fld3 from t2 use index (fld1) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+explain select fld3 from t2 use index (fld3) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ref	fld3	fld3	30	const	1	Using where; Using index
+explain select fld3 from t2 use index (fld1,fld3) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ref	fld3	fld3	30	const	1	Using where; Using index
+explain select fld3 from t2 ignore index (fld3,not_used);
+ERROR 42000: Key 'not_used' doesn't exist in table 't2'
+explain select fld3 from t2 use index (not_used);
+ERROR 42000: Key 'not_used' doesn't exist in table 't2'
+select t2.fld3 from t2 where fld3 >= 'honeysuckle' and fld3 <= 'honoring' order by fld3;
+fld3
+honeysuckle
+honoring
+explain select t2.fld3 from t2 where fld3 >= 'honeysuckle' and fld3 <= 'honoring' order by fld3;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	range	fld3	fld3	30	NULL	2	Using where; Using index
+select fld1,fld3 from t2 where fld3="Colombo" or fld3 = "nondecreasing" order by fld3;
+fld1	fld3
+148504	Colombo
+068305	Colombo
+000000	nondecreasing
+select fld1,fld3 from t2 where companynr = 37 and fld3 = 'appendixes';
+fld1	fld3
+232605	appendixes
+1232605	appendixes
+1232606	appendixes
+1232607	appendixes
+1232608	appendixes
+1232609	appendixes
+select fld1 from t2 where fld1=250501 or fld1="250502";
+fld1
+250501
+250502
+explain select fld1 from t2 where fld1=250501 or fld1="250502";
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	range	fld1	fld1	4	NULL	2	Using where; Using index
+select fld1 from t2 where fld1=250501 or fld1=250502 or fld1 >= 250505 and fld1 <= 250601 or fld1 between 250501 and 250502;
+fld1
+250501
+250502
+250505
+250601
+explain select fld1 from t2 where fld1=250501 or fld1=250502 or fld1 >= 250505 and fld1 <= 250601 or fld1 between 250501 and 250502;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	range	fld1	fld1	4	NULL	4	Using where; Using index
+select fld1,fld3 from t2 where companynr = 37 and fld3 like 'f%';
+fld1	fld3
+012001	flanking
+013602	foldout
+013606	fingerings
+018007	fanatic
+018017	featherweight
+018054	fetters
+018103	flint
+018104	flopping
+036002	funereal
+038017	fetched
+038205	firearm
+058004	Fenton
+088303	feminine
+186002	freakish
+188007	flurried
+188505	fitting
+198006	furthermore
+202301	Fitzpatrick
+208101	fiftieth
+208113	freest
+218008	finishers
+218022	feed
+218401	faithful
+226205	foothill
+226209	furnishings
+228306	forthcoming
+228311	fated
+231315	freezes
+232102	forgivably
+238007	filial
+238008	fixedly
+select fld3 from t2 where fld3 like "L%" and fld3 = "ok";
+fld3
+select fld3 from t2 where (fld3 like "C%" and fld3 = "Chantilly");
+fld3
+Chantilly
+select fld1,fld3 from t2 where fld1 like "25050%";
+fld1	fld3
+250501	poisoning
+250502	Iraqis
+250503	heaving
+250504	population
+250505	bomb
+select fld1,fld3 from t2 where fld1 like "25050_";
+fld1	fld3
+250501	poisoning
+250502	Iraqis
+250503	heaving
+250504	population
+250505	bomb
+select distinct companynr from t2;
+companynr
+00
+37
+36
+50
+58
+29
+40
+53
+65
+41
+34
+68
+select distinct companynr from t2 order by companynr;
+companynr
+00
+29
+34
+36
+37
+40
+41
+50
+53
+58
+65
+68
+select distinct companynr from t2 order by companynr desc;
+companynr
+68
+65
+58
+53
+50
+41
+40
+37
+36
+34
+29
+00
+select distinct t2.fld3,period from t2,t1 where companynr=37 and fld3 like "O%";
+fld3	period
+obliterates	9410
+offload	9410
+opaquely	9410
+organizer	9410
+overestimating	9410
+overlay	9410
+select distinct fld3 from t2 where companynr = 34 order by fld3;
+fld3
+absentee
+accessed
+ahead
+alphabetic
+Asiaticizations
+attitude
+aye
+bankruptcies
+belays
+Blythe
+bomb
+boulevard
+bulldozes
+cannot
+caressing
+charcoal
+checksumming
+chess
+clubroom
+colorful
+cosy
+creator
+crying
+Darius
+diffusing
+duality
+Eiffel
+Epiphany
+Ernestine
+explorers
+exterminated
+famine
+forked
+Gershwins
+heaving
+Hodges
+Iraqis
+Italianization
+Lagos
+landslide
+libretto
+Majorca
+mastering
+narrowed
+occurred
+offerers
+Palestine
+Peruvianizes
+pharmaceutic
+poisoning
+population
+Pygmalion
+rats
+realest
+recording
+regimented
+retransmitting
+reviver
+rouses
+scars
+sicker
+sleepwalk
+stopped
+sugars
+translatable
+uncles
+unexpected
+uprisings
+versatility
+vest
+select distinct fld3 from t2 limit 10;
+fld3
+abates
+abiding
+Abraham
+abrogating
+absentee
+abut
+accessed
+accruing
+accumulating
+accuracies
+select distinct fld3 from t2 having fld3 like "A%" limit 10;
+fld3
+abates
+abiding
+Abraham
+abrogating
+absentee
+abut
+accessed
+accruing
+accumulating
+accuracies
+select distinct substring(fld3,1,3) from t2 where fld3 like "A%";
+substring(fld3,1,3)
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+Adl
+adm
+Ado
+ads
+adv
+aer
+aff
+afi
+afl
+afo
+agi
+ahe
+aim
+air
+Ald
+alg
+ali
+all
+alp
+alr
+ama
+ame
+amm
+ana
+and
+ane
+Ang
+ani
+Ann
+Ant
+api
+app
+aqu
+Ara
+arc
+Arm
+arr
+Art
+Asi
+ask
+asp
+ass
+ast
+att
+aud
+Aug
+aut
+ave
+avo
+awe
+aye
+Azt
+select distinct substring(fld3,1,3) as a from t2 having a like "A%" order by a limit 10;
+a
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+select distinct substring(fld3,1,3) from t2 where fld3 like "A%" limit 10;
+substring(fld3,1,3)
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+select distinct substring(fld3,1,3) as a from t2 having a like "A%" limit 10;
+a
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+create table t3 (
+period    int not null,
+name      char(32) not null,
+companynr int not null,
+price     double(11,0),
+price2     double(11,0),
+key (period),
+key (name)
+);
+create temporary table tmp engine = myisam select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+alter table t3 add t2nr int not null auto_increment primary key first;
+drop table tmp;
+SET BIG_TABLES=1;
+select distinct concat(fld3," ",fld3) as namn from t2,t3 where t2.fld1=t3.t2nr order by namn limit 10;
+namn
+Abraham Abraham
+abrogating abrogating
+admonishing admonishing
+Adolph Adolph
+afield afield
+aging aging
+ammonium ammonium
+analyzable analyzable
+animals animals
+animized animized
+SET BIG_TABLES=0;
+select distinct concat(fld3," ",fld3) from t2,t3 where t2.fld1=t3.t2nr order by fld3 limit 10;
+concat(fld3," ",fld3)
+Abraham Abraham
+abrogating abrogating
+admonishing admonishing
+Adolph Adolph
+afield afield
+aging aging
+ammonium ammonium
+analyzable analyzable
+animals animals
+animized animized
+select distinct fld5 from t2 limit 10;
+fld5
+neat
+Steinberg
+jarring
+tinily
+balled
+persist
+attainments
+fanatic
+measures
+rightfulness
+select distinct fld3,count(*) from t2 group by companynr,fld3 limit 10;
+fld3	count(*)
+affixed	1
+and	1
+annoyers	1
+Anthony	1
+assayed	1
+assurers	1
+attendants	1
+bedlam	1
+bedpost	1
+boasted	1
+SET BIG_TABLES=1;
+select distinct fld3,count(*) from t2 group by companynr,fld3 limit 10;
+fld3	count(*)
+affixed	1
+and	1
+annoyers	1
+Anthony	1
+assayed	1
+assurers	1
+attendants	1
+bedlam	1
+bedpost	1
+boasted	1
+SET BIG_TABLES=0;
+select distinct fld3,repeat("a",length(fld3)),count(*) from t2 group by companynr,fld3 limit 100,10;
+fld3	repeat("a",length(fld3))	count(*)
+circus	aaaaaa	1
+cited	aaaaa	1
+Colombo	aaaaaaa	1
+congresswoman	aaaaaaaaaaaaa	1
+contrition	aaaaaaaaaa	1
+corny	aaaaa	1
+cultivation	aaaaaaaaaaa	1
+definiteness	aaaaaaaaaaaa	1
+demultiplex	aaaaaaaaaaa	1
+disappointing	aaaaaaaaaaaaa	1
+select distinct companynr,rtrim(space(512+companynr)) from t3 order by 1,2;
+companynr	rtrim(space(512+companynr))
+37	
+78	
+101	
+154	
+311	
+447	
+512	
+select distinct fld3 from t2,t3 where t2.companynr = 34 and t2.fld1=t3.t2nr order by fld3;
+fld3
+explain select t3.t2nr,fld3 from t2,t3 where t2.companynr = 34 and t2.fld1=t3.t2nr order by t3.t2nr,fld3;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	fld1	NULL	NULL	NULL	1199	Using where; Using temporary; Using filesort
+1	SIMPLE	t3	eq_ref	PRIMARY	PRIMARY	4	test.t2.fld1	1	Using where; Using index
+explain select * from t3 as t1,t3 where t1.period=t3.period order by t3.period;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t1	ALL	period	NULL	NULL	NULL	41810	Using temporary; Using filesort
+1	SIMPLE	t3	ref	period	period	4	test.t1.period	4181	NULL
+explain select * from t3 as t1,t3 where t1.period=t3.period order by t3.period limit 10;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t3	index	period	period	4	NULL	1	NULL
+1	SIMPLE	t1	ref	period	period	4	test.t3.period	4181	NULL
+explain select * from t3 as t1,t3 where t1.period=t3.period order by t1.period limit 10;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t1	index	period	period	4	NULL	1	NULL
+1	SIMPLE	t3	ref	period	period	4	test.t1.period	4181	NULL
+select period from t1;
+period
+9410
+select period from t1 where period=1900;
+period
+select fld3,period from t1,t2 where fld1 = 011401 order by period;
+fld3	period
+breaking	9410
+select fld3,period from t2,t3 where t2.fld1 = 011401 and t2.fld1=t3.t2nr and t3.period=1001;
+fld3	period
+breaking	1001
+explain select fld3,period from t2,t3 where t2.fld1 = 011401 and t3.t2nr=t2.fld1 and 1001 = t3.period;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	const	fld1	fld1	4	const	1	NULL
+1	SIMPLE	t3	const	PRIMARY,period	PRIMARY	4	const	1	NULL
+select fld3,period from t2,t1 where companynr*10 = 37*10;
+fld3	period
+breaking	9410
+Romans	9410
+intercepted	9410
+bewilderingly	9410
+astound	9410
+admonishing	9410
+sumac	9410
+flanking	9410
+combed	9410
+subjective	9410
+scatterbrain	9410
+Eulerian	9410
+Kane	9410
+overlay	9410
+perturb	9410
+goblins	9410
+annihilates	9410
+Wotan	9410
+snatching	9410
+concludes	9410
+laterally	9410
+yelped	9410
+grazing	9410
+Baird	9410
+celery	9410
+misunderstander	9410
+handgun	9410
+foldout	9410
+mystic	9410
+succumbed	9410
+Nabisco	9410
+fingerings	9410
+aging	9410
+afield	9410
+ammonium	9410
+boat	9410
+intelligibility	9410
+Augustine	9410
+teethe	9410
+dreaded	9410
+scholastics	9410
+audiology	9410
+wallet	9410
+parters	9410
+eschew	9410
+quitter	9410
+neat	9410
+Steinberg	9410
+jarring	9410
+tinily	9410
+balled	9410
+persist	9410
+attainments	9410
+fanatic	9410
+measures	9410
+rightfulness	9410
+capably	9410
+impulsive	9410
+starlet	9410
+terminators	9410
+untying	9410
+announces	9410
+featherweight	9410
+pessimist	9410
+daughter	9410
+decliner	9410
+lawgiver	9410
+stated	9410
+readable	9410
+attrition	9410
+cascade	9410
+motors	9410
+interrogate	9410
+pests	9410
+stairway	9410
+dopers	9410
+testicle	9410
+Parsifal	9410
+leavings	9410
+postulation	9410
+squeaking	9410
+contrasted	9410
+leftover	9410
+whiteners	9410
+erases	9410
+Punjab	9410
+Merritt	9410
+Quixotism	9410
+sweetish	9410
+dogging	9410
+scornfully	9410
+bellow	9410
+bills	9410
+cupboard	9410
+sureties	9410
+puddings	9410
+fetters	9410
+bivalves	9410
+incurring	9410
+Adolph	9410
+pithed	9410
+Miles	9410
+trimmings	9410
+tragedies	9410
+skulking	9410
+flint	9410
+flopping	9410
+relaxing	9410
+offload	9410
+suites	9410
+lists	9410
+animized	9410
+multilayer	9410
+standardizes	9410
+Judas	9410
+vacuuming	9410
+dentally	9410
+humanness	9410
+inch	9410
+Weissmuller	9410
+irresponsibly	9410
+luckily	9410
+culled	9410
+medical	9410
+bloodbath	9410
+subschema	9410
+animals	9410
+Micronesia	9410
+repetitions	9410
+Antares	9410
+ventilate	9410
+pityingly	9410
+interdependent	9410
+Graves	9410
+neonatal	9410
+chafe	9410
+honoring	9410
+realtor	9410
+elite	9410
+funereal	9410
+abrogating	9410
+sorters	9410
+Conley	9410
+lectured	9410
+Abraham	9410
+Hawaii	9410
+cage	9410
+hushes	9410
+Simla	9410
+reporters	9410
+Dutchman	9410
+descendants	9410
+groupings	9410
+dissociate	9410
+coexist	9410
+Beebe	9410
+Taoism	9410
+Connally	9410
+fetched	9410
+checkpoints	9410
+rusting	9410
+galling	9410
+obliterates	9410
+traitor	9410
+resumes	9410
+analyzable	9410
+terminator	9410
+gritty	9410
+firearm	9410
+minima	9410
+Selfridge	9410
+disable	9410
+witchcraft	9410
+betroth	9410
+Manhattanize	9410
+imprint	9410
+peeked	9410
+swelling	9410
+interrelationships	9410
+riser	9410
+Gandhian	9410
+peacock	9410
+bee	9410
+kanji	9410
+dental	9410
+scarf	9410
+chasm	9410
+insolence	9410
+syndicate	9410
+alike	9410
+imperial	9410
+convulsion	9410
+railway	9410
+validate	9410
+normalizes	9410
+comprehensive	9410
+chewing	9410
+denizen	9410
+schemer	9410
+chronicle	9410
+Kline	9410
+Anatole	9410
+partridges	9410
+brunch	9410
+recruited	9410
+dimensions	9410
+Chicana	9410
+announced	9410
+praised	9410
+employing	9410
+linear	9410
+quagmire	9410
+western	9410
+relishing	9410
+serving	9410
+scheduling	9410
+lore	9410
+eventful	9410
+arteriole	9410
+disentangle	9410
+cured	9410
+Fenton	9410
+avoidable	9410
+drains	9410
+detectably	9410
+husky	9410
+impelling	9410
+undoes	9410
+evened	9410
+squeezes	9410
+destroyer	9410
+rudeness	9410
+beaner	9410
+boorish	9410
+Everhart	9410
+encompass	9410
+mushrooms	9410
+Alison	9410
+externally	9410
+pellagra	9410
+cult	9410
+creek	9410
+Huffman	9410
+Majorca	9410
+governing	9410
+gadfly	9410
+reassigned	9410
+intentness	9410
+craziness	9410
+psychic	9410
+squabbled	9410
+burlesque	9410
+capped	9410
+extracted	9410
+DiMaggio	9410
+exclamation	9410
+subdirectory	9410
+Gothicism	9410
+feminine	9410
+metaphysically	9410
+sanding	9410
+Miltonism	9410
+freakish	9410
+index	9410
+straight	9410
+flurried	9410
+denotative	9410
+coming	9410
+commencements	9410
+gentleman	9410
+gifted	9410
+Shanghais	9410
+sportswriting	9410
+sloping	9410
+navies	9410
+leaflet	9410
+shooter	9410
+Joplin	9410
+babies	9410
+assails	9410
+admiring	9410
+swaying	9410
+Goldstine	9410
+fitting	9410
+Norwalk	9410
+analogy	9410
+deludes	9410
+cokes	9410
+Clayton	9410
+exhausts	9410
+causality	9410
+sating	9410
+icon	9410
+throttles	9410
+communicants	9410
+dehydrate	9410
+priceless	9410
+publicly	9410
+incidentals	9410
+commonplace	9410
+mumbles	9410
+furthermore	9410
+cautioned	9410
+parametrized	9410
+registration	9410
+sadly	9410
+positioning	9410
+babysitting	9410
+eternal	9410
+hoarder	9410
+congregates	9410
+rains	9410
+workers	9410
+sags	9410
+unplug	9410
+garage	9410
+boulder	9410
+specifics	9410
+Teresa	9410
+Winsett	9410
+convenient	9410
+buckboards	9410
+amenities	9410
+resplendent	9410
+sews	9410
+participated	9410
+Simon	9410
+certificates	9410
+Fitzpatrick	9410
+Evanston	9410
+misted	9410
+textures	9410
+save	9410
+count	9410
+rightful	9410
+chaperone	9410
+Lizzy	9410
+clenched	9410
+effortlessly	9410
+accessed	9410
+beaters	9410
+Hornblower	9410
+vests	9410
+indulgences	9410
+infallibly	9410
+unwilling	9410
+excrete	9410
+spools	9410
+crunches	9410
+overestimating	9410
+ineffective	9410
+humiliation	9410
+sophomore	9410
+star	9410
+rifles	9410
+dialysis	9410
+arriving	9410
+indulge	9410
+clockers	9410
+languages	9410
+Antarctica	9410
+percentage	9410
+ceiling	9410
+specification	9410
+regimented	9410
+ciphers	9410
+pictures	9410
+serpents	9410
+allot	9410
+realized	9410
+mayoral	9410
+opaquely	9410
+hostess	9410
+fiftieth	9410
+incorrectly	9410
+decomposition	9410
+stranglings	9410
+mixture	9410
+electroencephalography	9410
+similarities	9410
+charges	9410
+freest	9410
+Greenberg	9410
+tinting	9410
+expelled	9410
+warm	9410
+smoothed	9410
+deductions	9410
+Romano	9410
+bitterroot	9410
+corset	9410
+securing	9410
+environing	9410
+cute	9410
+Crays	9410
+heiress	9410
+inform	9410
+avenge	9410
+universals	9410
+Kinsey	9410
+ravines	9410
+bestseller	9410
+equilibrium	9410
+extents	9410
+relatively	9410
+pressure	9410
+critiques	9410
+befouled	9410
+rightfully	9410
+mechanizing	9410
+Latinizes	9410
+timesharing	9410
+Aden	9410
+embassies	9410
+males	9410
+shapelessly	9410
+mastering	9410
+Newtonian	9410
+finishers	9410
+abates	9410
+teem	9410
+kiting	9410
+stodgy	9410
+feed	9410
+guitars	9410
+airships	9410
+store	9410
+denounces	9410
+Pyle	9410
+Saxony	9410
+serializations	9410
+Peruvian	9410
+taxonomically	9410
+kingdom	9410
+stint	9410
+Sault	9410
+faithful	9410
+Ganymede	9410
+tidiness	9410
+gainful	9410
+contrary	9410
+Tipperary	9410
+tropics	9410
+theorizers	9410
+renew	9410
+already	9410
+terminal	9410
+Hegelian	9410
+hypothesizer	9410
+warningly	9410
+journalizing	9410
+nested	9410
+Lars	9410
+saplings	9410
+foothill	9410
+labeled	9410
+imperiously	9410
+reporters	9410
+furnishings	9410
+precipitable	9410
+discounts	9410
+excises	9410
+Stalin	9410
+despot	9410
+ripeness	9410
+Arabia	9410
+unruly	9410
+mournfulness	9410
+boom	9410
+slaughter	9410
+Sabine	9410
+handy	9410
+rural	9410
+organizer	9410
+shipyard	9410
+civics	9410
+inaccuracy	9410
+rules	9410
+juveniles	9410
+comprised	9410
+investigations	9410
+stabilizes	9410
+seminaries	9410
+Hunter	9410
+sporty	9410
+test	9410
+weasels	9410
+CERN	9410
+tempering	9410
+afore	9410
+Galatean	9410
+techniques	9410
+error	9410
+veranda	9410
+severely	9410
+Cassites	9410
+forthcoming	9410
+guides	9410
+vanish	9410
+lied	9410
+sawtooth	9410
+fated	9410
+gradually	9410
+widens	9410
+preclude	9410
+evenhandedly	9410
+percentage	9410
+disobedience	9410
+humility	9410
+gleaning	9410
+petted	9410
+bloater	9410
+minion	9410
+marginal	9410
+apiary	9410
+measures	9410
+precaution	9410
+repelled	9410
+primary	9410
+coverings	9410
+Artemia	9410
+navigate	9410
+spatial	9410
+Gurkha	9410
+meanwhile	9410
+Melinda	9410
+Butterfield	9410
+Aldrich	9410
+previewing	9410
+glut	9410
+unaffected	9410
+inmate	9410
+mineral	9410
+impending	9410
+meditation	9410
+ideas	9410
+miniaturizes	9410
+lewdly	9410
+title	9410
+youthfulness	9410
+creak	9410
+Chippewa	9410
+clamored	9410
+freezes	9410
+forgivably	9410
+reduce	9410
+McGovern	9410
+Nazis	9410
+epistle	9410
+socializes	9410
+conceptions	9410
+Kevin	9410
+uncovering	9410
+chews	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+raining	9410
+infest	9410
+compartment	9410
+minting	9410
+ducks	9410
+roped	9410
+waltz	9410
+Lillian	9410
+repressions	9410
+chillingly	9410
+noncritical	9410
+lithograph	9410
+spongers	9410
+parenthood	9410
+posed	9410
+instruments	9410
+filial	9410
+fixedly	9410
+relives	9410
+Pandora	9410
+watering	9410
+ungrateful	9410
+secures	9410
+poison	9410
+dusted	9410
+encompasses	9410
+presentation	9410
+Kantian	9410
+select fld3,period,price,price2 from t2,t3 where t2.fld1=t3.t2nr and period >= 1001 and period <= 1002 and t2.companynr = 37 order by fld3,period, price;
+fld3	period	price	price2
+admonishing	1002	28357832	8723648
+analyzable	1002	28357832	8723648
+annihilates	1001	5987435	234724
+Antares	1002	28357832	8723648
+astound	1001	5987435	234724
+audiology	1001	5987435	234724
+Augustine	1002	28357832	8723648
+Baird	1002	28357832	8723648
+bewilderingly	1001	5987435	234724
+breaking	1001	5987435	234724
+Conley	1001	5987435	234724
+dentally	1002	28357832	8723648
+dissociate	1002	28357832	8723648
+elite	1001	5987435	234724
+eschew	1001	5987435	234724
+Eulerian	1001	5987435	234724
+flanking	1001	5987435	234724
+foldout	1002	28357832	8723648
+funereal	1002	28357832	8723648
+galling	1002	28357832	8723648
+Graves	1001	5987435	234724
+grazing	1001	5987435	234724
+groupings	1001	5987435	234724
+handgun	1001	5987435	234724
+humility	1002	28357832	8723648
+impulsive	1002	28357832	8723648
+inch	1001	5987435	234724
+intelligibility	1001	5987435	234724
+jarring	1001	5987435	234724
+lawgiver	1001	5987435	234724
+lectured	1002	28357832	8723648
+Merritt	1002	28357832	8723648
+neonatal	1001	5987435	234724
+offload	1002	28357832	8723648
+parters	1002	28357832	8723648
+pityingly	1002	28357832	8723648
+puddings	1002	28357832	8723648
+Punjab	1001	5987435	234724
+quitter	1002	28357832	8723648
+realtor	1001	5987435	234724
+relaxing	1001	5987435	234724
+repetitions	1001	5987435	234724
+resumes	1001	5987435	234724
+Romans	1002	28357832	8723648
+rusting	1001	5987435	234724
+scholastics	1001	5987435	234724
+skulking	1002	28357832	8723648
+stated	1002	28357832	8723648
+suites	1002	28357832	8723648
+sureties	1001	5987435	234724
+testicle	1002	28357832	8723648
+tinily	1002	28357832	8723648
+tragedies	1001	5987435	234724
+trimmings	1001	5987435	234724
+vacuuming	1001	5987435	234724
+ventilate	1001	5987435	234724
+wallet	1001	5987435	234724
+Weissmuller	1002	28357832	8723648
+Wotan	1002	28357832	8723648
+select t2.fld1,fld3,period,price,price2 from t2,t3 where t2.fld1>= 18201 and t2.fld1 <= 18811 and t2.fld1=t3.t2nr and period = 1001 and t2.companynr = 37;
+fld1	fld3	period	price	price2
+018201	relaxing	1001	5987435	234724
+018601	vacuuming	1001	5987435	234724
+018801	inch	1001	5987435	234724
+018811	repetitions	1001	5987435	234724
+create table t4 (
+companynr tinyint(2) unsigned zerofill NOT NULL default '00',
+companyname char(30) NOT NULL default '',
+PRIMARY KEY (companynr),
+UNIQUE KEY companyname(companyname)
+) ENGINE=MyISAM MAX_ROWS=50 PACK_KEYS=1 COMMENT='companynames';
+select STRAIGHT_JOIN t2.companynr,companyname from t4,t2 where t2.companynr=t4.companynr group by t2.companynr;
+companynr	companyname
+00	Unknown
+29	company 1
+34	company 2
+36	company 3
+37	company 4
+40	company 5
+41	company 6
+50	company 11
+53	company 7
+58	company 8
+65	company 9
+68	company 10
+select SQL_SMALL_RESULT t2.companynr,companyname from t4,t2 where t2.companynr=t4.companynr group by t2.companynr;
+companynr	companyname
+00	Unknown
+29	company 1
+34	company 2
+36	company 3
+37	company 4
+40	company 5
+41	company 6
+50	company 11
+53	company 7
+58	company 8
+65	company 9
+68	company 10
+select * from t1,t1 t12;
+Period	Varor_period	Period	Varor_period
+9410	9412	9410	9412
+select t2.fld1,t22.fld1 from t2,t2 t22 where t2.fld1 >= 250501 and t2.fld1 <= 250505 and t22.fld1 >= 250501 and t22.fld1 <= 250505;
+fld1	fld1
+250501	250501
+250502	250501
+250503	250501
+250504	250501
+250505	250501
+250501	250502
+250502	250502
+250503	250502
+250504	250502
+250505	250502
+250501	250503
+250502	250503
+250503	250503
+250504	250503
+250505	250503
+250501	250504
+250502	250504
+250503	250504
+250504	250504
+250505	250504
+250501	250505
+250502	250505
+250503	250505
+250504	250505
+250505	250505
+insert into t2 (fld1, companynr) values (999999,99);
+select t2.companynr,companyname from t2 left join t4 using (companynr) where t4.companynr is null;
+companynr	companyname
+99	NULL
+select count(*) from t2 left join t4 using (companynr) where t4.companynr is not null;
+count(*)
+1199
+explain select t2.companynr,companyname from t2 left join t4 using (companynr) where t4.companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1200	NULL
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	Using where; Not exists
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1200	Using where; Not exists; Using join buffer (Block Nested Loop)
+select companynr,companyname from t2 left join t4 using (companynr) where companynr is null;
+companynr	companyname
+select count(*) from t2 left join t4 using (companynr) where companynr is not null;
+count(*)
+1200
+explain select companynr,companyname from t2 left join t4 using (companynr) where companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Impossible WHERE
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Impossible WHERE
+delete from t2 where fld1=999999;
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	NULL
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 or t2.companynr < 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	NULL
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 and t4.companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	NULL
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 or companynr < 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 and companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 or t2.companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 or t2.companynr < 0 or t4.companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where ifnull(t2.companynr,1)>0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 or companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 or companynr < 0 or companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where ifnull(companynr,1)>0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+select distinct t2.companynr,t4.companynr from t2,t4 where t2.companynr=t4.companynr+1;
+companynr	companynr
+37	36
+41	40
+explain select distinct t2.companynr,t4.companynr from t2,t4 where t2.companynr=t4.companynr+1;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	index	NULL	PRIMARY	1	NULL	12	Using index; Using temporary
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+select t2.fld1,t2.companynr,fld3,period from t3,t2 where t2.fld1 = 38208 and t2.fld1=t3.t2nr and period = 1008 or t2.fld1 = 38008 and t2.fld1 =t3.t2nr and period = 1008;
+fld1	companynr	fld3	period
+038008	37	reporters	1008
+038208	37	Selfridge	1008
+select t2.fld1,t2.companynr,fld3,period from t3,t2 where (t2.fld1 = 38208 or t2.fld1 = 38008) and t2.fld1=t3.t2nr and period>=1008 and period<=1009;
+fld1	companynr	fld3	period
+038008	37	reporters	1008
+038208	37	Selfridge	1008
+select t2.fld1,t2.companynr,fld3,period from t3,t2 where (t3.t2nr = 38208 or t3.t2nr = 38008) and t2.fld1=t3.t2nr and period>=1008 and period<=1009;
+fld1	companynr	fld3	period
+038008	37	reporters	1008
+038208	37	Selfridge	1008
+select period from t1 where (((period > 0) or period < 10000 or (period = 1900)) and (period=1900 and period <= 1901) or (period=1903 and (period=1903)) and period>=1902) or ((period=1904 or period=1905) or (period=1906 or period>1907)) or (period=1908 and period = 1909);
+period
+9410
+select period from t1 where ((period > 0 and period < 1) or (((period > 0 and period < 100) and (period > 10)) or (period > 10)) or (period > 0 and (period > 5 or period > 6)));
+period
+9410
+select a.fld1 from t2 as a,t2 b where ((a.fld1 = 250501 and a.fld1=b.fld1) or a.fld1=250502 or a.fld1=250503 or (a.fld1=250505 and a.fld1<=b.fld1 and b.fld1>=a.fld1)) and a.fld1=b.fld1;
+fld1
+250501
+250502
+250503
+250505
+select fld1 from t2 where fld1 in (250502,98005,98006,250503,250605,250606) and fld1 >=250502 and fld1 not in (250605,250606);
+fld1
+250502
+250503
+select fld1 from t2 where fld1 between 250502 and 250504;
+fld1
+250502
+250503
+250504
+select fld3 from t2 where (((fld3 like "_%L%" ) or (fld3 like "%ok%")) and ( fld3 like "L%" or fld3 like "G%")) and fld3 like "L%" ;
+fld3
+label
+labeled
+labeled
+landslide
+laterally
+leaflet
+lewdly
+Lillian
+luckily
+select count(*) from t1;
+count(*)
+1
+select companynr,count(*),sum(fld1) from t2 group by companynr;
+companynr	count(*)	sum(fld1)
+00	82	10355753
+29	95	14473298
+34	70	17788966
+36	215	22786296
+37	588	83602098
+40	37	6618386
+41	52	12816335
+50	11	1595438
+53	4	793210
+58	23	2254293
+65	10	2284055
+68	12	3097288
+select companynr,count(*) from t2 group by companynr order by companynr desc limit 5;
+companynr	count(*)
+68	12
+65	10
+58	23
+53	4
+50	11
+select count(*),min(fld4),max(fld4),sum(fld1),avg(fld1),std(fld1),variance(fld1) from t2 where companynr = 34 and fld4<>"";
+count(*)	min(fld4)	max(fld4)	sum(fld1)	avg(fld1)	std(fld1)	variance(fld1)
+70	absentee	vest	17788966	254128.0857	3272.5939722090234	10709871.306938833
+explain extended select count(*),min(fld4),max(fld4),sum(fld1),avg(fld1),std(fld1),variance(fld1) from t2 where companynr = 34 and fld4<>"";
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	filtered	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	100.00	Using where
+Warnings:
+Note	1003	/* select#1 */ select count(0) AS `count(*)`,min(`test`.`t2`.`fld4`) AS `min(fld4)`,max(`test`.`t2`.`fld4`) AS `max(fld4)`,sum(`test`.`t2`.`fld1`) AS `sum(fld1)`,avg(`test`.`t2`.`fld1`) AS `avg(fld1)`,std(`test`.`t2`.`fld1`) AS `std(fld1)`,variance(`test`.`t2`.`fld1`) AS `variance(fld1)` from `test`.`t2` where ((`test`.`t2`.`companynr` = 34) and (`test`.`t2`.`fld4` <> ''))
+select companynr,count(*),min(fld4),max(fld4),sum(fld1),avg(fld1),std(fld1),variance(fld1) from t2 group by companynr limit 3;
+companynr	count(*)	min(fld4)	max(fld4)	sum(fld1)	avg(fld1)	std(fld1)	variance(fld1)
+00	82	Anthony	windmills	10355753	126289.6707	115550.97568479746	13352027981.708656
+29	95	abut	wetness	14473298	152350.5053	8368.547956641249	70032594.90260443
+34	70	absentee	vest	17788966	254128.0857	3272.5939722090234	10709871.306938833
+select companynr,t2nr,count(price),sum(price),min(price),max(price),avg(price) from t3 where companynr = 37 group by companynr,t2nr limit 10;
+companynr	t2nr	count(price)	sum(price)	min(price)	max(price)	avg(price)
+37	1	1	5987435	5987435	5987435	5987435.0000
+37	2	1	28357832	28357832	28357832	28357832.0000
+37	3	1	39654943	39654943	39654943	39654943.0000
+37	11	1	5987435	5987435	5987435	5987435.0000
+37	12	1	28357832	28357832	28357832	28357832.0000
+37	13	1	39654943	39654943	39654943	39654943.0000
+37	21	1	5987435	5987435	5987435	5987435.0000
+37	22	1	28357832	28357832	28357832	28357832.0000
+37	23	1	39654943	39654943	39654943	39654943.0000
+37	31	1	5987435	5987435	5987435	5987435.0000
+select /*! SQL_SMALL_RESULT */ companynr,t2nr,count(price),sum(price),min(price),max(price),avg(price) from t3 where companynr = 37 group by companynr,t2nr limit 10;
+companynr	t2nr	count(price)	sum(price)	min(price)	max(price)	avg(price)
+37	1	1	5987435	5987435	5987435	5987435.0000
+37	2	1	28357832	28357832	28357832	28357832.0000
+37	3	1	39654943	39654943	39654943	39654943.0000
+37	11	1	5987435	5987435	5987435	5987435.0000
+37	12	1	28357832	28357832	28357832	28357832.0000
+37	13	1	39654943	39654943	39654943	39654943.0000
+37	21	1	5987435	5987435	5987435	5987435.0000
+37	22	1	28357832	28357832	28357832	28357832.0000
+37	23	1	39654943	39654943	39654943	39654943.0000
+37	31	1	5987435	5987435	5987435	5987435.0000
+select companynr,count(price),sum(price),min(price),max(price),avg(price) from t3 group by companynr ;
+companynr	count(price)	sum(price)	min(price)	max(price)	avg(price)
+37	12543	309394878010	5987435	39654943	24666736.6667
+78	8362	414611089292	726498	98439034	49582766.0000
+101	4181	3489454238	834598	834598	834598.0000
+154	4181	4112197254950	983543950	983543950	983543950.0000
+311	4181	979599938	234298	234298	234298.0000
+447	4181	9929180954	2374834	2374834	2374834.0000
+512	4181	3288532102	786542	786542	786542.0000
+select distinct mod(companynr,10) from t4 group by companynr;
+mod(companynr,10)
+0
+9
+4
+6
+7
+1
+3
+8
+5
+select distinct 1 from t4 group by companynr;
+1
+1
+select count(distinct fld1) from t2;
+count(distinct fld1)
+1199
+select companynr,count(distinct fld1) from t2 group by companynr;
+companynr	count(distinct fld1)
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(*) from t2 group by companynr;
+companynr	count(*)
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(distinct concat(fld1,repeat(65,1000))) from t2 group by companynr;
+companynr	count(distinct concat(fld1,repeat(65,1000)))
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(distinct concat(fld1,repeat(65,200))) from t2 group by companynr;
+companynr	count(distinct concat(fld1,repeat(65,200)))
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(distinct floor(fld1/100)) from t2 group by companynr;
+companynr	count(distinct floor(fld1/100))
+00	47
+29	35
+34	14
+36	69
+37	108
+40	16
+41	11
+50	9
+53	1
+58	1
+65	1
+68	1
+select companynr,count(distinct concat(repeat(65,1000),floor(fld1/100))) from t2 group by companynr;
+companynr	count(distinct concat(repeat(65,1000),floor(fld1/100)))
+00	47
+29	35
+34	14
+36	69
+37	108
+40	16
+41	11
+50	9
+53	1
+58	1
+65	1
+68	1
+select sum(fld1),fld3 from t2 where fld3="Romans" group by fld1 limit 10;
+sum(fld1)	fld3
+11402	Romans
+select name,count(*) from t3 where name='cloakroom' group by name;
+name	count(*)
+cloakroom	4181
+select name,count(*) from t3 where name='cloakroom' and price>10 group by name;
+name	count(*)
+cloakroom	4181
+select count(*) from t3 where name='cloakroom' and price2=823742;
+count(*)
+4181
+select name,count(*) from t3 where name='cloakroom' and price2=823742 group by name;
+name	count(*)
+cloakroom	4181
+select name,count(*) from t3 where name >= "extramarital" and price <= 39654943 group by name;
+name	count(*)
+extramarital	4181
+gazer	4181
+gems	4181
+Iranizes	4181
+spates	4181
+tucked	4181
+violinist	4181
+select t2.fld3,count(*) from t2,t3 where t2.fld1=158402 and t3.name=t2.fld3 group by t3.name;
+fld3	count(*)
+spates	4181
+select companynr|0,companyname from t4 group by 1;
+companynr|0	companyname
+0	Unknown
+29	company 1
+34	company 2
+36	company 3
+37	company 4
+40	company 5
+41	company 6
+50	company 11
+53	company 7
+58	company 8
+65	company 9
+68	company 10
+select t2.companynr,companyname,count(*) from t2,t4 where t2.companynr=t4.companynr group by t2.companynr order by companyname;
+companynr	companyname	count(*)
+29	company 1	95
+68	company 10	12
+50	company 11	11
+34	company 2	70
+36	company 3	215
+37	company 4	588
+40	company 5	37
+41	company 6	52
+53	company 7	4
+58	company 8	23
+65	company 9	10
+00	Unknown	82
+select t2.fld1,count(*) from t2,t3 where t2.fld1=158402 and t3.name=t2.fld3 group by t3.name;
+fld1	count(*)
+158402	4181
+select sum(Period)/count(*) from t1;
+sum(Period)/count(*)
+9410.0000
+select companynr,count(price) as "count",sum(price) as "sum" ,abs(sum(price)/count(price)-avg(price)) as "diff",(0+count(price))*companynr as func from t3 group by companynr;
+companynr	count	sum	diff	func
+37	12543	309394878010	0.0000	464091
+78	8362	414611089292	0.0000	652236
+101	4181	3489454238	0.0000	422281
+154	4181	4112197254950	0.0000	643874
+311	4181	979599938	0.0000	1300291
+447	4181	9929180954	0.0000	1868907
+512	4181	3288532102	0.0000	2140672
+select companynr,sum(price)/count(price) as avg from t3 group by companynr having avg > 70000000 order by avg;
+companynr	avg
+154	983543950.0000
+select companynr,count(*) from t2 group by companynr order by 2 desc;
+companynr	count(*)
+37	588
+36	215
+29	95
+00	82
+34	70
+41	52
+40	37
+58	23
+68	12
+50	11
+65	10
+53	4
+select companynr,count(*) from t2 where companynr > 40 group by companynr order by 2 desc;
+companynr	count(*)
+41	52
+58	23
+68	12
+50	11
+65	10
+53	4
+select t2.fld4,t2.fld1,count(price),sum(price),min(price),max(price),avg(price) from t3,t2 where t3.companynr = 37 and t2.fld1 = t3.t2nr group by fld1,t2.fld4;
+fld4	fld1	count(price)	sum(price)	min(price)	max(price)	avg(price)
+teethe	000001	1	5987435	5987435	5987435	5987435.0000
+dreaded	011401	1	5987435	5987435	5987435	5987435.0000
+scholastics	011402	1	28357832	28357832	28357832	28357832.0000
+audiology	011403	1	39654943	39654943	39654943	39654943.0000
+wallet	011501	1	5987435	5987435	5987435	5987435.0000
+parters	011701	1	5987435	5987435	5987435	5987435.0000
+eschew	011702	1	28357832	28357832	28357832	28357832.0000
+quitter	011703	1	39654943	39654943	39654943	39654943.0000
+neat	012001	1	5987435	5987435	5987435	5987435.0000
+Steinberg	012003	1	39654943	39654943	39654943	39654943.0000
+balled	012301	1	5987435	5987435	5987435	5987435.0000
+persist	012302	1	28357832	28357832	28357832	28357832.0000
+attainments	012303	1	39654943	39654943	39654943	39654943.0000
+capably	012501	1	5987435	5987435	5987435	5987435.0000
+impulsive	012602	1	28357832	28357832	28357832	28357832.0000
+starlet	012603	1	39654943	39654943	39654943	39654943.0000
+featherweight	012701	1	5987435	5987435	5987435	5987435.0000
+pessimist	012702	1	28357832	28357832	28357832	28357832.0000
+daughter	012703	1	39654943	39654943	39654943	39654943.0000
+lawgiver	013601	1	5987435	5987435	5987435	5987435.0000
+stated	013602	1	28357832	28357832	28357832	28357832.0000
+readable	013603	1	39654943	39654943	39654943	39654943.0000
+testicle	013801	1	5987435	5987435	5987435	5987435.0000
+Parsifal	013802	1	28357832	28357832	28357832	28357832.0000
+leavings	013803	1	39654943	39654943	39654943	39654943.0000
+squeaking	013901	1	5987435	5987435	5987435	5987435.0000
+contrasted	016001	1	5987435	5987435	5987435	5987435.0000
+leftover	016201	1	5987435	5987435	5987435	5987435.0000
+whiteners	016202	1	28357832	28357832	28357832	28357832.0000
+erases	016301	1	5987435	5987435	5987435	5987435.0000
+Punjab	016302	1	28357832	28357832	28357832	28357832.0000
+Merritt	016303	1	39654943	39654943	39654943	39654943.0000
+sweetish	018001	1	5987435	5987435	5987435	5987435.0000
+dogging	018002	1	28357832	28357832	28357832	28357832.0000
+scornfully	018003	1	39654943	39654943	39654943	39654943.0000
+fetters	018012	1	28357832	28357832	28357832	28357832.0000
+bivalves	018013	1	39654943	39654943	39654943	39654943.0000
+skulking	018021	1	5987435	5987435	5987435	5987435.0000
+flint	018022	1	28357832	28357832	28357832	28357832.0000
+flopping	018023	1	39654943	39654943	39654943	39654943.0000
+Judas	018032	1	28357832	28357832	28357832	28357832.0000
+vacuuming	018033	1	39654943	39654943	39654943	39654943.0000
+medical	018041	1	5987435	5987435	5987435	5987435.0000
+bloodbath	018042	1	28357832	28357832	28357832	28357832.0000
+subschema	018043	1	39654943	39654943	39654943	39654943.0000
+interdependent	018051	1	5987435	5987435	5987435	5987435.0000
+Graves	018052	1	28357832	28357832	28357832	28357832.0000
+neonatal	018053	1	39654943	39654943	39654943	39654943.0000
+sorters	018061	1	5987435	5987435	5987435	5987435.0000
+epistle	018062	1	28357832	28357832	28357832	28357832.0000
+Conley	018101	1	5987435	5987435	5987435	5987435.0000
+lectured	018102	1	28357832	28357832	28357832	28357832.0000
+Abraham	018103	1	39654943	39654943	39654943	39654943.0000
+cage	018201	1	5987435	5987435	5987435	5987435.0000
+hushes	018202	1	28357832	28357832	28357832	28357832.0000
+Simla	018402	1	28357832	28357832	28357832	28357832.0000
+reporters	018403	1	39654943	39654943	39654943	39654943.0000
+coexist	018601	1	5987435	5987435	5987435	5987435.0000
+Beebe	018602	1	28357832	28357832	28357832	28357832.0000
+Taoism	018603	1	39654943	39654943	39654943	39654943.0000
+Connally	018801	1	5987435	5987435	5987435	5987435.0000
+fetched	018802	1	28357832	28357832	28357832	28357832.0000
+checkpoints	018803	1	39654943	39654943	39654943	39654943.0000
+gritty	018811	1	5987435	5987435	5987435	5987435.0000
+firearm	018812	1	28357832	28357832	28357832	28357832.0000
+minima	019101	1	5987435	5987435	5987435	5987435.0000
+Selfridge	019102	1	28357832	28357832	28357832	28357832.0000
+disable	019103	1	39654943	39654943	39654943	39654943.0000
+witchcraft	019201	1	5987435	5987435	5987435	5987435.0000
+betroth	030501	1	5987435	5987435	5987435	5987435.0000
+Manhattanize	030502	1	28357832	28357832	28357832	28357832.0000
+imprint	030503	1	39654943	39654943	39654943	39654943.0000
+swelling	031901	1	5987435	5987435	5987435	5987435.0000
+interrelationships	036001	1	5987435	5987435	5987435	5987435.0000
+riser	036002	1	28357832	28357832	28357832	28357832.0000
+bee	038001	1	5987435	5987435	5987435	5987435.0000
+kanji	038002	1	28357832	28357832	28357832	28357832.0000
+dental	038003	1	39654943	39654943	39654943	39654943.0000
+railway	038011	1	5987435	5987435	5987435	5987435.0000
+validate	038012	1	28357832	28357832	28357832	28357832.0000
+normalizes	038013	1	39654943	39654943	39654943	39654943.0000
+Kline	038101	1	5987435	5987435	5987435	5987435.0000
+Anatole	038102	1	28357832	28357832	28357832	28357832.0000
+partridges	038103	1	39654943	39654943	39654943	39654943.0000
+recruited	038201	1	5987435	5987435	5987435	5987435.0000
+dimensions	038202	1	28357832	28357832	28357832	28357832.0000
+Chicana	038203	1	39654943	39654943	39654943	39654943.0000
+select t3.companynr,fld3,sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 512 group by companynr,fld3;
+companynr	fld3	sum(price)
+512	boat	786542
+512	capably	786542
+512	cupboard	786542
+512	decliner	786542
+512	descendants	786542
+512	dopers	786542
+512	erases	786542
+512	Micronesia	786542
+512	Miles	786542
+512	skies	786542
+select t2.companynr,count(*),min(fld3),max(fld3),sum(price),avg(price) from t2,t3 where t3.companynr >= 30 and t3.companynr <= 58 and t3.t2nr = t2.fld1 and 1+1=2 group by t2.companynr;
+companynr	count(*)	min(fld3)	max(fld3)	sum(price)	avg(price)
+00	1	Omaha	Omaha	5987435	5987435.0000
+36	1	dubbed	dubbed	28357832	28357832.0000
+37	83	Abraham	Wotan	1908978016	22999735.1325
+50	2	scribbled	tapestry	68012775	34006387.5000
+select t3.companynr+0,t3.t2nr,fld3,sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 37 group by 1,t3.t2nr,fld3,fld3,fld3,fld3,fld3 order by fld1;
+t3.companynr+0	t2nr	fld3	sum(price)
+37	1	Omaha	5987435
+37	11401	breaking	5987435
+37	11402	Romans	28357832
+37	11403	intercepted	39654943
+37	11501	bewilderingly	5987435
+37	11701	astound	5987435
+37	11702	admonishing	28357832
+37	11703	sumac	39654943
+37	12001	flanking	5987435
+37	12003	combed	39654943
+37	12301	Eulerian	5987435
+37	12302	dubbed	28357832
+37	12303	Kane	39654943
+37	12501	annihilates	5987435
+37	12602	Wotan	28357832
+37	12603	snatching	39654943
+37	12701	grazing	5987435
+37	12702	Baird	28357832
+37	12703	celery	39654943
+37	13601	handgun	5987435
+37	13602	foldout	28357832
+37	13603	mystic	39654943
+37	13801	intelligibility	5987435
+37	13802	Augustine	28357832
+37	13803	teethe	39654943
+37	13901	scholastics	5987435
+37	16001	audiology	5987435
+37	16201	wallet	5987435
+37	16202	parters	28357832
+37	16301	eschew	5987435
+37	16302	quitter	28357832
+37	16303	neat	39654943
+37	18001	jarring	5987435
+37	18002	tinily	28357832
+37	18003	balled	39654943
+37	18012	impulsive	28357832
+37	18013	starlet	39654943
+37	18021	lawgiver	5987435
+37	18022	stated	28357832
+37	18023	readable	39654943
+37	18032	testicle	28357832
+37	18033	Parsifal	39654943
+37	18041	Punjab	5987435
+37	18042	Merritt	28357832
+37	18043	Quixotism	39654943
+37	18051	sureties	5987435
+37	18052	puddings	28357832
+37	18053	tapestry	39654943
+37	18061	trimmings	5987435
+37	18062	humility	28357832
+37	18101	tragedies	5987435
+37	18102	skulking	28357832
+37	18103	flint	39654943
+37	18201	relaxing	5987435
+37	18202	offload	28357832
+37	18402	suites	28357832
+37	18403	lists	39654943
+37	18601	vacuuming	5987435
+37	18602	dentally	28357832
+37	18603	humanness	39654943
+37	18801	inch	5987435
+37	18802	Weissmuller	28357832
+37	18803	irresponsibly	39654943
+37	18811	repetitions	5987435
+37	18812	Antares	28357832
+37	19101	ventilate	5987435
+37	19102	pityingly	28357832
+37	19103	interdependent	39654943
+37	19201	Graves	5987435
+37	30501	neonatal	5987435
+37	30502	scribbled	28357832
+37	30503	chafe	39654943
+37	31901	realtor	5987435
+37	36001	elite	5987435
+37	36002	funereal	28357832
+37	38001	Conley	5987435
+37	38002	lectured	28357832
+37	38003	Abraham	39654943
+37	38011	groupings	5987435
+37	38012	dissociate	28357832
+37	38013	coexist	39654943
+37	38101	rusting	5987435
+37	38102	galling	28357832
+37	38103	obliterates	39654943
+37	38201	resumes	5987435
+37	38202	analyzable	28357832
+37	38203	terminator	39654943
+select sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 512 and t3.t2nr = 38008 and t2.fld1 = 38008 or t2.fld1= t3.t2nr and t3.t2nr = 38008 and t2.fld1 = 38008;
+sum(price)
+234298
+select t2.fld1,sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 512 and t3.t2nr = 38008 and t2.fld1 = 38008 or t2.fld1 = t3.t2nr and t3.t2nr = 38008 and t2.fld1 = 38008 or t3.t2nr = t2.fld1 and t2.fld1 = 38008 group by t2.fld1;
+fld1	sum(price)
+038008	234298
+explain select fld3 from t2 where 1>2 or 2>3;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Impossible WHERE
+explain select fld3 from t2 where fld1=fld1;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	NULL
+select companynr,fld1 from t2 HAVING fld1=250501 or fld1=250502;
+companynr	fld1
+34	250501
+34	250502
+select companynr,fld1 from t2 WHERE fld1>=250501 HAVING fld1<=250502;
+companynr	fld1
+34	250501
+34	250502
+select companynr,count(*) as count,sum(fld1) as sum from t2 group by companynr having count > 40 and sum/count >= 120000;
+companynr	count	sum
+00	82	10355753
+29	95	14473298
+34	70	17788966
+37	588	83602098
+41	52	12816335
+select companynr from t2 group by companynr having count(*) > 40 and sum(fld1)/count(*) >= 120000 ;
+companynr
+00
+29
+34
+37
+41
+select t2.companynr,companyname,count(*) from t2,t4 where t2.companynr=t4.companynr group by companyname having t2.companynr >= 40;
+companynr	companyname	count(*)
+68	company 10	12
+50	company 11	11
+40	company 5	37
+41	company 6	52
+53	company 7	4
+58	company 8	23
+65	company 9	10
+select count(*) from t2;
+count(*)
+1199
+select count(*) from t2 where fld1 < 098024;
+count(*)
+387
+select min(fld1) from t2 where fld1>= 098024;
+min(fld1)
+98024
+select max(fld1) from t2 where fld1>= 098024;
+max(fld1)
+1232609
+select count(*) from t3 where price2=76234234;
+count(*)
+4181
+select count(*) from t3 where companynr=512 and price2=76234234;
+count(*)
+4181
+explain select min(fld1),max(fld1),count(*) from t2;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Select tables optimized away
+select min(fld1),max(fld1),count(*) from t2;
+min(fld1)	max(fld1)	count(*)
+0	1232609	1199
+select min(t2nr),max(t2nr) from t3 where t2nr=2115 and price2=823742;
+min(t2nr)	max(t2nr)
+2115	2115
+select count(*),min(t2nr),max(t2nr) from t3 where name='spates' and companynr=78;
+count(*)	min(t2nr)	max(t2nr)
+4181	4	41804
+select t2nr,count(*) from t3 where name='gems' group by t2nr limit 20;
+t2nr	count(*)
+9	1
+19	1
+29	1
+39	1
+49	1
+59	1
+69	1
+79	1
+89	1
+99	1
+109	1
+119	1
+129	1
+139	1
+149	1
+159	1
+169	1
+179	1
+189	1
+199	1
+select max(t2nr) from t3 where price=983543950;
+max(t2nr)
+41807
+select t1.period from t3 = t1 limit 1;
+period
+1001
+select t1.period from t1 as t1 limit 1;
+period
+9410
+select t1.period as "Nuvarande period" from t1 as t1 limit 1;
+Nuvarande period
+9410
+select period as ok_period from t1 limit 1;
+ok_period
+9410
+select period as ok_period from t1 group by ok_period limit 1;
+ok_period
+9410
+select 1+1 as summa from t1 group by summa limit 1;
+summa
+2
+select period as "Nuvarande period" from t1 group by "Nuvarande period" limit 1;
+Nuvarande period
+9410
+show tables;
+Tables_in_test
+t1
+t2
+t3
+t4
+show tables from test like "s%";
+Tables_in_test (s%)
+show tables from test like "t?";
+Tables_in_test (t?)
+show full columns from t2;
+Field	Type	Collation	Null	Key	Default	Extra	Privileges	Comment
+auto	int(11)	NULL	NO	PRI	NULL	auto_increment	#	
+fld1	int(6) unsigned zerofill	NULL	NO	UNI	000000		#	
+companynr	tinyint(2) unsigned zerofill	NULL	NO		00		#	
+fld3	char(30)	latin1_swedish_ci	NO	MUL			#	
+fld4	char(35)	latin1_swedish_ci	NO				#	
+fld5	char(35)	latin1_swedish_ci	NO				#	
+fld6	char(4)	latin1_swedish_ci	NO				#	
+show full columns from t2 from test like 'f%';
+Field	Type	Collation	Null	Key	Default	Extra	Privileges	Comment
+fld1	int(6) unsigned zerofill	NULL	NO	UNI	000000		#	
+fld3	char(30)	latin1_swedish_ci	NO	MUL			#	
+fld4	char(35)	latin1_swedish_ci	NO				#	
+fld5	char(35)	latin1_swedish_ci	NO				#	
+fld6	char(4)	latin1_swedish_ci	NO				#	
+show full columns from t2 from test like 's%';
+Field	Type	Collation	Null	Key	Default	Extra	Privileges	Comment
+show keys from t2;
+Table	Non_unique	Key_name	Seq_in_index	Column_name	Collation	Cardinality	Sub_part	Packed	Null	Index_type	Comment	Index_comment
+t2	0	PRIMARY	1	auto	A	1199	NULL	NULL		BTREE		
+t2	0	fld1	1	fld1	A	1199	NULL	NULL		BTREE		
+t2	1	fld3	1	fld3	A	NULL	NULL	NULL		BTREE		
+drop table t4, t3, t2, t1;
+CREATE TABLE t1 (
+cont_nr int(11) NOT NULL auto_increment,
+ver_nr int(11) NOT NULL default '0',
+aufnr int(11) NOT NULL default '0',
+username varchar(50) NOT NULL default '',
+hdl_nr int(11) NOT NULL default '0',
+eintrag date NOT NULL default '0000-00-00',
+st_klasse varchar(40) NOT NULL default '',
+st_wert varchar(40) NOT NULL default '',
+st_zusatz varchar(40) NOT NULL default '',
+st_bemerkung varchar(255) NOT NULL default '',
+kunden_art varchar(40) NOT NULL default '',
+mcbs_knr int(11) default NULL,
+mcbs_aufnr int(11) NOT NULL default '0',
+schufa_status char(1) default '?',
+bemerkung text,
+wirknetz text,
+wf_igz int(11) NOT NULL default '0',
+tarifcode varchar(80) default NULL,
+recycle char(1) default NULL,
+sim varchar(30) default NULL,
+mcbs_tpl varchar(30) default NULL,
+emp_nr int(11) NOT NULL default '0',
+laufzeit int(11) default NULL,
+hdl_name varchar(30) default NULL,
+prov_hdl_nr int(11) NOT NULL default '0',
+auto_wirknetz varchar(50) default NULL,
+auto_billing varchar(50) default NULL,
+touch timestamp NOT NULL,
+kategorie varchar(50) default NULL,
+kundentyp varchar(20) NOT NULL default '',
+sammel_rech_msisdn varchar(30) NOT NULL default '',
+p_nr varchar(9) NOT NULL default '',
+suffix char(3) NOT NULL default '',
+PRIMARY KEY (cont_nr),
+KEY idx_aufnr(aufnr),
+KEY idx_hdl_nr(hdl_nr),
+KEY idx_st_klasse(st_klasse),
+KEY ver_nr(ver_nr),
+KEY eintrag_idx(eintrag),
+KEY emp_nr_idx(emp_nr),
+KEY wf_igz(wf_igz),
+KEY touch(touch),
+KEY hdl_tag(eintrag,hdl_nr),
+KEY prov_hdl_nr(prov_hdl_nr),
+KEY mcbs_aufnr(mcbs_aufnr),
+KEY kundentyp(kundentyp),
+KEY p_nr(p_nr,suffix)
+) ENGINE=MyISAM;
+INSERT INTO t1 VALUES (3359356,405,3359356,'Mustermann Musterfrau',52500,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1485525,2122316,'+','','N',1909160,'MobilComSuper92000D2',NULL,NULL,'MS9ND2',3,24,'MobilCom Shop Koeln',52500,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359357,468,3359357,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1503580,2139699,'+','','P',1909171,'MobilComSuper9D1T10SFreisprech(Akquise)',NULL,NULL,'MS9NS1',327,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359358,407,3359358,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1501358,2137473,'N','','N',1909159,'MobilComSuper92000D2',NULL,NULL,'MS9ND2',325,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359359,468,3359359,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1507831,2143894,'+','','P',1909162,'MobilComSuper9D1T10SFreisprech(Akquise)',NULL,NULL,'MS9NS1',327,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359360,0,0,'Mustermann Musterfrau',29674907,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1900169997,2414578,'+',NULL,'N',1909148,'',NULL,NULL,'RV99066_2',20,NULL,'POS',29674907,NULL,NULL,20010202105916,'Mobilfunk','','','97317481','007');
+INSERT INTO t1 VALUES (3359361,406,3359361,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag storniert','','(7001-84):Storno, Kd. möchte nicht mehr','privat',NULL,0,'+','','P',1909150,'MobilComSuper92000D1(Akquise)',NULL,NULL,'MS9ND1',325,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359362,406,3359362,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1509984,2145874,'+','','P',1909154,'MobilComSuper92000D1(Akquise)',NULL,NULL,'MS9ND1',327,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+SELECT ELT(FIELD(kundentyp,'PP','PPA','PG','PGA','FK','FKA','FP','FPA','K','KA','V','VA',''), 'Privat (Private Nutzung)','Privat (Private Nutzung) Sitz im Ausland','Privat (geschaeftliche Nutzung)','Privat (geschaeftliche Nutzung) Sitz im Ausland','Firma (Kapitalgesellschaft)','Firma (Kapitalgesellschaft) Sitz im Ausland','Firma (Personengesellschaft)','Firma (Personengesellschaft) Sitz im Ausland','oeff. rechtl. Koerperschaft','oeff. rechtl. Koerperschaft Sitz im Ausland','Eingetragener Verein','Eingetragener Verein Sitz im Ausland','Typ unbekannt') AS Kundentyp ,kategorie FROM t1 WHERE hdl_nr < 2000000 AND kategorie IN ('Prepaid','Mobilfunk') AND st_klasse = 'Workflow' GROUP BY kundentyp ORDER BY kategorie;
+Kundentyp	kategorie
+Privat (Private Nutzung)	Mobilfunk
+Warnings:
+Warning	1052	Column 'kundentyp' in group statement is ambiguous
+drop table t1;
+SELECT sleep(50000);
+SELECT sleep(50000);
+# -- Success: more than --thread_pool_max_threads normal connections not possible
+SELECT 'Connection on extra port ok';
+Connection on extra port ok
+Connection on extra port ok
+KILL QUERY @id;
+KILL QUERY @id;
+SELECT 'Connection on extra port 2 ok';
+Connection on extra port 2 ok
+Connection on extra port 2 ok
+# -- Success: more than --extra-max-connections + 1 normal connections not possible
+sleep(50000)
+1
+sleep(50000)
+1

=== added file 'Percona-Server/mysql-test/r/pool_of_threads_high_prio_tickets.result'
--- Percona-Server/mysql-test/r/pool_of_threads_high_prio_tickets.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/pool_of_threads_high_prio_tickets.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,2169 @@
+SELECT @@thread_pool_high_prio_tickets;
+@@thread_pool_high_prio_tickets
+2
+drop table if exists t1,t2,t3,t4;
+CREATE TABLE t1 (
+Period smallint(4) unsigned zerofill DEFAULT '0000' NOT NULL,
+Varor_period smallint(4) unsigned DEFAULT '0' NOT NULL
+);
+INSERT INTO t1 VALUES (9410,9412);
+select period from t1;
+period
+9410
+select * from t1;
+Period	Varor_period
+9410	9412
+select t1.* from t1;
+Period	Varor_period
+9410	9412
+CREATE TABLE t2 (
+auto int not null auto_increment,
+fld1 int(6) unsigned zerofill DEFAULT '000000' NOT NULL,
+companynr tinyint(2) unsigned zerofill DEFAULT '00' NOT NULL,
+fld3 char(30) DEFAULT '' NOT NULL,
+fld4 char(35) DEFAULT '' NOT NULL,
+fld5 char(35) DEFAULT '' NOT NULL,
+fld6 char(4) DEFAULT '' NOT NULL,
+UNIQUE fld1 (fld1),
+KEY fld3 (fld3),
+PRIMARY KEY (auto)
+);
+select t2.fld3 from t2 where companynr = 58 and fld3 like "%imaginable%";
+fld3
+imaginable
+select fld3 from t2 where fld3 like "%cultivation" ;
+fld3
+cultivation
+select t2.fld3,companynr from t2 where companynr = 57+1 order by fld3;
+fld3	companynr
+concoct	58
+druggists	58
+engrossing	58
+Eurydice	58
+exclaimers	58
+ferociousness	58
+hopelessness	58
+Huey	58
+imaginable	58
+judges	58
+merging	58
+ostrich	58
+peering	58
+Phelps	58
+presumes	58
+Ruth	58
+sentences	58
+Shylock	58
+straggled	58
+synergy	58
+thanking	58
+tying	58
+unlocks	58
+select fld3,companynr from t2 where companynr = 58 order by fld3;
+fld3	companynr
+concoct	58
+druggists	58
+engrossing	58
+Eurydice	58
+exclaimers	58
+ferociousness	58
+hopelessness	58
+Huey	58
+imaginable	58
+judges	58
+merging	58
+ostrich	58
+peering	58
+Phelps	58
+presumes	58
+Ruth	58
+sentences	58
+Shylock	58
+straggled	58
+synergy	58
+thanking	58
+tying	58
+unlocks	58
+select fld3 from t2 order by fld3 desc limit 10;
+fld3
+youthfulness
+yelped
+Wotan
+workers
+Witt
+witchcraft
+Winsett
+Willy
+willed
+wildcats
+select fld3 from t2 order by fld3 desc limit 5;
+fld3
+youthfulness
+yelped
+Wotan
+workers
+Witt
+select fld3 from t2 order by fld3 desc limit 5,5;
+fld3
+witchcraft
+Winsett
+Willy
+willed
+wildcats
+select t2.fld3 from t2 where fld3 = 'honeysuckle';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'honeysuckl_';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'hon_ysuckl_';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'honeysuckle%';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'h%le';
+fld3
+honeysuckle
+select t2.fld3 from t2 where fld3 LIKE 'honeysuckle_';
+fld3
+select t2.fld3 from t2 where fld3 LIKE 'don_t_find_me_please%';
+fld3
+explain select t2.fld3 from t2 where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ref	fld3	fld3	30	const	1	Using where; Using index
+explain select fld3 from t2 ignore index (fld3) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+explain select fld3 from t2 use index (fld1) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+explain select fld3 from t2 use index (fld3) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ref	fld3	fld3	30	const	1	Using where; Using index
+explain select fld3 from t2 use index (fld1,fld3) where fld3 = 'honeysuckle';
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ref	fld3	fld3	30	const	1	Using where; Using index
+explain select fld3 from t2 ignore index (fld3,not_used);
+ERROR 42000: Key 'not_used' doesn't exist in table 't2'
+explain select fld3 from t2 use index (not_used);
+ERROR 42000: Key 'not_used' doesn't exist in table 't2'
+select t2.fld3 from t2 where fld3 >= 'honeysuckle' and fld3 <= 'honoring' order by fld3;
+fld3
+honeysuckle
+honoring
+explain select t2.fld3 from t2 where fld3 >= 'honeysuckle' and fld3 <= 'honoring' order by fld3;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	range	fld3	fld3	30	NULL	2	Using where; Using index
+select fld1,fld3 from t2 where fld3="Colombo" or fld3 = "nondecreasing" order by fld3;
+fld1	fld3
+148504	Colombo
+068305	Colombo
+000000	nondecreasing
+select fld1,fld3 from t2 where companynr = 37 and fld3 = 'appendixes';
+fld1	fld3
+232605	appendixes
+1232605	appendixes
+1232606	appendixes
+1232607	appendixes
+1232608	appendixes
+1232609	appendixes
+select fld1 from t2 where fld1=250501 or fld1="250502";
+fld1
+250501
+250502
+explain select fld1 from t2 where fld1=250501 or fld1="250502";
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	range	fld1	fld1	4	NULL	2	Using where; Using index
+select fld1 from t2 where fld1=250501 or fld1=250502 or fld1 >= 250505 and fld1 <= 250601 or fld1 between 250501 and 250502;
+fld1
+250501
+250502
+250505
+250601
+explain select fld1 from t2 where fld1=250501 or fld1=250502 or fld1 >= 250505 and fld1 <= 250601 or fld1 between 250501 and 250502;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	range	fld1	fld1	4	NULL	4	Using where; Using index
+select fld1,fld3 from t2 where companynr = 37 and fld3 like 'f%';
+fld1	fld3
+012001	flanking
+013602	foldout
+013606	fingerings
+018007	fanatic
+018017	featherweight
+018054	fetters
+018103	flint
+018104	flopping
+036002	funereal
+038017	fetched
+038205	firearm
+058004	Fenton
+088303	feminine
+186002	freakish
+188007	flurried
+188505	fitting
+198006	furthermore
+202301	Fitzpatrick
+208101	fiftieth
+208113	freest
+218008	finishers
+218022	feed
+218401	faithful
+226205	foothill
+226209	furnishings
+228306	forthcoming
+228311	fated
+231315	freezes
+232102	forgivably
+238007	filial
+238008	fixedly
+select fld3 from t2 where fld3 like "L%" and fld3 = "ok";
+fld3
+select fld3 from t2 where (fld3 like "C%" and fld3 = "Chantilly");
+fld3
+Chantilly
+select fld1,fld3 from t2 where fld1 like "25050%";
+fld1	fld3
+250501	poisoning
+250502	Iraqis
+250503	heaving
+250504	population
+250505	bomb
+select fld1,fld3 from t2 where fld1 like "25050_";
+fld1	fld3
+250501	poisoning
+250502	Iraqis
+250503	heaving
+250504	population
+250505	bomb
+select distinct companynr from t2;
+companynr
+00
+37
+36
+50
+58
+29
+40
+53
+65
+41
+34
+68
+select distinct companynr from t2 order by companynr;
+companynr
+00
+29
+34
+36
+37
+40
+41
+50
+53
+58
+65
+68
+select distinct companynr from t2 order by companynr desc;
+companynr
+68
+65
+58
+53
+50
+41
+40
+37
+36
+34
+29
+00
+select distinct t2.fld3,period from t2,t1 where companynr=37 and fld3 like "O%";
+fld3	period
+obliterates	9410
+offload	9410
+opaquely	9410
+organizer	9410
+overestimating	9410
+overlay	9410
+select distinct fld3 from t2 where companynr = 34 order by fld3;
+fld3
+absentee
+accessed
+ahead
+alphabetic
+Asiaticizations
+attitude
+aye
+bankruptcies
+belays
+Blythe
+bomb
+boulevard
+bulldozes
+cannot
+caressing
+charcoal
+checksumming
+chess
+clubroom
+colorful
+cosy
+creator
+crying
+Darius
+diffusing
+duality
+Eiffel
+Epiphany
+Ernestine
+explorers
+exterminated
+famine
+forked
+Gershwins
+heaving
+Hodges
+Iraqis
+Italianization
+Lagos
+landslide
+libretto
+Majorca
+mastering
+narrowed
+occurred
+offerers
+Palestine
+Peruvianizes
+pharmaceutic
+poisoning
+population
+Pygmalion
+rats
+realest
+recording
+regimented
+retransmitting
+reviver
+rouses
+scars
+sicker
+sleepwalk
+stopped
+sugars
+translatable
+uncles
+unexpected
+uprisings
+versatility
+vest
+select distinct fld3 from t2 limit 10;
+fld3
+abates
+abiding
+Abraham
+abrogating
+absentee
+abut
+accessed
+accruing
+accumulating
+accuracies
+select distinct fld3 from t2 having fld3 like "A%" limit 10;
+fld3
+abates
+abiding
+Abraham
+abrogating
+absentee
+abut
+accessed
+accruing
+accumulating
+accuracies
+select distinct substring(fld3,1,3) from t2 where fld3 like "A%";
+substring(fld3,1,3)
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+Adl
+adm
+Ado
+ads
+adv
+aer
+aff
+afi
+afl
+afo
+agi
+ahe
+aim
+air
+Ald
+alg
+ali
+all
+alp
+alr
+ama
+ame
+amm
+ana
+and
+ane
+Ang
+ani
+Ann
+Ant
+api
+app
+aqu
+Ara
+arc
+Arm
+arr
+Art
+Asi
+ask
+asp
+ass
+ast
+att
+aud
+Aug
+aut
+ave
+avo
+awe
+aye
+Azt
+select distinct substring(fld3,1,3) as a from t2 having a like "A%" order by a limit 10;
+a
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+select distinct substring(fld3,1,3) from t2 where fld3 like "A%" limit 10;
+substring(fld3,1,3)
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+select distinct substring(fld3,1,3) as a from t2 having a like "A%" limit 10;
+a
+aba
+abi
+Abr
+abs
+abu
+acc
+acq
+acu
+Ade
+adj
+create table t3 (
+period    int not null,
+name      char(32) not null,
+companynr int not null,
+price     double(11,0),
+price2     double(11,0),
+key (period),
+key (name)
+);
+create temporary table tmp engine = myisam select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+insert into tmp select * from t3;
+insert into t3 select * from tmp;
+alter table t3 add t2nr int not null auto_increment primary key first;
+drop table tmp;
+SET BIG_TABLES=1;
+select distinct concat(fld3," ",fld3) as namn from t2,t3 where t2.fld1=t3.t2nr order by namn limit 10;
+namn
+Abraham Abraham
+abrogating abrogating
+admonishing admonishing
+Adolph Adolph
+afield afield
+aging aging
+ammonium ammonium
+analyzable analyzable
+animals animals
+animized animized
+SET BIG_TABLES=0;
+select distinct concat(fld3," ",fld3) from t2,t3 where t2.fld1=t3.t2nr order by fld3 limit 10;
+concat(fld3," ",fld3)
+Abraham Abraham
+abrogating abrogating
+admonishing admonishing
+Adolph Adolph
+afield afield
+aging aging
+ammonium ammonium
+analyzable analyzable
+animals animals
+animized animized
+select distinct fld5 from t2 limit 10;
+fld5
+neat
+Steinberg
+jarring
+tinily
+balled
+persist
+attainments
+fanatic
+measures
+rightfulness
+select distinct fld3,count(*) from t2 group by companynr,fld3 limit 10;
+fld3	count(*)
+affixed	1
+and	1
+annoyers	1
+Anthony	1
+assayed	1
+assurers	1
+attendants	1
+bedlam	1
+bedpost	1
+boasted	1
+SET BIG_TABLES=1;
+select distinct fld3,count(*) from t2 group by companynr,fld3 limit 10;
+fld3	count(*)
+affixed	1
+and	1
+annoyers	1
+Anthony	1
+assayed	1
+assurers	1
+attendants	1
+bedlam	1
+bedpost	1
+boasted	1
+SET BIG_TABLES=0;
+select distinct fld3,repeat("a",length(fld3)),count(*) from t2 group by companynr,fld3 limit 100,10;
+fld3	repeat("a",length(fld3))	count(*)
+circus	aaaaaa	1
+cited	aaaaa	1
+Colombo	aaaaaaa	1
+congresswoman	aaaaaaaaaaaaa	1
+contrition	aaaaaaaaaa	1
+corny	aaaaa	1
+cultivation	aaaaaaaaaaa	1
+definiteness	aaaaaaaaaaaa	1
+demultiplex	aaaaaaaaaaa	1
+disappointing	aaaaaaaaaaaaa	1
+select distinct companynr,rtrim(space(512+companynr)) from t3 order by 1,2;
+companynr	rtrim(space(512+companynr))
+37	
+78	
+101	
+154	
+311	
+447	
+512	
+select distinct fld3 from t2,t3 where t2.companynr = 34 and t2.fld1=t3.t2nr order by fld3;
+fld3
+explain select t3.t2nr,fld3 from t2,t3 where t2.companynr = 34 and t2.fld1=t3.t2nr order by t3.t2nr,fld3;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	fld1	NULL	NULL	NULL	1199	Using where; Using temporary; Using filesort
+1	SIMPLE	t3	eq_ref	PRIMARY	PRIMARY	4	test.t2.fld1	1	Using where; Using index
+explain select * from t3 as t1,t3 where t1.period=t3.period order by t3.period;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t1	ALL	period	NULL	NULL	NULL	41810	Using temporary; Using filesort
+1	SIMPLE	t3	ref	period	period	4	test.t1.period	4181	NULL
+explain select * from t3 as t1,t3 where t1.period=t3.period order by t3.period limit 10;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t3	index	period	period	4	NULL	1	NULL
+1	SIMPLE	t1	ref	period	period	4	test.t3.period	4181	NULL
+explain select * from t3 as t1,t3 where t1.period=t3.period order by t1.period limit 10;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t1	index	period	period	4	NULL	1	NULL
+1	SIMPLE	t3	ref	period	period	4	test.t1.period	4181	NULL
+select period from t1;
+period
+9410
+select period from t1 where period=1900;
+period
+select fld3,period from t1,t2 where fld1 = 011401 order by period;
+fld3	period
+breaking	9410
+select fld3,period from t2,t3 where t2.fld1 = 011401 and t2.fld1=t3.t2nr and t3.period=1001;
+fld3	period
+breaking	1001
+explain select fld3,period from t2,t3 where t2.fld1 = 011401 and t3.t2nr=t2.fld1 and 1001 = t3.period;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	const	fld1	fld1	4	const	1	NULL
+1	SIMPLE	t3	const	PRIMARY,period	PRIMARY	4	const	1	NULL
+select fld3,period from t2,t1 where companynr*10 = 37*10;
+fld3	period
+breaking	9410
+Romans	9410
+intercepted	9410
+bewilderingly	9410
+astound	9410
+admonishing	9410
+sumac	9410
+flanking	9410
+combed	9410
+subjective	9410
+scatterbrain	9410
+Eulerian	9410
+Kane	9410
+overlay	9410
+perturb	9410
+goblins	9410
+annihilates	9410
+Wotan	9410
+snatching	9410
+concludes	9410
+laterally	9410
+yelped	9410
+grazing	9410
+Baird	9410
+celery	9410
+misunderstander	9410
+handgun	9410
+foldout	9410
+mystic	9410
+succumbed	9410
+Nabisco	9410
+fingerings	9410
+aging	9410
+afield	9410
+ammonium	9410
+boat	9410
+intelligibility	9410
+Augustine	9410
+teethe	9410
+dreaded	9410
+scholastics	9410
+audiology	9410
+wallet	9410
+parters	9410
+eschew	9410
+quitter	9410
+neat	9410
+Steinberg	9410
+jarring	9410
+tinily	9410
+balled	9410
+persist	9410
+attainments	9410
+fanatic	9410
+measures	9410
+rightfulness	9410
+capably	9410
+impulsive	9410
+starlet	9410
+terminators	9410
+untying	9410
+announces	9410
+featherweight	9410
+pessimist	9410
+daughter	9410
+decliner	9410
+lawgiver	9410
+stated	9410
+readable	9410
+attrition	9410
+cascade	9410
+motors	9410
+interrogate	9410
+pests	9410
+stairway	9410
+dopers	9410
+testicle	9410
+Parsifal	9410
+leavings	9410
+postulation	9410
+squeaking	9410
+contrasted	9410
+leftover	9410
+whiteners	9410
+erases	9410
+Punjab	9410
+Merritt	9410
+Quixotism	9410
+sweetish	9410
+dogging	9410
+scornfully	9410
+bellow	9410
+bills	9410
+cupboard	9410
+sureties	9410
+puddings	9410
+fetters	9410
+bivalves	9410
+incurring	9410
+Adolph	9410
+pithed	9410
+Miles	9410
+trimmings	9410
+tragedies	9410
+skulking	9410
+flint	9410
+flopping	9410
+relaxing	9410
+offload	9410
+suites	9410
+lists	9410
+animized	9410
+multilayer	9410
+standardizes	9410
+Judas	9410
+vacuuming	9410
+dentally	9410
+humanness	9410
+inch	9410
+Weissmuller	9410
+irresponsibly	9410
+luckily	9410
+culled	9410
+medical	9410
+bloodbath	9410
+subschema	9410
+animals	9410
+Micronesia	9410
+repetitions	9410
+Antares	9410
+ventilate	9410
+pityingly	9410
+interdependent	9410
+Graves	9410
+neonatal	9410
+chafe	9410
+honoring	9410
+realtor	9410
+elite	9410
+funereal	9410
+abrogating	9410
+sorters	9410
+Conley	9410
+lectured	9410
+Abraham	9410
+Hawaii	9410
+cage	9410
+hushes	9410
+Simla	9410
+reporters	9410
+Dutchman	9410
+descendants	9410
+groupings	9410
+dissociate	9410
+coexist	9410
+Beebe	9410
+Taoism	9410
+Connally	9410
+fetched	9410
+checkpoints	9410
+rusting	9410
+galling	9410
+obliterates	9410
+traitor	9410
+resumes	9410
+analyzable	9410
+terminator	9410
+gritty	9410
+firearm	9410
+minima	9410
+Selfridge	9410
+disable	9410
+witchcraft	9410
+betroth	9410
+Manhattanize	9410
+imprint	9410
+peeked	9410
+swelling	9410
+interrelationships	9410
+riser	9410
+Gandhian	9410
+peacock	9410
+bee	9410
+kanji	9410
+dental	9410
+scarf	9410
+chasm	9410
+insolence	9410
+syndicate	9410
+alike	9410
+imperial	9410
+convulsion	9410
+railway	9410
+validate	9410
+normalizes	9410
+comprehensive	9410
+chewing	9410
+denizen	9410
+schemer	9410
+chronicle	9410
+Kline	9410
+Anatole	9410
+partridges	9410
+brunch	9410
+recruited	9410
+dimensions	9410
+Chicana	9410
+announced	9410
+praised	9410
+employing	9410
+linear	9410
+quagmire	9410
+western	9410
+relishing	9410
+serving	9410
+scheduling	9410
+lore	9410
+eventful	9410
+arteriole	9410
+disentangle	9410
+cured	9410
+Fenton	9410
+avoidable	9410
+drains	9410
+detectably	9410
+husky	9410
+impelling	9410
+undoes	9410
+evened	9410
+squeezes	9410
+destroyer	9410
+rudeness	9410
+beaner	9410
+boorish	9410
+Everhart	9410
+encompass	9410
+mushrooms	9410
+Alison	9410
+externally	9410
+pellagra	9410
+cult	9410
+creek	9410
+Huffman	9410
+Majorca	9410
+governing	9410
+gadfly	9410
+reassigned	9410
+intentness	9410
+craziness	9410
+psychic	9410
+squabbled	9410
+burlesque	9410
+capped	9410
+extracted	9410
+DiMaggio	9410
+exclamation	9410
+subdirectory	9410
+Gothicism	9410
+feminine	9410
+metaphysically	9410
+sanding	9410
+Miltonism	9410
+freakish	9410
+index	9410
+straight	9410
+flurried	9410
+denotative	9410
+coming	9410
+commencements	9410
+gentleman	9410
+gifted	9410
+Shanghais	9410
+sportswriting	9410
+sloping	9410
+navies	9410
+leaflet	9410
+shooter	9410
+Joplin	9410
+babies	9410
+assails	9410
+admiring	9410
+swaying	9410
+Goldstine	9410
+fitting	9410
+Norwalk	9410
+analogy	9410
+deludes	9410
+cokes	9410
+Clayton	9410
+exhausts	9410
+causality	9410
+sating	9410
+icon	9410
+throttles	9410
+communicants	9410
+dehydrate	9410
+priceless	9410
+publicly	9410
+incidentals	9410
+commonplace	9410
+mumbles	9410
+furthermore	9410
+cautioned	9410
+parametrized	9410
+registration	9410
+sadly	9410
+positioning	9410
+babysitting	9410
+eternal	9410
+hoarder	9410
+congregates	9410
+rains	9410
+workers	9410
+sags	9410
+unplug	9410
+garage	9410
+boulder	9410
+specifics	9410
+Teresa	9410
+Winsett	9410
+convenient	9410
+buckboards	9410
+amenities	9410
+resplendent	9410
+sews	9410
+participated	9410
+Simon	9410
+certificates	9410
+Fitzpatrick	9410
+Evanston	9410
+misted	9410
+textures	9410
+save	9410
+count	9410
+rightful	9410
+chaperone	9410
+Lizzy	9410
+clenched	9410
+effortlessly	9410
+accessed	9410
+beaters	9410
+Hornblower	9410
+vests	9410
+indulgences	9410
+infallibly	9410
+unwilling	9410
+excrete	9410
+spools	9410
+crunches	9410
+overestimating	9410
+ineffective	9410
+humiliation	9410
+sophomore	9410
+star	9410
+rifles	9410
+dialysis	9410
+arriving	9410
+indulge	9410
+clockers	9410
+languages	9410
+Antarctica	9410
+percentage	9410
+ceiling	9410
+specification	9410
+regimented	9410
+ciphers	9410
+pictures	9410
+serpents	9410
+allot	9410
+realized	9410
+mayoral	9410
+opaquely	9410
+hostess	9410
+fiftieth	9410
+incorrectly	9410
+decomposition	9410
+stranglings	9410
+mixture	9410
+electroencephalography	9410
+similarities	9410
+charges	9410
+freest	9410
+Greenberg	9410
+tinting	9410
+expelled	9410
+warm	9410
+smoothed	9410
+deductions	9410
+Romano	9410
+bitterroot	9410
+corset	9410
+securing	9410
+environing	9410
+cute	9410
+Crays	9410
+heiress	9410
+inform	9410
+avenge	9410
+universals	9410
+Kinsey	9410
+ravines	9410
+bestseller	9410
+equilibrium	9410
+extents	9410
+relatively	9410
+pressure	9410
+critiques	9410
+befouled	9410
+rightfully	9410
+mechanizing	9410
+Latinizes	9410
+timesharing	9410
+Aden	9410
+embassies	9410
+males	9410
+shapelessly	9410
+mastering	9410
+Newtonian	9410
+finishers	9410
+abates	9410
+teem	9410
+kiting	9410
+stodgy	9410
+feed	9410
+guitars	9410
+airships	9410
+store	9410
+denounces	9410
+Pyle	9410
+Saxony	9410
+serializations	9410
+Peruvian	9410
+taxonomically	9410
+kingdom	9410
+stint	9410
+Sault	9410
+faithful	9410
+Ganymede	9410
+tidiness	9410
+gainful	9410
+contrary	9410
+Tipperary	9410
+tropics	9410
+theorizers	9410
+renew	9410
+already	9410
+terminal	9410
+Hegelian	9410
+hypothesizer	9410
+warningly	9410
+journalizing	9410
+nested	9410
+Lars	9410
+saplings	9410
+foothill	9410
+labeled	9410
+imperiously	9410
+reporters	9410
+furnishings	9410
+precipitable	9410
+discounts	9410
+excises	9410
+Stalin	9410
+despot	9410
+ripeness	9410
+Arabia	9410
+unruly	9410
+mournfulness	9410
+boom	9410
+slaughter	9410
+Sabine	9410
+handy	9410
+rural	9410
+organizer	9410
+shipyard	9410
+civics	9410
+inaccuracy	9410
+rules	9410
+juveniles	9410
+comprised	9410
+investigations	9410
+stabilizes	9410
+seminaries	9410
+Hunter	9410
+sporty	9410
+test	9410
+weasels	9410
+CERN	9410
+tempering	9410
+afore	9410
+Galatean	9410
+techniques	9410
+error	9410
+veranda	9410
+severely	9410
+Cassites	9410
+forthcoming	9410
+guides	9410
+vanish	9410
+lied	9410
+sawtooth	9410
+fated	9410
+gradually	9410
+widens	9410
+preclude	9410
+evenhandedly	9410
+percentage	9410
+disobedience	9410
+humility	9410
+gleaning	9410
+petted	9410
+bloater	9410
+minion	9410
+marginal	9410
+apiary	9410
+measures	9410
+precaution	9410
+repelled	9410
+primary	9410
+coverings	9410
+Artemia	9410
+navigate	9410
+spatial	9410
+Gurkha	9410
+meanwhile	9410
+Melinda	9410
+Butterfield	9410
+Aldrich	9410
+previewing	9410
+glut	9410
+unaffected	9410
+inmate	9410
+mineral	9410
+impending	9410
+meditation	9410
+ideas	9410
+miniaturizes	9410
+lewdly	9410
+title	9410
+youthfulness	9410
+creak	9410
+Chippewa	9410
+clamored	9410
+freezes	9410
+forgivably	9410
+reduce	9410
+McGovern	9410
+Nazis	9410
+epistle	9410
+socializes	9410
+conceptions	9410
+Kevin	9410
+uncovering	9410
+chews	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+appendixes	9410
+raining	9410
+infest	9410
+compartment	9410
+minting	9410
+ducks	9410
+roped	9410
+waltz	9410
+Lillian	9410
+repressions	9410
+chillingly	9410
+noncritical	9410
+lithograph	9410
+spongers	9410
+parenthood	9410
+posed	9410
+instruments	9410
+filial	9410
+fixedly	9410
+relives	9410
+Pandora	9410
+watering	9410
+ungrateful	9410
+secures	9410
+poison	9410
+dusted	9410
+encompasses	9410
+presentation	9410
+Kantian	9410
+select fld3,period,price,price2 from t2,t3 where t2.fld1=t3.t2nr and period >= 1001 and period <= 1002 and t2.companynr = 37 order by fld3,period, price;
+fld3	period	price	price2
+admonishing	1002	28357832	8723648
+analyzable	1002	28357832	8723648
+annihilates	1001	5987435	234724
+Antares	1002	28357832	8723648
+astound	1001	5987435	234724
+audiology	1001	5987435	234724
+Augustine	1002	28357832	8723648
+Baird	1002	28357832	8723648
+bewilderingly	1001	5987435	234724
+breaking	1001	5987435	234724
+Conley	1001	5987435	234724
+dentally	1002	28357832	8723648
+dissociate	1002	28357832	8723648
+elite	1001	5987435	234724
+eschew	1001	5987435	234724
+Eulerian	1001	5987435	234724
+flanking	1001	5987435	234724
+foldout	1002	28357832	8723648
+funereal	1002	28357832	8723648
+galling	1002	28357832	8723648
+Graves	1001	5987435	234724
+grazing	1001	5987435	234724
+groupings	1001	5987435	234724
+handgun	1001	5987435	234724
+humility	1002	28357832	8723648
+impulsive	1002	28357832	8723648
+inch	1001	5987435	234724
+intelligibility	1001	5987435	234724
+jarring	1001	5987435	234724
+lawgiver	1001	5987435	234724
+lectured	1002	28357832	8723648
+Merritt	1002	28357832	8723648
+neonatal	1001	5987435	234724
+offload	1002	28357832	8723648
+parters	1002	28357832	8723648
+pityingly	1002	28357832	8723648
+puddings	1002	28357832	8723648
+Punjab	1001	5987435	234724
+quitter	1002	28357832	8723648
+realtor	1001	5987435	234724
+relaxing	1001	5987435	234724
+repetitions	1001	5987435	234724
+resumes	1001	5987435	234724
+Romans	1002	28357832	8723648
+rusting	1001	5987435	234724
+scholastics	1001	5987435	234724
+skulking	1002	28357832	8723648
+stated	1002	28357832	8723648
+suites	1002	28357832	8723648
+sureties	1001	5987435	234724
+testicle	1002	28357832	8723648
+tinily	1002	28357832	8723648
+tragedies	1001	5987435	234724
+trimmings	1001	5987435	234724
+vacuuming	1001	5987435	234724
+ventilate	1001	5987435	234724
+wallet	1001	5987435	234724
+Weissmuller	1002	28357832	8723648
+Wotan	1002	28357832	8723648
+select t2.fld1,fld3,period,price,price2 from t2,t3 where t2.fld1>= 18201 and t2.fld1 <= 18811 and t2.fld1=t3.t2nr and period = 1001 and t2.companynr = 37;
+fld1	fld3	period	price	price2
+018201	relaxing	1001	5987435	234724
+018601	vacuuming	1001	5987435	234724
+018801	inch	1001	5987435	234724
+018811	repetitions	1001	5987435	234724
+create table t4 (
+companynr tinyint(2) unsigned zerofill NOT NULL default '00',
+companyname char(30) NOT NULL default '',
+PRIMARY KEY (companynr),
+UNIQUE KEY companyname(companyname)
+) ENGINE=MyISAM MAX_ROWS=50 PACK_KEYS=1 COMMENT='companynames';
+select STRAIGHT_JOIN t2.companynr,companyname from t4,t2 where t2.companynr=t4.companynr group by t2.companynr;
+companynr	companyname
+00	Unknown
+29	company 1
+34	company 2
+36	company 3
+37	company 4
+40	company 5
+41	company 6
+50	company 11
+53	company 7
+58	company 8
+65	company 9
+68	company 10
+select SQL_SMALL_RESULT t2.companynr,companyname from t4,t2 where t2.companynr=t4.companynr group by t2.companynr;
+companynr	companyname
+00	Unknown
+29	company 1
+34	company 2
+36	company 3
+37	company 4
+40	company 5
+41	company 6
+50	company 11
+53	company 7
+58	company 8
+65	company 9
+68	company 10
+select * from t1,t1 t12;
+Period	Varor_period	Period	Varor_period
+9410	9412	9410	9412
+select t2.fld1,t22.fld1 from t2,t2 t22 where t2.fld1 >= 250501 and t2.fld1 <= 250505 and t22.fld1 >= 250501 and t22.fld1 <= 250505;
+fld1	fld1
+250501	250501
+250502	250501
+250503	250501
+250504	250501
+250505	250501
+250501	250502
+250502	250502
+250503	250502
+250504	250502
+250505	250502
+250501	250503
+250502	250503
+250503	250503
+250504	250503
+250505	250503
+250501	250504
+250502	250504
+250503	250504
+250504	250504
+250505	250504
+250501	250505
+250502	250505
+250503	250505
+250504	250505
+250505	250505
+insert into t2 (fld1, companynr) values (999999,99);
+select t2.companynr,companyname from t2 left join t4 using (companynr) where t4.companynr is null;
+companynr	companyname
+99	NULL
+select count(*) from t2 left join t4 using (companynr) where t4.companynr is not null;
+count(*)
+1199
+explain select t2.companynr,companyname from t2 left join t4 using (companynr) where t4.companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1200	NULL
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	Using where; Not exists
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1200	Using where; Not exists; Using join buffer (Block Nested Loop)
+select companynr,companyname from t2 left join t4 using (companynr) where companynr is null;
+companynr	companyname
+select count(*) from t2 left join t4 using (companynr) where companynr is not null;
+count(*)
+1200
+explain select companynr,companyname from t2 left join t4 using (companynr) where companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Impossible WHERE
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Impossible WHERE
+delete from t2 where fld1=999999;
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	NULL
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 or t2.companynr < 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	NULL
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 and t4.companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where
+1	SIMPLE	t4	eq_ref	PRIMARY	PRIMARY	1	test.t2.companynr	1	NULL
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 or companynr < 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 and companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 or t2.companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where t2.companynr > 0 or t2.companynr < 0 or t4.companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select t2.companynr,companyname from t4 left join t2 using (companynr) where ifnull(t2.companynr,1)>0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	NULL
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 or companynr is null;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where companynr > 0 or companynr < 0 or companynr > 0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	PRIMARY	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+explain select companynr,companyname from t4 left join t2 using (companynr) where ifnull(companynr,1)>0;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	ALL	NULL	NULL	NULL	NULL	12	Using where
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+select distinct t2.companynr,t4.companynr from t2,t4 where t2.companynr=t4.companynr+1;
+companynr	companynr
+37	36
+41	40
+explain select distinct t2.companynr,t4.companynr from t2,t4 where t2.companynr=t4.companynr+1;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t4	index	NULL	PRIMARY	1	NULL	12	Using index; Using temporary
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	Using where; Using join buffer (Block Nested Loop)
+select t2.fld1,t2.companynr,fld3,period from t3,t2 where t2.fld1 = 38208 and t2.fld1=t3.t2nr and period = 1008 or t2.fld1 = 38008 and t2.fld1 =t3.t2nr and period = 1008;
+fld1	companynr	fld3	period
+038008	37	reporters	1008
+038208	37	Selfridge	1008
+select t2.fld1,t2.companynr,fld3,period from t3,t2 where (t2.fld1 = 38208 or t2.fld1 = 38008) and t2.fld1=t3.t2nr and period>=1008 and period<=1009;
+fld1	companynr	fld3	period
+038008	37	reporters	1008
+038208	37	Selfridge	1008
+select t2.fld1,t2.companynr,fld3,period from t3,t2 where (t3.t2nr = 38208 or t3.t2nr = 38008) and t2.fld1=t3.t2nr and period>=1008 and period<=1009;
+fld1	companynr	fld3	period
+038008	37	reporters	1008
+038208	37	Selfridge	1008
+select period from t1 where (((period > 0) or period < 10000 or (period = 1900)) and (period=1900 and period <= 1901) or (period=1903 and (period=1903)) and period>=1902) or ((period=1904 or period=1905) or (period=1906 or period>1907)) or (period=1908 and period = 1909);
+period
+9410
+select period from t1 where ((period > 0 and period < 1) or (((period > 0 and period < 100) and (period > 10)) or (period > 10)) or (period > 0 and (period > 5 or period > 6)));
+period
+9410
+select a.fld1 from t2 as a,t2 b where ((a.fld1 = 250501 and a.fld1=b.fld1) or a.fld1=250502 or a.fld1=250503 or (a.fld1=250505 and a.fld1<=b.fld1 and b.fld1>=a.fld1)) and a.fld1=b.fld1;
+fld1
+250501
+250502
+250503
+250505
+select fld1 from t2 where fld1 in (250502,98005,98006,250503,250605,250606) and fld1 >=250502 and fld1 not in (250605,250606);
+fld1
+250502
+250503
+select fld1 from t2 where fld1 between 250502 and 250504;
+fld1
+250502
+250503
+250504
+select fld3 from t2 where (((fld3 like "_%L%" ) or (fld3 like "%ok%")) and ( fld3 like "L%" or fld3 like "G%")) and fld3 like "L%" ;
+fld3
+label
+labeled
+labeled
+landslide
+laterally
+leaflet
+lewdly
+Lillian
+luckily
+select count(*) from t1;
+count(*)
+1
+select companynr,count(*),sum(fld1) from t2 group by companynr;
+companynr	count(*)	sum(fld1)
+00	82	10355753
+29	95	14473298
+34	70	17788966
+36	215	22786296
+37	588	83602098
+40	37	6618386
+41	52	12816335
+50	11	1595438
+53	4	793210
+58	23	2254293
+65	10	2284055
+68	12	3097288
+select companynr,count(*) from t2 group by companynr order by companynr desc limit 5;
+companynr	count(*)
+68	12
+65	10
+58	23
+53	4
+50	11
+select count(*),min(fld4),max(fld4),sum(fld1),avg(fld1),std(fld1),variance(fld1) from t2 where companynr = 34 and fld4<>"";
+count(*)	min(fld4)	max(fld4)	sum(fld1)	avg(fld1)	std(fld1)	variance(fld1)
+70	absentee	vest	17788966	254128.0857	3272.5939722090234	10709871.306938833
+explain extended select count(*),min(fld4),max(fld4),sum(fld1),avg(fld1),std(fld1),variance(fld1) from t2 where companynr = 34 and fld4<>"";
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	filtered	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	100.00	Using where
+Warnings:
+Note	1003	/* select#1 */ select count(0) AS `count(*)`,min(`test`.`t2`.`fld4`) AS `min(fld4)`,max(`test`.`t2`.`fld4`) AS `max(fld4)`,sum(`test`.`t2`.`fld1`) AS `sum(fld1)`,avg(`test`.`t2`.`fld1`) AS `avg(fld1)`,std(`test`.`t2`.`fld1`) AS `std(fld1)`,variance(`test`.`t2`.`fld1`) AS `variance(fld1)` from `test`.`t2` where ((`test`.`t2`.`companynr` = 34) and (`test`.`t2`.`fld4` <> ''))
+select companynr,count(*),min(fld4),max(fld4),sum(fld1),avg(fld1),std(fld1),variance(fld1) from t2 group by companynr limit 3;
+companynr	count(*)	min(fld4)	max(fld4)	sum(fld1)	avg(fld1)	std(fld1)	variance(fld1)
+00	82	Anthony	windmills	10355753	126289.6707	115550.97568479746	13352027981.708656
+29	95	abut	wetness	14473298	152350.5053	8368.547956641249	70032594.90260443
+34	70	absentee	vest	17788966	254128.0857	3272.5939722090234	10709871.306938833
+select companynr,t2nr,count(price),sum(price),min(price),max(price),avg(price) from t3 where companynr = 37 group by companynr,t2nr limit 10;
+companynr	t2nr	count(price)	sum(price)	min(price)	max(price)	avg(price)
+37	1	1	5987435	5987435	5987435	5987435.0000
+37	2	1	28357832	28357832	28357832	28357832.0000
+37	3	1	39654943	39654943	39654943	39654943.0000
+37	11	1	5987435	5987435	5987435	5987435.0000
+37	12	1	28357832	28357832	28357832	28357832.0000
+37	13	1	39654943	39654943	39654943	39654943.0000
+37	21	1	5987435	5987435	5987435	5987435.0000
+37	22	1	28357832	28357832	28357832	28357832.0000
+37	23	1	39654943	39654943	39654943	39654943.0000
+37	31	1	5987435	5987435	5987435	5987435.0000
+select /*! SQL_SMALL_RESULT */ companynr,t2nr,count(price),sum(price),min(price),max(price),avg(price) from t3 where companynr = 37 group by companynr,t2nr limit 10;
+companynr	t2nr	count(price)	sum(price)	min(price)	max(price)	avg(price)
+37	1	1	5987435	5987435	5987435	5987435.0000
+37	2	1	28357832	28357832	28357832	28357832.0000
+37	3	1	39654943	39654943	39654943	39654943.0000
+37	11	1	5987435	5987435	5987435	5987435.0000
+37	12	1	28357832	28357832	28357832	28357832.0000
+37	13	1	39654943	39654943	39654943	39654943.0000
+37	21	1	5987435	5987435	5987435	5987435.0000
+37	22	1	28357832	28357832	28357832	28357832.0000
+37	23	1	39654943	39654943	39654943	39654943.0000
+37	31	1	5987435	5987435	5987435	5987435.0000
+select companynr,count(price),sum(price),min(price),max(price),avg(price) from t3 group by companynr ;
+companynr	count(price)	sum(price)	min(price)	max(price)	avg(price)
+37	12543	309394878010	5987435	39654943	24666736.6667
+78	8362	414611089292	726498	98439034	49582766.0000
+101	4181	3489454238	834598	834598	834598.0000
+154	4181	4112197254950	983543950	983543950	983543950.0000
+311	4181	979599938	234298	234298	234298.0000
+447	4181	9929180954	2374834	2374834	2374834.0000
+512	4181	3288532102	786542	786542	786542.0000
+select distinct mod(companynr,10) from t4 group by companynr;
+mod(companynr,10)
+0
+9
+4
+6
+7
+1
+3
+8
+5
+select distinct 1 from t4 group by companynr;
+1
+1
+select count(distinct fld1) from t2;
+count(distinct fld1)
+1199
+select companynr,count(distinct fld1) from t2 group by companynr;
+companynr	count(distinct fld1)
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(*) from t2 group by companynr;
+companynr	count(*)
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(distinct concat(fld1,repeat(65,1000))) from t2 group by companynr;
+companynr	count(distinct concat(fld1,repeat(65,1000)))
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(distinct concat(fld1,repeat(65,200))) from t2 group by companynr;
+companynr	count(distinct concat(fld1,repeat(65,200)))
+00	82
+29	95
+34	70
+36	215
+37	588
+40	37
+41	52
+50	11
+53	4
+58	23
+65	10
+68	12
+select companynr,count(distinct floor(fld1/100)) from t2 group by companynr;
+companynr	count(distinct floor(fld1/100))
+00	47
+29	35
+34	14
+36	69
+37	108
+40	16
+41	11
+50	9
+53	1
+58	1
+65	1
+68	1
+select companynr,count(distinct concat(repeat(65,1000),floor(fld1/100))) from t2 group by companynr;
+companynr	count(distinct concat(repeat(65,1000),floor(fld1/100)))
+00	47
+29	35
+34	14
+36	69
+37	108
+40	16
+41	11
+50	9
+53	1
+58	1
+65	1
+68	1
+select sum(fld1),fld3 from t2 where fld3="Romans" group by fld1 limit 10;
+sum(fld1)	fld3
+11402	Romans
+select name,count(*) from t3 where name='cloakroom' group by name;
+name	count(*)
+cloakroom	4181
+select name,count(*) from t3 where name='cloakroom' and price>10 group by name;
+name	count(*)
+cloakroom	4181
+select count(*) from t3 where name='cloakroom' and price2=823742;
+count(*)
+4181
+select name,count(*) from t3 where name='cloakroom' and price2=823742 group by name;
+name	count(*)
+cloakroom	4181
+select name,count(*) from t3 where name >= "extramarital" and price <= 39654943 group by name;
+name	count(*)
+extramarital	4181
+gazer	4181
+gems	4181
+Iranizes	4181
+spates	4181
+tucked	4181
+violinist	4181
+select t2.fld3,count(*) from t2,t3 where t2.fld1=158402 and t3.name=t2.fld3 group by t3.name;
+fld3	count(*)
+spates	4181
+select companynr|0,companyname from t4 group by 1;
+companynr|0	companyname
+0	Unknown
+29	company 1
+34	company 2
+36	company 3
+37	company 4
+40	company 5
+41	company 6
+50	company 11
+53	company 7
+58	company 8
+65	company 9
+68	company 10
+select t2.companynr,companyname,count(*) from t2,t4 where t2.companynr=t4.companynr group by t2.companynr order by companyname;
+companynr	companyname	count(*)
+29	company 1	95
+68	company 10	12
+50	company 11	11
+34	company 2	70
+36	company 3	215
+37	company 4	588
+40	company 5	37
+41	company 6	52
+53	company 7	4
+58	company 8	23
+65	company 9	10
+00	Unknown	82
+select t2.fld1,count(*) from t2,t3 where t2.fld1=158402 and t3.name=t2.fld3 group by t3.name;
+fld1	count(*)
+158402	4181
+select sum(Period)/count(*) from t1;
+sum(Period)/count(*)
+9410.0000
+select companynr,count(price) as "count",sum(price) as "sum" ,abs(sum(price)/count(price)-avg(price)) as "diff",(0+count(price))*companynr as func from t3 group by companynr;
+companynr	count	sum	diff	func
+37	12543	309394878010	0.0000	464091
+78	8362	414611089292	0.0000	652236
+101	4181	3489454238	0.0000	422281
+154	4181	4112197254950	0.0000	643874
+311	4181	979599938	0.0000	1300291
+447	4181	9929180954	0.0000	1868907
+512	4181	3288532102	0.0000	2140672
+select companynr,sum(price)/count(price) as avg from t3 group by companynr having avg > 70000000 order by avg;
+companynr	avg
+154	983543950.0000
+select companynr,count(*) from t2 group by companynr order by 2 desc;
+companynr	count(*)
+37	588
+36	215
+29	95
+00	82
+34	70
+41	52
+40	37
+58	23
+68	12
+50	11
+65	10
+53	4
+select companynr,count(*) from t2 where companynr > 40 group by companynr order by 2 desc;
+companynr	count(*)
+41	52
+58	23
+68	12
+50	11
+65	10
+53	4
+select t2.fld4,t2.fld1,count(price),sum(price),min(price),max(price),avg(price) from t3,t2 where t3.companynr = 37 and t2.fld1 = t3.t2nr group by fld1,t2.fld4;
+fld4	fld1	count(price)	sum(price)	min(price)	max(price)	avg(price)
+teethe	000001	1	5987435	5987435	5987435	5987435.0000
+dreaded	011401	1	5987435	5987435	5987435	5987435.0000
+scholastics	011402	1	28357832	28357832	28357832	28357832.0000
+audiology	011403	1	39654943	39654943	39654943	39654943.0000
+wallet	011501	1	5987435	5987435	5987435	5987435.0000
+parters	011701	1	5987435	5987435	5987435	5987435.0000
+eschew	011702	1	28357832	28357832	28357832	28357832.0000
+quitter	011703	1	39654943	39654943	39654943	39654943.0000
+neat	012001	1	5987435	5987435	5987435	5987435.0000
+Steinberg	012003	1	39654943	39654943	39654943	39654943.0000
+balled	012301	1	5987435	5987435	5987435	5987435.0000
+persist	012302	1	28357832	28357832	28357832	28357832.0000
+attainments	012303	1	39654943	39654943	39654943	39654943.0000
+capably	012501	1	5987435	5987435	5987435	5987435.0000
+impulsive	012602	1	28357832	28357832	28357832	28357832.0000
+starlet	012603	1	39654943	39654943	39654943	39654943.0000
+featherweight	012701	1	5987435	5987435	5987435	5987435.0000
+pessimist	012702	1	28357832	28357832	28357832	28357832.0000
+daughter	012703	1	39654943	39654943	39654943	39654943.0000
+lawgiver	013601	1	5987435	5987435	5987435	5987435.0000
+stated	013602	1	28357832	28357832	28357832	28357832.0000
+readable	013603	1	39654943	39654943	39654943	39654943.0000
+testicle	013801	1	5987435	5987435	5987435	5987435.0000
+Parsifal	013802	1	28357832	28357832	28357832	28357832.0000
+leavings	013803	1	39654943	39654943	39654943	39654943.0000
+squeaking	013901	1	5987435	5987435	5987435	5987435.0000
+contrasted	016001	1	5987435	5987435	5987435	5987435.0000
+leftover	016201	1	5987435	5987435	5987435	5987435.0000
+whiteners	016202	1	28357832	28357832	28357832	28357832.0000
+erases	016301	1	5987435	5987435	5987435	5987435.0000
+Punjab	016302	1	28357832	28357832	28357832	28357832.0000
+Merritt	016303	1	39654943	39654943	39654943	39654943.0000
+sweetish	018001	1	5987435	5987435	5987435	5987435.0000
+dogging	018002	1	28357832	28357832	28357832	28357832.0000
+scornfully	018003	1	39654943	39654943	39654943	39654943.0000
+fetters	018012	1	28357832	28357832	28357832	28357832.0000
+bivalves	018013	1	39654943	39654943	39654943	39654943.0000
+skulking	018021	1	5987435	5987435	5987435	5987435.0000
+flint	018022	1	28357832	28357832	28357832	28357832.0000
+flopping	018023	1	39654943	39654943	39654943	39654943.0000
+Judas	018032	1	28357832	28357832	28357832	28357832.0000
+vacuuming	018033	1	39654943	39654943	39654943	39654943.0000
+medical	018041	1	5987435	5987435	5987435	5987435.0000
+bloodbath	018042	1	28357832	28357832	28357832	28357832.0000
+subschema	018043	1	39654943	39654943	39654943	39654943.0000
+interdependent	018051	1	5987435	5987435	5987435	5987435.0000
+Graves	018052	1	28357832	28357832	28357832	28357832.0000
+neonatal	018053	1	39654943	39654943	39654943	39654943.0000
+sorters	018061	1	5987435	5987435	5987435	5987435.0000
+epistle	018062	1	28357832	28357832	28357832	28357832.0000
+Conley	018101	1	5987435	5987435	5987435	5987435.0000
+lectured	018102	1	28357832	28357832	28357832	28357832.0000
+Abraham	018103	1	39654943	39654943	39654943	39654943.0000
+cage	018201	1	5987435	5987435	5987435	5987435.0000
+hushes	018202	1	28357832	28357832	28357832	28357832.0000
+Simla	018402	1	28357832	28357832	28357832	28357832.0000
+reporters	018403	1	39654943	39654943	39654943	39654943.0000
+coexist	018601	1	5987435	5987435	5987435	5987435.0000
+Beebe	018602	1	28357832	28357832	28357832	28357832.0000
+Taoism	018603	1	39654943	39654943	39654943	39654943.0000
+Connally	018801	1	5987435	5987435	5987435	5987435.0000
+fetched	018802	1	28357832	28357832	28357832	28357832.0000
+checkpoints	018803	1	39654943	39654943	39654943	39654943.0000
+gritty	018811	1	5987435	5987435	5987435	5987435.0000
+firearm	018812	1	28357832	28357832	28357832	28357832.0000
+minima	019101	1	5987435	5987435	5987435	5987435.0000
+Selfridge	019102	1	28357832	28357832	28357832	28357832.0000
+disable	019103	1	39654943	39654943	39654943	39654943.0000
+witchcraft	019201	1	5987435	5987435	5987435	5987435.0000
+betroth	030501	1	5987435	5987435	5987435	5987435.0000
+Manhattanize	030502	1	28357832	28357832	28357832	28357832.0000
+imprint	030503	1	39654943	39654943	39654943	39654943.0000
+swelling	031901	1	5987435	5987435	5987435	5987435.0000
+interrelationships	036001	1	5987435	5987435	5987435	5987435.0000
+riser	036002	1	28357832	28357832	28357832	28357832.0000
+bee	038001	1	5987435	5987435	5987435	5987435.0000
+kanji	038002	1	28357832	28357832	28357832	28357832.0000
+dental	038003	1	39654943	39654943	39654943	39654943.0000
+railway	038011	1	5987435	5987435	5987435	5987435.0000
+validate	038012	1	28357832	28357832	28357832	28357832.0000
+normalizes	038013	1	39654943	39654943	39654943	39654943.0000
+Kline	038101	1	5987435	5987435	5987435	5987435.0000
+Anatole	038102	1	28357832	28357832	28357832	28357832.0000
+partridges	038103	1	39654943	39654943	39654943	39654943.0000
+recruited	038201	1	5987435	5987435	5987435	5987435.0000
+dimensions	038202	1	28357832	28357832	28357832	28357832.0000
+Chicana	038203	1	39654943	39654943	39654943	39654943.0000
+select t3.companynr,fld3,sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 512 group by companynr,fld3;
+companynr	fld3	sum(price)
+512	boat	786542
+512	capably	786542
+512	cupboard	786542
+512	decliner	786542
+512	descendants	786542
+512	dopers	786542
+512	erases	786542
+512	Micronesia	786542
+512	Miles	786542
+512	skies	786542
+select t2.companynr,count(*),min(fld3),max(fld3),sum(price),avg(price) from t2,t3 where t3.companynr >= 30 and t3.companynr <= 58 and t3.t2nr = t2.fld1 and 1+1=2 group by t2.companynr;
+companynr	count(*)	min(fld3)	max(fld3)	sum(price)	avg(price)
+00	1	Omaha	Omaha	5987435	5987435.0000
+36	1	dubbed	dubbed	28357832	28357832.0000
+37	83	Abraham	Wotan	1908978016	22999735.1325
+50	2	scribbled	tapestry	68012775	34006387.5000
+select t3.companynr+0,t3.t2nr,fld3,sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 37 group by 1,t3.t2nr,fld3,fld3,fld3,fld3,fld3 order by fld1;
+t3.companynr+0	t2nr	fld3	sum(price)
+37	1	Omaha	5987435
+37	11401	breaking	5987435
+37	11402	Romans	28357832
+37	11403	intercepted	39654943
+37	11501	bewilderingly	5987435
+37	11701	astound	5987435
+37	11702	admonishing	28357832
+37	11703	sumac	39654943
+37	12001	flanking	5987435
+37	12003	combed	39654943
+37	12301	Eulerian	5987435
+37	12302	dubbed	28357832
+37	12303	Kane	39654943
+37	12501	annihilates	5987435
+37	12602	Wotan	28357832
+37	12603	snatching	39654943
+37	12701	grazing	5987435
+37	12702	Baird	28357832
+37	12703	celery	39654943
+37	13601	handgun	5987435
+37	13602	foldout	28357832
+37	13603	mystic	39654943
+37	13801	intelligibility	5987435
+37	13802	Augustine	28357832
+37	13803	teethe	39654943
+37	13901	scholastics	5987435
+37	16001	audiology	5987435
+37	16201	wallet	5987435
+37	16202	parters	28357832
+37	16301	eschew	5987435
+37	16302	quitter	28357832
+37	16303	neat	39654943
+37	18001	jarring	5987435
+37	18002	tinily	28357832
+37	18003	balled	39654943
+37	18012	impulsive	28357832
+37	18013	starlet	39654943
+37	18021	lawgiver	5987435
+37	18022	stated	28357832
+37	18023	readable	39654943
+37	18032	testicle	28357832
+37	18033	Parsifal	39654943
+37	18041	Punjab	5987435
+37	18042	Merritt	28357832
+37	18043	Quixotism	39654943
+37	18051	sureties	5987435
+37	18052	puddings	28357832
+37	18053	tapestry	39654943
+37	18061	trimmings	5987435
+37	18062	humility	28357832
+37	18101	tragedies	5987435
+37	18102	skulking	28357832
+37	18103	flint	39654943
+37	18201	relaxing	5987435
+37	18202	offload	28357832
+37	18402	suites	28357832
+37	18403	lists	39654943
+37	18601	vacuuming	5987435
+37	18602	dentally	28357832
+37	18603	humanness	39654943
+37	18801	inch	5987435
+37	18802	Weissmuller	28357832
+37	18803	irresponsibly	39654943
+37	18811	repetitions	5987435
+37	18812	Antares	28357832
+37	19101	ventilate	5987435
+37	19102	pityingly	28357832
+37	19103	interdependent	39654943
+37	19201	Graves	5987435
+37	30501	neonatal	5987435
+37	30502	scribbled	28357832
+37	30503	chafe	39654943
+37	31901	realtor	5987435
+37	36001	elite	5987435
+37	36002	funereal	28357832
+37	38001	Conley	5987435
+37	38002	lectured	28357832
+37	38003	Abraham	39654943
+37	38011	groupings	5987435
+37	38012	dissociate	28357832
+37	38013	coexist	39654943
+37	38101	rusting	5987435
+37	38102	galling	28357832
+37	38103	obliterates	39654943
+37	38201	resumes	5987435
+37	38202	analyzable	28357832
+37	38203	terminator	39654943
+select sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 512 and t3.t2nr = 38008 and t2.fld1 = 38008 or t2.fld1= t3.t2nr and t3.t2nr = 38008 and t2.fld1 = 38008;
+sum(price)
+234298
+select t2.fld1,sum(price) from t3,t2 where t2.fld1 = t3.t2nr and t3.companynr = 512 and t3.t2nr = 38008 and t2.fld1 = 38008 or t2.fld1 = t3.t2nr and t3.t2nr = 38008 and t2.fld1 = 38008 or t3.t2nr = t2.fld1 and t2.fld1 = 38008 group by t2.fld1;
+fld1	sum(price)
+038008	234298
+explain select fld3 from t2 where 1>2 or 2>3;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Impossible WHERE
+explain select fld3 from t2 where fld1=fld1;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	t2	ALL	NULL	NULL	NULL	NULL	1199	NULL
+select companynr,fld1 from t2 HAVING fld1=250501 or fld1=250502;
+companynr	fld1
+34	250501
+34	250502
+select companynr,fld1 from t2 WHERE fld1>=250501 HAVING fld1<=250502;
+companynr	fld1
+34	250501
+34	250502
+select companynr,count(*) as count,sum(fld1) as sum from t2 group by companynr having count > 40 and sum/count >= 120000;
+companynr	count	sum
+00	82	10355753
+29	95	14473298
+34	70	17788966
+37	588	83602098
+41	52	12816335
+select companynr from t2 group by companynr having count(*) > 40 and sum(fld1)/count(*) >= 120000 ;
+companynr
+00
+29
+34
+37
+41
+select t2.companynr,companyname,count(*) from t2,t4 where t2.companynr=t4.companynr group by companyname having t2.companynr >= 40;
+companynr	companyname	count(*)
+68	company 10	12
+50	company 11	11
+40	company 5	37
+41	company 6	52
+53	company 7	4
+58	company 8	23
+65	company 9	10
+select count(*) from t2;
+count(*)
+1199
+select count(*) from t2 where fld1 < 098024;
+count(*)
+387
+select min(fld1) from t2 where fld1>= 098024;
+min(fld1)
+98024
+select max(fld1) from t2 where fld1>= 098024;
+max(fld1)
+1232609
+select count(*) from t3 where price2=76234234;
+count(*)
+4181
+select count(*) from t3 where companynr=512 and price2=76234234;
+count(*)
+4181
+explain select min(fld1),max(fld1),count(*) from t2;
+id	select_type	table	type	possible_keys	key	key_len	ref	rows	Extra
+1	SIMPLE	NULL	NULL	NULL	NULL	NULL	NULL	NULL	Select tables optimized away
+select min(fld1),max(fld1),count(*) from t2;
+min(fld1)	max(fld1)	count(*)
+0	1232609	1199
+select min(t2nr),max(t2nr) from t3 where t2nr=2115 and price2=823742;
+min(t2nr)	max(t2nr)
+2115	2115
+select count(*),min(t2nr),max(t2nr) from t3 where name='spates' and companynr=78;
+count(*)	min(t2nr)	max(t2nr)
+4181	4	41804
+select t2nr,count(*) from t3 where name='gems' group by t2nr limit 20;
+t2nr	count(*)
+9	1
+19	1
+29	1
+39	1
+49	1
+59	1
+69	1
+79	1
+89	1
+99	1
+109	1
+119	1
+129	1
+139	1
+149	1
+159	1
+169	1
+179	1
+189	1
+199	1
+select max(t2nr) from t3 where price=983543950;
+max(t2nr)
+41807
+select t1.period from t3 = t1 limit 1;
+period
+1001
+select t1.period from t1 as t1 limit 1;
+period
+9410
+select t1.period as "Nuvarande period" from t1 as t1 limit 1;
+Nuvarande period
+9410
+select period as ok_period from t1 limit 1;
+ok_period
+9410
+select period as ok_period from t1 group by ok_period limit 1;
+ok_period
+9410
+select 1+1 as summa from t1 group by summa limit 1;
+summa
+2
+select period as "Nuvarande period" from t1 group by "Nuvarande period" limit 1;
+Nuvarande period
+9410
+show tables;
+Tables_in_test
+t1
+t2
+t3
+t4
+show tables from test like "s%";
+Tables_in_test (s%)
+show tables from test like "t?";
+Tables_in_test (t?)
+show full columns from t2;
+Field	Type	Collation	Null	Key	Default	Extra	Privileges	Comment
+auto	int(11)	NULL	NO	PRI	NULL	auto_increment	#	
+fld1	int(6) unsigned zerofill	NULL	NO	UNI	000000		#	
+companynr	tinyint(2) unsigned zerofill	NULL	NO		00		#	
+fld3	char(30)	latin1_swedish_ci	NO	MUL			#	
+fld4	char(35)	latin1_swedish_ci	NO				#	
+fld5	char(35)	latin1_swedish_ci	NO				#	
+fld6	char(4)	latin1_swedish_ci	NO				#	
+show full columns from t2 from test like 'f%';
+Field	Type	Collation	Null	Key	Default	Extra	Privileges	Comment
+fld1	int(6) unsigned zerofill	NULL	NO	UNI	000000		#	
+fld3	char(30)	latin1_swedish_ci	NO	MUL			#	
+fld4	char(35)	latin1_swedish_ci	NO				#	
+fld5	char(35)	latin1_swedish_ci	NO				#	
+fld6	char(4)	latin1_swedish_ci	NO				#	
+show full columns from t2 from test like 's%';
+Field	Type	Collation	Null	Key	Default	Extra	Privileges	Comment
+show keys from t2;
+Table	Non_unique	Key_name	Seq_in_index	Column_name	Collation	Cardinality	Sub_part	Packed	Null	Index_type	Comment	Index_comment
+t2	0	PRIMARY	1	auto	A	1199	NULL	NULL		BTREE		
+t2	0	fld1	1	fld1	A	1199	NULL	NULL		BTREE		
+t2	1	fld3	1	fld3	A	NULL	NULL	NULL		BTREE		
+drop table t4, t3, t2, t1;
+CREATE TABLE t1 (
+cont_nr int(11) NOT NULL auto_increment,
+ver_nr int(11) NOT NULL default '0',
+aufnr int(11) NOT NULL default '0',
+username varchar(50) NOT NULL default '',
+hdl_nr int(11) NOT NULL default '0',
+eintrag date NOT NULL default '0000-00-00',
+st_klasse varchar(40) NOT NULL default '',
+st_wert varchar(40) NOT NULL default '',
+st_zusatz varchar(40) NOT NULL default '',
+st_bemerkung varchar(255) NOT NULL default '',
+kunden_art varchar(40) NOT NULL default '',
+mcbs_knr int(11) default NULL,
+mcbs_aufnr int(11) NOT NULL default '0',
+schufa_status char(1) default '?',
+bemerkung text,
+wirknetz text,
+wf_igz int(11) NOT NULL default '0',
+tarifcode varchar(80) default NULL,
+recycle char(1) default NULL,
+sim varchar(30) default NULL,
+mcbs_tpl varchar(30) default NULL,
+emp_nr int(11) NOT NULL default '0',
+laufzeit int(11) default NULL,
+hdl_name varchar(30) default NULL,
+prov_hdl_nr int(11) NOT NULL default '0',
+auto_wirknetz varchar(50) default NULL,
+auto_billing varchar(50) default NULL,
+touch timestamp NOT NULL,
+kategorie varchar(50) default NULL,
+kundentyp varchar(20) NOT NULL default '',
+sammel_rech_msisdn varchar(30) NOT NULL default '',
+p_nr varchar(9) NOT NULL default '',
+suffix char(3) NOT NULL default '',
+PRIMARY KEY (cont_nr),
+KEY idx_aufnr(aufnr),
+KEY idx_hdl_nr(hdl_nr),
+KEY idx_st_klasse(st_klasse),
+KEY ver_nr(ver_nr),
+KEY eintrag_idx(eintrag),
+KEY emp_nr_idx(emp_nr),
+KEY wf_igz(wf_igz),
+KEY touch(touch),
+KEY hdl_tag(eintrag,hdl_nr),
+KEY prov_hdl_nr(prov_hdl_nr),
+KEY mcbs_aufnr(mcbs_aufnr),
+KEY kundentyp(kundentyp),
+KEY p_nr(p_nr,suffix)
+) ENGINE=MyISAM;
+INSERT INTO t1 VALUES (3359356,405,3359356,'Mustermann Musterfrau',52500,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1485525,2122316,'+','','N',1909160,'MobilComSuper92000D2',NULL,NULL,'MS9ND2',3,24,'MobilCom Shop Koeln',52500,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359357,468,3359357,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1503580,2139699,'+','','P',1909171,'MobilComSuper9D1T10SFreisprech(Akquise)',NULL,NULL,'MS9NS1',327,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359358,407,3359358,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1501358,2137473,'N','','N',1909159,'MobilComSuper92000D2',NULL,NULL,'MS9ND2',325,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359359,468,3359359,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1507831,2143894,'+','','P',1909162,'MobilComSuper9D1T10SFreisprech(Akquise)',NULL,NULL,'MS9NS1',327,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359360,0,0,'Mustermann Musterfrau',29674907,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1900169997,2414578,'+',NULL,'N',1909148,'',NULL,NULL,'RV99066_2',20,NULL,'POS',29674907,NULL,NULL,20010202105916,'Mobilfunk','','','97317481','007');
+INSERT INTO t1 VALUES (3359361,406,3359361,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag storniert','','(7001-84):Storno, Kd. möchte nicht mehr','privat',NULL,0,'+','','P',1909150,'MobilComSuper92000D1(Akquise)',NULL,NULL,'MS9ND1',325,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+INSERT INTO t1 VALUES (3359362,406,3359362,'Mustermann Musterfrau',7001,'2000-05-20','workflow','Auftrag erledigt','Originalvertrag eingegangen und geprüft','','privat',1509984,2145874,'+','','P',1909154,'MobilComSuper92000D1(Akquise)',NULL,NULL,'MS9ND1',327,24,'MobilCom Intern',7003,NULL,'auto',20010202105916,'Mobilfunk','PP','','','');
+SELECT ELT(FIELD(kundentyp,'PP','PPA','PG','PGA','FK','FKA','FP','FPA','K','KA','V','VA',''), 'Privat (Private Nutzung)','Privat (Private Nutzung) Sitz im Ausland','Privat (geschaeftliche Nutzung)','Privat (geschaeftliche Nutzung) Sitz im Ausland','Firma (Kapitalgesellschaft)','Firma (Kapitalgesellschaft) Sitz im Ausland','Firma (Personengesellschaft)','Firma (Personengesellschaft) Sitz im Ausland','oeff. rechtl. Koerperschaft','oeff. rechtl. Koerperschaft Sitz im Ausland','Eingetragener Verein','Eingetragener Verein Sitz im Ausland','Typ unbekannt') AS Kundentyp ,kategorie FROM t1 WHERE hdl_nr < 2000000 AND kategorie IN ('Prepaid','Mobilfunk') AND st_klasse = 'Workflow' GROUP BY kundentyp ORDER BY kategorie;
+Kundentyp	kategorie
+Privat (Private Nutzung)	Mobilfunk
+Warnings:
+Warning	1052	Column 'kundentyp' in group statement is ambiguous
+drop table t1;
+CREATE TABLE t1(a INT);
+START TRANSACTION;
+INSERT INTO t1 VALUES (1);
+SELECT * FROM t1;
+a
+1
+INSERT INTO t1 VALUES (2);
+SELECT * FROM t1;
+a
+1
+2
+COMMIT;
+DROP TABLE t1;

=== added file 'Percona-Server/mysql-test/suite/binlog/r/percona_bug1162085.result'
--- Percona-Server/mysql-test/suite/binlog/r/percona_bug1162085.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/binlog/r/percona_bug1162085.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,6 @@
+CREATE TABLE t1 (data LONGBLOB) ENGINE=InnoDB;
+START TRANSACTION;
+SAVEPOINT savepoint_1;
+ROLLBACK TO SAVEPOINT_1;
+COMMIT;
+DROP TABLE t1;

=== added file 'Percona-Server/mysql-test/suite/binlog/t/percona_bug1162085.test'
--- Percona-Server/mysql-test/suite/binlog/t/percona_bug1162085.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/binlog/t/percona_bug1162085.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,30 @@
+########################################################################
+# Bug #1162085: Percona server 5.5.30-rel30.1.465 reproducible hang
+########################################################################
+
+-- source include/have_log_bin.inc
+-- source include/have_innodb.inc
+
+CREATE TABLE t1 (data LONGBLOB) ENGINE=InnoDB;
+
+START TRANSACTION;
+
+--disable_query_log
+let $i=1024;
+while($i)
+{
+	# Don't use REPEAT() here so we generate long enough writes to the
+	# binlog cache in both stmt and row-based mode
+
+	INSERT INTO t1 (data) VALUES ('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX');
+	dec $i;
+}
+--enable_query_log
+
+SAVEPOINT savepoint_1;
+
+ROLLBACK TO SAVEPOINT_1;
+
+COMMIT;
+
+DROP TABLE t1;

=== modified file 'Percona-Server/mysql-test/suite/innodb/r/percona_changed_page_bmp.result'
--- Percona-Server/mysql-test/suite/innodb/r/percona_changed_page_bmp.result	2013-05-09 19:43:37 +0000
+++ Percona-Server/mysql-test/suite/innodb/r/percona_changed_page_bmp.result	2013-05-27 12:16:38 +0000
@@ -55,13 +55,18 @@
 RESET CHANGED_PAGE_BITMAPS;
 call mtr.add_suppression("InnoDB: Error: page [0-9]* log sequence number [0-9]*");
 9th restart
-DROP TABLE t1, t2;
+CREATE TABLE t3 (a MEDIUMBLOB) ENGINE=InnoDB;
+call mtr.add_suppression("InnoDB: Error: the age of the oldest untracked record exceeds the log group capacity!");
+call mtr.add_suppression("InnoDB: Error: stopping the log tracking thread at LSN");
+INSERT INTO t3 VALUES (REPEAT('a', 12582912));
+10th restart
+DROP TABLE t1, t2, t3;
 SET GLOBAL innodb_fast_shutdown=0;
-10th restart
+11th restart
 SELECT @@innodb_read_only;
 @@innodb_read_only
 1
 SELECT @@innodb_track_changed_pages;
 @@innodb_track_changed_pages
 0
-11th restart
+12th restart

=== modified file 'Percona-Server/mysql-test/suite/innodb/t/percona_changed_page_bmp-master.opt'
--- Percona-Server/mysql-test/suite/innodb/t/percona_changed_page_bmp-master.opt	2013-01-31 11:52:07 +0000
+++ Percona-Server/mysql-test/suite/innodb/t/percona_changed_page_bmp-master.opt	2013-05-27 12:16:38 +0000
@@ -1,1 +1,1 @@
---innodb_track_changed_pages=TRUE --innodb_log_file_size=5M --innodb_file_per_table 
+--innodb_track_changed_pages=TRUE --innodb_log_file_size=5M --innodb_file_per_table --max_allowed_packet=13631488 

=== modified file 'Percona-Server/mysql-test/suite/innodb/t/percona_changed_page_bmp.test'
--- Percona-Server/mysql-test/suite/innodb/t/percona_changed_page_bmp.test	2013-05-09 19:43:37 +0000
+++ Percona-Server/mysql-test/suite/innodb/t/percona_changed_page_bmp.test	2013-05-27 12:16:38 +0000
@@ -231,14 +231,27 @@
 --echo 9th restart
 --source include/restart_mysqld.inc
 
-DROP TABLE t1, t2;
+#
+# Test that the case where the log is impossible to track is handled gracefully (bug 1108613)
+#
+CREATE TABLE t3 (a MEDIUMBLOB) ENGINE=InnoDB;
+
+call mtr.add_suppression("InnoDB: Error: the age of the oldest untracked record exceeds the log group capacity!");
+call mtr.add_suppression("InnoDB: Error: stopping the log tracking thread at LSN");
+
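+# The ~12 MiB blob below is expected to exceed the capacity of the 5M redo
+# logs configured in the -master.opt file, triggering the "oldest untracked
+# record exceeds the log group capacity" condition suppressed above.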
+INSERT INTO t3 VALUES (REPEAT('a', 12582912));
+
+--echo 10th restart
+--source include/restart_mysqld.inc
+
+DROP TABLE t1, t2, t3;
 
 #
 # Test that --innodb-read-only works
 #
 SET GLOBAL innodb_fast_shutdown=0;
+--echo 11th restart
 
---echo 10th restart
 --exec echo "wait" > $MYSQLTEST_VARDIR/tmp/mysqld.1.expect
 --shutdown_server 10
 --source include/wait_until_disconnected.inc
@@ -249,5 +262,5 @@
 SELECT @@innodb_read_only;
 SELECT @@innodb_track_changed_pages;
 
---echo 11th restart
+--echo 12th restart
 --source include/restart_mysqld.inc

=== removed file 'Percona-Server/mysql-test/suite/rpl/r/percona_bug860910.result'
--- Percona-Server/mysql-test/suite/rpl/r/percona_bug860910.result	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/rpl/r/percona_bug860910.result	1970-01-01 00:00:00 +0000
@@ -1,23 +0,0 @@
-*** Set up master (server_1) <-> master (server_2) replication  ***
-include/rpl_init.inc [topology=1->2->1]
-Warnings:
-Note	####	Sending passwords in plain text without SSL/TLS is extremely insecure.
-Note	####	Storing MySQL user name or password information in the master.info repository is not secure and is therefore not recommended. Please see the MySQL Manual for more about this issue and possible alternatives.
-Warnings:
-Note	####	Sending passwords in plain text without SSL/TLS is extremely insecure.
-Note	####	Storing MySQL user name or password information in the master.info repository is not secure and is therefore not recommended. Please see the MySQL Manual for more about this issue and possible alternatives.
-
-SELECT @@global.log_slave_updates;
-@@global.log_slave_updates
-1
-SELECT @@global.log_slave_updates;
-@@global.log_slave_updates
-1
-CREATE TABLE t1(a INT);
-SET @var:=0;
-INSERT INTO t1 VALUES (@var);
-INSERT INTO t1 VALUES (1);
-DROP TABLE t1;
-include/rpl_sync.inc
-include/check_slave_param.inc [Exec_Master_Log_Pos]
-include/rpl_end.inc

=== removed file 'Percona-Server/mysql-test/suite/rpl/t/percona_bug860910.test'
--- Percona-Server/mysql-test/suite/rpl/t/percona_bug860910.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/rpl/t/percona_bug860910.test	1970-01-01 00:00:00 +0000
@@ -1,38 +0,0 @@
-########################################################################
-# Bug #860910: SHOW SLAVE STATUS gives wrong output with master-master
-#              and using SET uservars
-########################################################################
-
---echo *** Set up master (server_1) <-> master (server_2) replication  ***
---let $rpl_topology= 1->2->1
---source include/rpl_init.inc
---echo
-
---connection server_1
-SELECT @@global.log_slave_updates;
-
---connection server_2
-SELECT @@global.log_slave_updates;
-
---connection server_1
-CREATE TABLE t1(a INT);
-SET @var:=0;
-INSERT INTO t1 VALUES (@var);
-INSERT INTO t1 VALUES (1);
-
-DROP TABLE t1;
-
-# The following would hang with the bug not fixed due to incorrect
-# Exec_Master_Log_Pos
---source include/rpl_sync.inc
-
---connection server_2
---let $master_log_pos= query_get_value(SHOW MASTER STATUS, Position, 1)
-
---connection server_1
---let $slave_param= Exec_Master_Log_Pos
---let $slave_param_value= $master_log_pos
---source include/check_slave_param.inc
-
-# Cleanup
---source include/rpl_end.inc

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/extra_max_connections_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/extra_max_connections_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/extra_max_connections_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,43 @@
+SET @start_global_value = @@global.extra_max_connections;
+select @@global.extra_max_connections;
+@@global.extra_max_connections
+1
+select @@session.extra_max_connections;
+ERROR HY000: Variable 'extra_max_connections' is a GLOBAL variable
+show global variables like 'extra_max_connections';
+Variable_name	Value
+extra_max_connections	1
+show session variables like 'extra_max_connections';
+Variable_name	Value
+extra_max_connections	1
+select * from information_schema.global_variables where variable_name='extra_max_connections';
+VARIABLE_NAME	VARIABLE_VALUE
+EXTRA_MAX_CONNECTIONS	1
+select * from information_schema.session_variables where variable_name='extra_max_connections';
+VARIABLE_NAME	VARIABLE_VALUE
+EXTRA_MAX_CONNECTIONS	1
+set global extra_max_connections=1;
+select @@global.extra_max_connections;
+@@global.extra_max_connections
+1
+set session extra_max_connections=1;
+ERROR HY000: Variable 'extra_max_connections' is a GLOBAL variable and should be set with SET GLOBAL
+set global extra_max_connections=1.1;
+ERROR 42000: Incorrect argument type to variable 'extra_max_connections'
+set global extra_max_connections=1e1;
+ERROR 42000: Incorrect argument type to variable 'extra_max_connections'
+set global extra_max_connections="foo";
+ERROR 42000: Incorrect argument type to variable 'extra_max_connections'
+set global extra_max_connections=0;
+Warnings:
+Warning	1292	Truncated incorrect extra_max_connections value: '0'
+select @@global.extra_max_connections;
+@@global.extra_max_connections
+1
+set global extra_max_connections=cast(-1 as unsigned int);
+Warnings:
+Warning	1292	Truncated incorrect extra_max_connections value: '18446744073709551615'
+select @@global.extra_max_connections;
+@@global.extra_max_connections
+100000
+SET @@global.extra_max_connections = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/extra_port_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/extra_port_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/extra_port_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,21 @@
+select @@global.extra_port;
+@@global.extra_port
+0
+select @@session.extra_port;
+ERROR HY000: Variable 'extra_port' is a GLOBAL variable
+show global variables like 'extra_port';
+Variable_name	Value
+extra_port	0
+show session variables like 'extra_port';
+Variable_name	Value
+extra_port	0
+select * from information_schema.global_variables where variable_name='extra_port';
+VARIABLE_NAME	VARIABLE_VALUE
+EXTRA_PORT	0
+select * from information_schema.session_variables where variable_name='extra_port';
+VARIABLE_NAME	VARIABLE_VALUE
+EXTRA_PORT	0
+set global extra_port=1;
+ERROR HY000: Variable 'extra_port' is a read only variable
+set session extra_port=1;
+ERROR HY000: Variable 'extra_port' is a read only variable

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/query_exec_id_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/query_exec_id_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/query_exec_id_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,2 @@
+SET GLOBAL query_exec_id=default;
+SET SESSION query_exec_id=default;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_high_prio_tickets_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_high_prio_tickets_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_high_prio_tickets_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+SET @start_global_value = @@global.thread_pool_high_prio_tickets;
+select @@global.thread_pool_high_prio_tickets;
+@@global.thread_pool_high_prio_tickets
+0
+select @@session.thread_pool_high_prio_tickets;
+ERROR HY000: Variable 'thread_pool_high_prio_tickets' is a GLOBAL variable
+show global variables like 'thread_pool_high_prio_tickets';
+Variable_name	Value
+thread_pool_high_prio_tickets	0
+show session variables like 'thread_pool_high_prio_tickets';
+Variable_name	Value
+thread_pool_high_prio_tickets	0
+select * from information_schema.global_variables where variable_name='thread_pool_high_prio_tickets';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_HIGH_PRIO_TICKETS	0
+select * from information_schema.session_variables where variable_name='thread_pool_high_prio_tickets';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_HIGH_PRIO_TICKETS	0
+set global thread_pool_high_prio_tickets=60;
+select @@global.thread_pool_high_prio_tickets;
+@@global.thread_pool_high_prio_tickets
+60
+set global thread_pool_high_prio_tickets=4294967295;
+select @@global.thread_pool_high_prio_tickets;
+@@global.thread_pool_high_prio_tickets
+4294967295
+set session thread_pool_high_prio_tickets=1;
+ERROR HY000: Variable 'thread_pool_high_prio_tickets' is a GLOBAL variable and should be set with SET GLOBAL
+set global thread_pool_high_prio_tickets=1.1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_high_prio_tickets'
+set global thread_pool_high_prio_tickets=1e1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_high_prio_tickets'
+set global thread_pool_high_prio_tickets="foo";
+ERROR 42000: Incorrect argument type to variable 'thread_pool_high_prio_tickets'
+set global thread_pool_high_prio_tickets=-1;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_high_prio_tickets value: '-1'
+select @@global.thread_pool_high_prio_tickets;
+@@global.thread_pool_high_prio_tickets
+0
+set global thread_pool_high_prio_tickets=10000000000;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_high_prio_tickets value: '10000000000'
+select @@global.thread_pool_high_prio_tickets;
+@@global.thread_pool_high_prio_tickets
+4294967295
+set @@global.thread_pool_high_prio_tickets = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_idle_timeout_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_idle_timeout_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_idle_timeout_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+SET @start_global_value = @@global.thread_pool_idle_timeout;
+select @@global.thread_pool_idle_timeout;
+@@global.thread_pool_idle_timeout
+60
+select @@session.thread_pool_idle_timeout;
+ERROR HY000: Variable 'thread_pool_idle_timeout' is a GLOBAL variable
+show global variables like 'thread_pool_idle_timeout';
+Variable_name	Value
+thread_pool_idle_timeout	60
+show session variables like 'thread_pool_idle_timeout';
+Variable_name	Value
+thread_pool_idle_timeout	60
+select * from information_schema.global_variables where variable_name='thread_pool_idle_timeout';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_IDLE_TIMEOUT	60
+select * from information_schema.session_variables where variable_name='thread_pool_idle_timeout';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_IDLE_TIMEOUT	60
+set global thread_pool_idle_timeout=60;
+select @@global.thread_pool_idle_timeout;
+@@global.thread_pool_idle_timeout
+60
+set global thread_pool_idle_timeout=4294967295;
+select @@global.thread_pool_idle_timeout;
+@@global.thread_pool_idle_timeout
+4294967295
+set session thread_pool_idle_timeout=1;
+ERROR HY000: Variable 'thread_pool_idle_timeout' is a GLOBAL variable and should be set with SET GLOBAL
+set global thread_pool_idle_timeout=1.1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_idle_timeout'
+set global thread_pool_idle_timeout=1e1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_idle_timeout'
+set global thread_pool_idle_timeout="foo";
+ERROR 42000: Incorrect argument type to variable 'thread_pool_idle_timeout'
+set global thread_pool_idle_timeout=-1;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_idle_timeout value: '-1'
+select @@global.thread_pool_idle_timeout;
+@@global.thread_pool_idle_timeout
+1
+set global thread_pool_idle_timeout=10000000000;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_idle_timeout value: '10000000000'
+select @@global.thread_pool_idle_timeout;
+@@global.thread_pool_idle_timeout
+4294967295
+SET @@global.thread_pool_idle_timeout = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_max_threads_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_max_threads_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_max_threads_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+SET @start_global_value = @@global.thread_pool_max_threads;
+select @@global.thread_pool_max_threads;
+@@global.thread_pool_max_threads
+500
+select @@session.thread_pool_max_threads;
+ERROR HY000: Variable 'thread_pool_max_threads' is a GLOBAL variable
+show global variables like 'thread_pool_max_threads';
+Variable_name	Value
+thread_pool_max_threads	500
+show session variables like 'thread_pool_max_threads';
+Variable_name	Value
+thread_pool_max_threads	500
+select * from information_schema.global_variables where variable_name='thread_pool_max_threads';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_MAX_THREADS	500
+select * from information_schema.session_variables where variable_name='thread_pool_max_threads';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_MAX_THREADS	500
+set global thread_pool_max_threads=1;
+select @@global.thread_pool_max_threads;
+@@global.thread_pool_max_threads
+1
+set global thread_pool_max_threads=65536;
+select @@global.thread_pool_max_threads;
+@@global.thread_pool_max_threads
+65536
+set session thread_pool_max_threads=1;
+ERROR HY000: Variable 'thread_pool_max_threads' is a GLOBAL variable and should be set with SET GLOBAL
+set global thread_pool_max_threads=1.1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_max_threads'
+set global thread_pool_max_threads=1e1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_max_threads'
+set global thread_pool_max_threads="foo";
+ERROR 42000: Incorrect argument type to variable 'thread_pool_max_threads'
+set global thread_pool_max_threads=0;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_max_threads value: '0'
+select @@global.thread_pool_max_threads;
+@@global.thread_pool_max_threads
+1
+set global thread_pool_max_threads=10000000000;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_max_threads value: '10000000000'
+select @@global.thread_pool_max_threads;
+@@global.thread_pool_max_threads
+65536
+SET @@global.thread_pool_max_threads = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_min_threads_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_min_threads_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_min_threads_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,49 @@
+SET @start_global_value = @@global.thread_pool_min_threads;
+select @@global.thread_pool_min_threads;
+@@global.thread_pool_min_threads
+1
+select @@session.thread_pool_min_threads;
+ERROR HY000: Variable 'thread_pool_min_threads' is a GLOBAL variable
+show global variables like 'thread_pool_min_threads';
+Variable_name	Value
+thread_pool_min_threads	1
+show session variables like 'thread_pool_min_threads';
+Variable_name	Value
+thread_pool_min_threads	1
+select * from information_schema.global_variables where variable_name='thread_pool_min_threads';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_MIN_THREADS	1
+select * from information_schema.session_variables where variable_name='thread_pool_min_threads';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_MIN_THREADS	1
+set global thread_pool_min_threads=1;
+select @@global.thread_pool_min_threads;
+@@global.thread_pool_min_threads
+1
+set global thread_pool_min_threads=65536;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_min_threads value: '65536'
+select @@global.thread_pool_min_threads;
+@@global.thread_pool_min_threads
+256
+set session thread_pool_min_threads=1;
+ERROR HY000: Variable 'thread_pool_min_threads' is a GLOBAL variable and should be set with SET GLOBAL
+set global thread_pool_min_threads=1.1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_min_threads'
+set global thread_pool_min_threads=1e1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_min_threads'
+set global thread_pool_min_threads="foo";
+ERROR 42000: Incorrect argument type to variable 'thread_pool_min_threads'
+set global thread_pool_min_threads=0;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_min_threads value: '0'
+select @@global.thread_pool_min_threads;
+@@global.thread_pool_min_threads
+1
+set global thread_pool_min_threads=10000000000;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_min_threads value: '10000000000'
+select @@global.thread_pool_min_threads;
+@@global.thread_pool_min_threads
+256
+SET @@global.thread_pool_min_threads = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_oversubscribe_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_oversubscribe_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_oversubscribe_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+SET @start_global_value = @@global.thread_pool_oversubscribe;
+select @@global.thread_pool_oversubscribe;
+@@global.thread_pool_oversubscribe
+3
+select @@session.thread_pool_oversubscribe;
+ERROR HY000: Variable 'thread_pool_oversubscribe' is a GLOBAL variable
+show global variables like 'thread_pool_oversubscribe';
+Variable_name	Value
+thread_pool_oversubscribe	3
+show session variables like 'thread_pool_oversubscribe';
+Variable_name	Value
+thread_pool_oversubscribe	3
+select * from information_schema.global_variables where variable_name='thread_pool_oversubscribe';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_OVERSUBSCRIBE	3
+select * from information_schema.session_variables where variable_name='thread_pool_oversubscribe';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_OVERSUBSCRIBE	3
+set global thread_pool_oversubscribe=60;
+select @@global.thread_pool_oversubscribe;
+@@global.thread_pool_oversubscribe
+60
+set global thread_pool_oversubscribe=1000;
+select @@global.thread_pool_oversubscribe;
+@@global.thread_pool_oversubscribe
+1000
+set session thread_pool_oversubscribe=1;
+ERROR HY000: Variable 'thread_pool_oversubscribe' is a GLOBAL variable and should be set with SET GLOBAL
+set global thread_pool_oversubscribe=1.1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_oversubscribe'
+set global thread_pool_oversubscribe=1e1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_oversubscribe'
+set global thread_pool_oversubscribe="foo";
+ERROR 42000: Incorrect argument type to variable 'thread_pool_oversubscribe'
+set global thread_pool_oversubscribe=-1;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_oversubscribe value: '-1'
+select @@global.thread_pool_oversubscribe;
+@@global.thread_pool_oversubscribe
+1
+set global thread_pool_oversubscribe=10000000000;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_oversubscribe value: '10000000000'
+select @@global.thread_pool_oversubscribe;
+@@global.thread_pool_oversubscribe
+1000
+set @@global.thread_pool_oversubscribe = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_stall_limit_basic.result'
--- Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_stall_limit_basic.result	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/r/thread_pool_stall_limit_basic.result	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+SET @start_global_value = @@global.thread_pool_stall_limit;
+select @@global.thread_pool_stall_limit;
+@@global.thread_pool_stall_limit
+500
+select @@session.thread_pool_stall_limit;
+ERROR HY000: Variable 'thread_pool_stall_limit' is a GLOBAL variable
+show global variables like 'thread_pool_stall_limit';
+Variable_name	Value
+thread_pool_stall_limit	500
+show session variables like 'thread_pool_stall_limit';
+Variable_name	Value
+thread_pool_stall_limit	500
+select * from information_schema.global_variables where variable_name='thread_pool_stall_limit';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_STALL_LIMIT	500
+select * from information_schema.session_variables where variable_name='thread_pool_stall_limit';
+VARIABLE_NAME	VARIABLE_VALUE
+THREAD_POOL_STALL_LIMIT	500
+set global thread_pool_stall_limit=60;
+select @@global.thread_pool_stall_limit;
+@@global.thread_pool_stall_limit
+60
+set global thread_pool_stall_limit=4294967295;
+select @@global.thread_pool_stall_limit;
+@@global.thread_pool_stall_limit
+4294967295
+set session thread_pool_stall_limit=1;
+ERROR HY000: Variable 'thread_pool_stall_limit' is a GLOBAL variable and should be set with SET GLOBAL
+set global thread_pool_stall_limit=1.1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_stall_limit'
+set global thread_pool_stall_limit=1e1;
+ERROR 42000: Incorrect argument type to variable 'thread_pool_stall_limit'
+set global thread_pool_stall_limit="foo";
+ERROR 42000: Incorrect argument type to variable 'thread_pool_stall_limit'
+set global thread_pool_stall_limit=-1;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_stall_limit value: '-1'
+select @@global.thread_pool_stall_limit;
+@@global.thread_pool_stall_limit
+10
+set global thread_pool_stall_limit=10000000000;
+Warnings:
+Warning	1292	Truncated incorrect thread_pool_stall_limit value: '10000000000'
+select @@global.thread_pool_stall_limit;
+@@global.thread_pool_stall_limit
+4294967295
+set @@global.thread_pool_stall_limit = @start_global_value;

=== removed file 'Percona-Server/mysql-test/suite/sys_vars/t/extra_max_connections_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/extra_max_connections_basic.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/extra_max_connections_basic.test	1970-01-01 00:00:00 +0000
@@ -1,42 +0,0 @@
-# ulong global
-
-SET @start_global_value = @@global.extra_max_connections;
-
-#
-# exists as global only
-#
-select @@global.extra_max_connections;
---error ER_INCORRECT_GLOBAL_LOCAL_VAR
-select @@session.extra_max_connections;
-show global variables like 'extra_max_connections';
-show session variables like 'extra_max_connections';
-select * from information_schema.global_variables where variable_name='extra_max_connections';
-select * from information_schema.session_variables where variable_name='extra_max_connections';
-
-#
-# show that it's writable
-#
-set global extra_max_connections=1;
-select @@global.extra_max_connections;
---error ER_GLOBAL_VARIABLE
-set session extra_max_connections=1;
-
-#
-# incorrect types
-#
---error ER_WRONG_TYPE_FOR_VAR
-set global extra_max_connections=1.1;
---error ER_WRONG_TYPE_FOR_VAR
-set global extra_max_connections=1e1;
---error ER_WRONG_TYPE_FOR_VAR
-set global extra_max_connections="foo";
-
-#
-# min/max values
-#
-set global extra_max_connections=0;
-select @@global.extra_max_connections;
-set global extra_max_connections=cast(-1 as unsigned int);
-select @@global.extra_max_connections;
-
-SET @@global.extra_max_connections = @start_global_value;

=== removed file 'Percona-Server/mysql-test/suite/sys_vars/t/query_exec_id_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/query_exec_id_basic.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/query_exec_id_basic.test	1970-01-01 00:00:00 +0000
@@ -1,4 +0,0 @@
---source include/have_debug.inc
-
-SET GLOBAL query_exec_id=default;
-SET SESSION query_exec_id=default;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_high_prio_tickets_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_high_prio_tickets_basic.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_high_prio_tickets_basic.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,43 @@
+# uint global
+--source include/not_windows.inc
+--source include/not_embedded.inc
+SET @start_global_value = @@global.thread_pool_high_prio_tickets;
+
+#
+# exists as global only
+#
+select @@global.thread_pool_high_prio_tickets;
+--error ER_INCORRECT_GLOBAL_LOCAL_VAR
+select @@session.thread_pool_high_prio_tickets;
+show global variables like 'thread_pool_high_prio_tickets';
+show session variables like 'thread_pool_high_prio_tickets';
+select * from information_schema.global_variables where variable_name='thread_pool_high_prio_tickets';
+select * from information_schema.session_variables where variable_name='thread_pool_high_prio_tickets';
+
+#
+# show that it's writable
+#
+set global thread_pool_high_prio_tickets=60;
+select @@global.thread_pool_high_prio_tickets;
+set global thread_pool_high_prio_tickets=4294967295;
+select @@global.thread_pool_high_prio_tickets;
+--error ER_GLOBAL_VARIABLE
+set session thread_pool_high_prio_tickets=1;
+
+#
+# incorrect types
+#
+--error ER_WRONG_TYPE_FOR_VAR
+set global thread_pool_high_prio_tickets=1.1;
+--error ER_WRONG_TYPE_FOR_VAR
+set global thread_pool_high_prio_tickets=1e1;
+--error ER_WRONG_TYPE_FOR_VAR
+set global thread_pool_high_prio_tickets="foo";
+
+
+set global thread_pool_high_prio_tickets=-1;
+select @@global.thread_pool_high_prio_tickets;
+set global thread_pool_high_prio_tickets=10000000000;
+select @@global.thread_pool_high_prio_tickets;
+
+set @@global.thread_pool_high_prio_tickets = @start_global_value;

=== removed file 'Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_idle_timeout_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_idle_timeout_basic.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_idle_timeout_basic.test	1970-01-01 00:00:00 +0000
@@ -1,43 +0,0 @@
-# uint global
---source include/not_windows.inc
---source include/not_embedded.inc
-SET @start_global_value = @@global.thread_pool_idle_timeout;
-
-#
-# exists as global only
-#
-select @@global.thread_pool_idle_timeout;
---error ER_INCORRECT_GLOBAL_LOCAL_VAR
-select @@session.thread_pool_idle_timeout;
-show global variables like 'thread_pool_idle_timeout';
-show session variables like 'thread_pool_idle_timeout';
-select * from information_schema.global_variables where variable_name='thread_pool_idle_timeout';
-select * from information_schema.session_variables where variable_name='thread_pool_idle_timeout';
-
-#
-# show that it's writable
-#
-set global thread_pool_idle_timeout=60;
-select @@global.thread_pool_idle_timeout;
-set global thread_pool_idle_timeout=4294967295;
-select @@global.thread_pool_idle_timeout;
---error ER_GLOBAL_VARIABLE
-set session thread_pool_idle_timeout=1;
-
-#
-# incorrect types
-#
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_idle_timeout=1.1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_idle_timeout=1e1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_idle_timeout="foo";
-
-
-set global thread_pool_idle_timeout=-1;
-select @@global.thread_pool_idle_timeout;
-set global thread_pool_idle_timeout=10000000000;
-select @@global.thread_pool_idle_timeout;
-
-SET @@global.thread_pool_idle_timeout = @start_global_value;

=== removed file 'Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_min_threads_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_min_threads_basic.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_min_threads_basic.test	1970-01-01 00:00:00 +0000
@@ -1,43 +0,0 @@
-# uint global
---source include/not_embedded.inc
---source include/windows.inc
-SET @start_global_value = @@global.thread_pool_min_threads;
-
-#
-# exists as global only
-#
-select @@global.thread_pool_min_threads;
---error ER_INCORRECT_GLOBAL_LOCAL_VAR
-select @@session.thread_pool_min_threads;
-show global variables like 'thread_pool_min_threads';
-show session variables like 'thread_pool_min_threads';
-select * from information_schema.global_variables where variable_name='thread_pool_min_threads';
-select * from information_schema.session_variables where variable_name='thread_pool_min_threads';
-
-#
-# show that it's writable
-#
-set global thread_pool_min_threads=1;
-select @@global.thread_pool_min_threads;
-set global thread_pool_min_threads=65536;
-select @@global.thread_pool_min_threads;
---error ER_GLOBAL_VARIABLE
-set session thread_pool_min_threads=1;
-
-#
-# incorrect types
-#
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_min_threads=1.1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_min_threads=1e1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_min_threads="foo";
-
-
-set global thread_pool_min_threads=0;
-select @@global.thread_pool_min_threads;
-set global thread_pool_min_threads=10000000000;
-select @@global.thread_pool_min_threads;
-
-SET @@global.thread_pool_min_threads = @start_global_value;

=== removed file 'Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_oversubscribe_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_oversubscribe_basic.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_oversubscribe_basic.test	1970-01-01 00:00:00 +0000
@@ -1,43 +0,0 @@
-# uint global
---source include/not_windows.inc
---source include/not_embedded.inc
-SET @start_global_value = @@global.thread_pool_oversubscribe;
-
-#
-# exists as global only
-#
-select @@global.thread_pool_oversubscribe;
---error ER_INCORRECT_GLOBAL_LOCAL_VAR
-select @@session.thread_pool_oversubscribe;
-show global variables like 'thread_pool_oversubscribe';
-show session variables like 'thread_pool_oversubscribe';
-select * from information_schema.global_variables where variable_name='thread_pool_oversubscribe';
-select * from information_schema.session_variables where variable_name='thread_pool_oversubscribe';
-
-#
-# show that it's writable
-#
-set global thread_pool_oversubscribe=60;
-select @@global.thread_pool_oversubscribe;
-set global thread_pool_oversubscribe=1000;
-select @@global.thread_pool_oversubscribe;
---error ER_GLOBAL_VARIABLE
-set session thread_pool_oversubscribe=1;
-
-#
-# incorrect types
-#
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_oversubscribe=1.1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_oversubscribe=1e1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_oversubscribe="foo";
-
-
-set global thread_pool_oversubscribe=-1;
-select @@global.thread_pool_oversubscribe;
-set global thread_pool_oversubscribe=10000000000;
-select @@global.thread_pool_oversubscribe;
-
-set @@global.thread_pool_oversubscribe = @start_global_value;

=== added file 'Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_size_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_size_basic.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_size_basic.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+# uint global
+--source include/not_windows.inc
+--source include/not_embedded.inc
+SET @start_global_value = @@global.thread_pool_size;
+
+#
+# exists as global only
+#
+--replace_column 1 #
+select @@global.thread_pool_size;
+--error ER_INCORRECT_GLOBAL_LOCAL_VAR
+select @@session.thread_pool_size;
+--replace_column 2 #
+show global variables like 'thread_pool_size';
+--replace_column 2 #
+show session variables like 'thread_pool_size';
+--replace_column 2 #
+select * from information_schema.global_variables where variable_name='thread_pool_size';
+--replace_column 2 #
+select * from information_schema.session_variables where variable_name='thread_pool_size';
+--replace_column 2 #
+
+#
+# show that it's writable
+#
+set global thread_pool_size=1;
+select @@global.thread_pool_size;
+set global thread_pool_size=128;
+select @@global.thread_pool_size;
+--error ER_GLOBAL_VARIABLE
+set session thread_pool_size=1;
+
+#
+# incorrect types
+#
+--error ER_WRONG_TYPE_FOR_VAR
+set global thread_pool_size=1.1;
+--error ER_WRONG_TYPE_FOR_VAR
+set global thread_pool_size=1e1;
+--error ER_WRONG_TYPE_FOR_VAR
+set global thread_pool_size="foo";
+
+set global thread_pool_size=-1;
+
+set global thread_pool_size=100000;
+
+SET @@global.thread_pool_size = @start_global_value;

=== removed file 'Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_stall_limit_basic.test'
--- Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_stall_limit_basic.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/suite/sys_vars/t/thread_pool_stall_limit_basic.test	1970-01-01 00:00:00 +0000
@@ -1,43 +0,0 @@
-# uint global
---source include/not_windows.inc
---source include/not_embedded.inc
-SET @start_global_value = @@global.thread_pool_stall_limit;
-
-#
-# exists as global only
-#
-select @@global.thread_pool_stall_limit;
---error ER_INCORRECT_GLOBAL_LOCAL_VAR
-select @@session.thread_pool_stall_limit;
-show global variables like 'thread_pool_stall_limit';
-show session variables like 'thread_pool_stall_limit';
-select * from information_schema.global_variables where variable_name='thread_pool_stall_limit';
-select * from information_schema.session_variables where variable_name='thread_pool_stall_limit';
-
-#
-# show that it's writable
-#
-set global thread_pool_stall_limit=60;
-select @@global.thread_pool_stall_limit;
-set global thread_pool_stall_limit=4294967295;
-select @@global.thread_pool_stall_limit;
---error ER_GLOBAL_VARIABLE
-set session thread_pool_stall_limit=1;
-
-#
-# incorrect types
-#
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_stall_limit=1.1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_stall_limit=1e1;
---error ER_WRONG_TYPE_FOR_VAR
-set global thread_pool_stall_limit="foo";
-
-
-set global thread_pool_stall_limit=-1;
-select @@global.thread_pool_stall_limit;
-set global thread_pool_stall_limit=10000000000;
-select @@global.thread_pool_stall_limit;
-
-set @@global.thread_pool_stall_limit = @start_global_value;

=== added file 'Percona-Server/mysql-test/t/percona_bug1170103-master.opt'
--- Percona-Server/mysql-test/t/percona_bug1170103-master.opt	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_bug1170103-master.opt	2013-05-27 12:16:38 +0000
@@ -0,0 +1,1 @@
+--query_cache_type=1

=== added file 'Percona-Server/mysql-test/t/percona_bug1170103.test'
--- Percona-Server/mysql-test/t/percona_bug1170103.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_bug1170103.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,23 @@
+########################################################################
+# Bug #1170103: Memory leak @ read_view_open_now...
+########################################################################
+
+--source include/have_query_cache.inc
+--source include/have_innodb.inc
+
+CREATE TABLE t(a INT) ENGINE=InnoDB;
+INSERT INTO t VALUES (1), (2), (3);
+
+SET @old_query_cache_size=@@query_cache_size;
+SET @old_query_cache_type=@@query_cache_type;
+
+SET GLOBAL query_cache_size=1024*1024;
+SET GLOBAL query_cache_type=1;
+
+SELECT * FROM t;
+SELECT * FROM t;
+
+SET GLOBAL query_cache_size=@old_query_cache_size;
+SET GLOBAL query_cache_type=@old_query_cache_type;
+
+DROP TABLE t;

=== removed file 'Percona-Server/mysql-test/t/percona_bug933969.test'
--- Percona-Server/mysql-test/t/percona_bug933969.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_bug933969.test	1970-01-01 00:00:00 +0000
@@ -1,42 +0,0 @@
-###################### percona_bug933969.test ########################
-# Bug #933969: mysqlbinlog doesn't accept stdin                      #
-#                                                                    #
-# The goal of this testcase is to test that mysqlbinlog handle       #
-# stdin correctly when stdin is pipe.                                #
-# i.e. "cat log | mysqlbinlog -" don't cause mysqlbinlog failure     #
-######################################################################
--- source include/have_log_bin.inc
--- source include/not_windows.inc
--- source include/not_embedded.inc
-
-# deletes all the binary logs
-RESET MASTER;
-
---disable_warnings
-DROP TABLE IF EXISTS t1;
---enable_warnings
-
-# produce some statements for binlog
-
-CREATE TABLE t1 (word VARCHAR(20));
-
-INSERT INTO t1 VALUES ("hamite");
-INSERT INTO t1 VALUES ("hoho");
-INSERT INTO t1 VALUES ("znamenito");
-INSERT INTO t1 VALUES ("mrachny");
-INSERT INTO t1 VALUES ("mrak");
-INSERT INTO t1 VALUES ("zhut");
-INSERT INTO t1 VALUES ("parnisha");
-INSERT INTO t1 VALUES ("krrasota!");
-INSERT INTO t1 VALUES ("podumayesh");
-INSERT INTO t1 VALUES ("ogo!");
-
-FLUSH LOGS;
-
-# run mysqlbinlog and make sure it ends normally
-
-let $MYSQLD_DATADIR= `SELECT @@datadir`;
---exec cat $MYSQLD_DATADIR/master-bin.000001 | $MYSQL_BINLOG - >/dev/null
-
-DROP TABLE t1;
-RESET MASTER;

=== added file 'Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_false.test'
--- Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_false.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_false.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,2 @@
+SHOW GLOBAL VARIABLES like 'log_slow_admin_statements';
+SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES WHERE VARIABLE_NAME='log_slow_admin_statements';

=== added file 'Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_foo.test'
--- Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_foo.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_foo.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,3 @@
+call mtr.add_suppression("option 'log_slow_admin_statements': boolean value 'foo' wasn't recognized. Set to OFF.");
+SHOW GLOBAL VARIABLES like 'log_slow_admin_statements';
+SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES WHERE VARIABLE_NAME='log_slow_admin_statements';

=== added file 'Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_true.cnf'
--- Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_true.cnf	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_admin_statements-config_true.cnf	2013-05-27 12:16:38 +0000
@@ -0,0 +1,4 @@
+!include include/default_my.cnf
+
+[mysqld.1]
+log-slow-admin-statements=true

=== removed file 'Percona-Server/mysql-test/t/percona_log_slow_slave_statements-cl-master.opt'
--- Percona-Server/mysql-test/t/percona_log_slow_slave_statements-cl-master.opt	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_slave_statements-cl-master.opt	1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
---log_slow_slave_statements
\ No newline at end of file

=== removed file 'Percona-Server/mysql-test/t/percona_log_slow_sp_statements-cl-master.opt'
--- Percona-Server/mysql-test/t/percona_log_slow_sp_statements-cl-master.opt	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_sp_statements-cl-master.opt	1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
---log_slow_sp_statements

=== added file 'Percona-Server/mysql-test/t/percona_log_slow_sp_statements-cl.test'
--- Percona-Server/mysql-test/t/percona_log_slow_sp_statements-cl.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_sp_statements-cl.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,2 @@
+SHOW VARIABLES LIKE 'log_slow_sp_statements';
+SHOW GLOBAL VARIABLES LIKE 'log_slow_sp_statements';

=== removed file 'Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl-master.opt'
--- Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl-master.opt	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl-master.opt	1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
---log_slow_verbosity="full"

=== removed file 'Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test'
--- Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test	1970-01-01 00:00:00 +0000
@@ -1,2 +0,0 @@
-SHOW VARIABLES LIKE 'log_slow_verbosity';
-SHOW GLOBAL VARIABLES LIKE 'log_slow_verbosity';

=== modified file 'Percona-Server/mysql-test/t/percona_mysqldump_innodb_optimize_keys.test'
--- Percona-Server/mysql-test/t/percona_mysqldump_innodb_optimize_keys.test	2013-03-21 04:21:28 +0000
+++ Percona-Server/mysql-test/t/percona_mysqldump_innodb_optimize_keys.test	2013-05-27 12:16:38 +0000
@@ -244,5 +244,32 @@
 
 DROP TABLE t1;
 
+#############################################################################
+# Bug #1081016: mysqldump --innodb-optimize-keys may produce invalid SQL with
+#               explicitly named FK constraints
+#############################################################################
+
+CREATE TABLE `t1` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `a` int(11) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `a` (`a`),
+  CONSTRAINT `a` FOREIGN KEY (`a`) REFERENCES `t1` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+
+--exec $MYSQL_DUMP --skip-comments --innodb-optimize-keys test t1 >$file
+
+--echo ######################################
+--cat_file $file
+--echo ######################################
+
+# Check that the resulting dump can be imported back
+
+--exec $MYSQL test < $file
+
+--remove_file $file
+
+DROP TABLE t1;
+
 # Wait till we reached the initial number of concurrent sessions
 --source include/wait_until_count_sessions.inc

=== removed file 'Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test'
--- Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test	1970-01-01 00:00:00 +0000
@@ -1,208 +0,0 @@
--- source include/have_query_cache.inc
-
-set GLOBAL query_cache_size=1355776;
-
-# Reset query cache variables.
-flush query cache; # This crashed in some versions
-flush query cache; # This crashed in some versions
-reset query cache;
-flush status;
---disable_warnings
-drop table if exists t1;
---enable_warnings
-
-#
-# First simple test
-#
-
-create table t1 (a int not null);
-insert into t1 values (1),(2),(3);
-
-set global query_cache_strip_comments=ON;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from '/* with comment */ select * from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-execute stmt;
-execute stmt;
-execute stmt;
-execute stmt;
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * /*internal comment*/from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * /*internal comment*/ from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 /* at the end */';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 /* with "quote" */';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 /* with \'quote\' */';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 # 123
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 # 123 with "quote"
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 # 123 with \'quote\'
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1
-# 123
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from '#456
-select * from t1
-# 123
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1 -- 123
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select * from t1
--- 123
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from '-- comment in first
-select * from t1
-# 123
-';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from '(#456(
-select * from t1
-# 123(
-)';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from '/*test*/(-- comment in first(
-select * from t1
--- 123 asdasd
-/* test */)';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select "test",a from t1';
-execute stmt;
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select "test /* internal \'comment\' */",a from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select "test #internal comment" ,a from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-prepare stmt from 'select "test #internal comment" #external comment
-,a from t1';
-execute stmt;
-
-show status like "Qcache_queries_in_cache";
-show status like "Qcache_inserts";
-show status like "Qcache_hits";
-
-DROP TABLE t1;
-SET GLOBAL query_cache_size= default;
-set global query_cache_strip_comments=OFF;

=== removed file 'Percona-Server/mysql-test/t/percona_status_wait_query_cache_mutex.test'
--- Percona-Server/mysql-test/t/percona_status_wait_query_cache_mutex.test	2013-05-27 12:16:36 +0000
+++ Percona-Server/mysql-test/t/percona_status_wait_query_cache_mutex.test	1970-01-01 00:00:00 +0000
@@ -1,35 +0,0 @@
---source include/have_query_cache.inc
---source include/have_debug.inc
---source include/have_debug_sync.inc
-SET GLOBAL query_cache_size=1355776;
---source include/percona_query_cache_with_comments_clear.inc
---let try_lock_mutex_query=SELECT "try_lock_mutex_query" as action
-
---connect (mutex_locked_conn, localhost, root,,)
---connect (try_mutex_lock_conn, localhost, root,,)
-
---connection mutex_locked_conn
-SET DEBUG_SYNC='after_query_cache_mutex SIGNAL mutex_locked WAIT_FOR unlock_mutex';
-send SELECT "mutex_locked_query" as action;
-
---connection default
-SET DEBUG_SYNC='now WAIT_FOR mutex_locked';
-
---connection try_mutex_lock_conn
-SET DEBUG_SYNC='before_query_cache_mutex SIGNAL try_lock_mutex';
-send_eval $try_lock_mutex_query;
-
---connection default
-SET DEBUG_SYNC='now WAIT_FOR try_lock_mutex';
-eval SELECT SQL_NO_CACHE state FROM INFORMATION_SCHEMA.PROCESSLIST WHERE info='$try_lock_mutex_query';
-SET DEBUG_SYNC='now SIGNAL unlock_mutex';
-
---connection mutex_locked_conn
-reap;
---connection try_mutex_lock_conn
-reap;
-
---connection default
---disconnect mutex_locked_conn
---disconnect try_mutex_lock_conn
-SET GLOBAL query_cache_size=default;

=== added file 'Percona-Server/mysql-test/t/percona_xtradb_bug317074.test'
--- Percona-Server/mysql-test/t/percona_xtradb_bug317074.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_xtradb_bug317074.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,47 @@
+-- source include/have_innodb.inc
+
+SET @old_innodb_file_format=@@innodb_file_format;
+SET @old_innodb_file_format_max=@@innodb_file_format_max;
+SET @old_innodb_file_per_table=@@innodb_file_per_table;
+SET GLOBAL innodb_file_format='Barracuda';
+SET GLOBAL innodb_file_per_table=ON;
+
+-- disable_query_log
+-- disable_result_log
+
+DROP TABLE IF EXISTS `test1`;
+CREATE TABLE IF NOT EXISTS `test1` (
+ `a` int primary key auto_increment,
+ `b` int default 0,
+ `c` char(100) default 'testtest'
+) ENGINE=InnoDB ROW_FORMAT=COMPRESSED KEY_BLOCK_SIZE=8;
+
+delimiter |;
+CREATE PROCEDURE insert_many(p1 int)
+BEGIN
+SET @x = 0;
+SET @y = 0;
+start transaction;
+REPEAT
+  insert into test1 set b=1;
+  SET @x = @x + 1;
+  SET @y = @y + 1;
+  IF @y >= 1000 THEN
+    commit;
+    start transaction;
+    SET @y = 0;
+  END IF;
+UNTIL @x >= p1 END REPEAT;
+commit;
+END|
+delimiter ;|
+call insert_many(100000);
+DROP PROCEDURE insert_many;
+
+# The bug is a hang at the following statement
+ALTER TABLE test1 ENGINE=MyISAM;
+
+DROP TABLE test1;
+SET GLOBAL innodb_file_format=@old_innodb_file_format;
+SET GLOBAL innodb_file_format_max=@old_innodb_file_format_max;
+SET GLOBAL innodb_file_per_table=@old_innodb_file_per_table;

=== added file 'Percona-Server/mysql-test/t/pool_of_threads.cnf'
--- Percona-Server/mysql-test/t/pool_of_threads.cnf	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/pool_of_threads.cnf	2013-05-27 12:16:38 +0000
@@ -0,0 +1,15 @@
+!include include/default_my.cnf
+
+[mysqld.1]
+loose-thread-handling=   pool-of-threads
+loose-thread_pool_size= 2 
+loose-thread_pool_max_threads= 2 
+#extra-port=        @ENV.MASTER_EXTRA_PORT
+extra-port=        @OPT.port
+
+[client]
+connect-timeout=  2 
+
+[ENV]
+;MASTER_EXTRA_PORT= @mysqld.2.port
+MASTER_EXTRA_PORT= @OPT.port

=== added file 'Percona-Server/mysql-test/t/pool_of_threads_high_prio_tickets.cnf'
--- Percona-Server/mysql-test/t/pool_of_threads_high_prio_tickets.cnf	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/pool_of_threads_high_prio_tickets.cnf	2013-05-27 12:16:38 +0000
@@ -0,0 +1,16 @@
+!include include/default_my.cnf
+
+[mysqld.1]
+loose-thread-handling=   pool-of-threads
+loose-thread_pool_size= 2 
+loose-thread_pool_max_threads= 2 
+#extra-port=        @ENV.MASTER_EXTRA_PORT
+extra-port=        @OPT.port
+loose-thread_pool_high_prio_tickets=2
+
+[client]
+connect-timeout=  2 
+
+[ENV]
+;MASTER_EXTRA_PORT= @mysqld.2.port
+MASTER_EXTRA_PORT= @OPT.port

=== added file 'Percona-Server/mysql-test/t/pool_of_threads_high_prio_tickets.test'
--- Percona-Server/mysql-test/t/pool_of_threads_high_prio_tickets.test	1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/pool_of_threads_high_prio_tickets.test	2013-05-27 12:16:38 +0000
@@ -0,0 +1,19 @@
+# Start with thread_handling=pool-of-threads
+# and run some basic tests with --thread_pool_high_prio_tickets=2
+
+-- source include/have_pool_of_threads.inc
+
+SELECT @@thread_pool_high_prio_tickets;
+
+-- source include/common-tests.inc
+
+CREATE TABLE t1(a INT);
+
+START TRANSACTION;
+INSERT INTO t1 VALUES (1);
+SELECT * FROM t1;
+INSERT INTO t1 VALUES (2);
+SELECT * FROM t1;
+COMMIT;
+
+DROP TABLE t1;

=== modified file 'Percona-Server/mysys/mf_cache.c'
--- Percona-Server/mysys/mf_cache.c	2013-05-12 06:24:46 +0000
+++ Percona-Server/mysys/mf_cache.c	2013-05-27 12:16:38 +0000
@@ -121,15 +121,17 @@
 }
 
 /*
-  Truncate the cached file to a given offset. The cache must be reinitialized
-  with reinit_io_cache() after this call.
+  Truncate the cached file to a given offset, if the current size is greater
+  than the offset. The cache must be reinitialized with reinit_io_cache() after
+  this call.
 */
 
 my_bool truncate_cached_file(IO_CACHE *cache, my_off_t pos)
 {
   DBUG_ENTER("truncate_cached_file");
 
-  if (my_b_inited(cache) && cache->file > -1)
+  if (my_b_inited(cache) && cache->file > -1 &&
+      my_seek(cache->file, 0L, MY_SEEK_END, MYF(MY_WME + MY_FAE)) > pos)
   {
     if (my_chsize(cache->file, pos, 0, MYF(MY_WME)))
       DBUG_RETURN(TRUE);

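Editorial note on the truncate_cached_file() change above: the call pattern required by the updated comment is sketched below. This is an illustration only and not part of the patch; the helper name shrink_cache_to(), the WRITE_CACHE mode, and the treatment of return codes are assumptions.

/*
  Minimal sketch, assuming the mysys IO_CACHE API declared in my_sys.h:
  truncate the cached file, then reinitialize the cache before reuse.
*/
static my_bool shrink_cache_to(IO_CACHE *cache, my_off_t pos)
{
  /* With this patch the truncation is skipped when the file is already
     no longer than pos; a failing my_chsize() still returns TRUE. */
  if (truncate_cached_file(cache, pos))
    return TRUE;
  /* Mandatory after truncation: reset the buffer state and file position. */
  return reinit_io_cache(cache, WRITE_CACHE, pos, 0, 0) != 0;
}
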
=== added file 'Percona-Server/sql/query_strip_comments.h'
--- Percona-Server/sql/query_strip_comments.h	1970-01-01 00:00:00 +0000
+++ Percona-Server/sql/query_strip_comments.h	2013-05-27 12:16:38 +0000
@@ -0,0 +1,37 @@
+#ifndef _SQL_QUERY_STRIPC_COMMENTS_H_
+#define _SQL_QUERY_STRIPC_COMMENTS_H_
+#ifdef HAVE_QUERY_CACHE
+
+// implemented in sql_cache.cc
+class QueryStripComments
+{
+private:
+  QueryStripComments(const QueryStripComments&);
+  QueryStripComments& operator=(const QueryStripComments&);
+public:
+  QueryStripComments();
+  ~QueryStripComments();
+  void set(const char* a_query, uint a_query_length, uint a_additional_length);
+  
+  char* query()        { return buffer; }
+  uint  query_length() { return length; }
+private:
+  void cleanup();
+private:
+  char* buffer;
+  uint  length /*query length, not buffer length*/;
+  uint  buffer_length;
+};
+class QueryStripComments_Backup
+{
+public:
+  QueryStripComments_Backup(THD* a_thd,QueryStripComments* qsc);
+  ~QueryStripComments_Backup();
+private:
+  THD*  thd;
+  char* query;
+  uint  length;
+};
+
+#endif // HAVE_QUERY_CACHE
+#endif // _SQL_QUERY_STRIPC_COMMENTS_H_

=== modified file 'Percona-Server/sql/sys_vars.cc'
--- Percona-Server/sql/sys_vars.cc	2013-05-12 09:13:00 +0000
+++ Percona-Server/sql/sys_vars.cc	2013-05-27 12:16:38 +0000
@@ -3122,6 +3122,13 @@
   NO_MUTEX_GUARD, NOT_IN_BINLOG, ON_CHECK(0), 
   ON_UPDATE(fix_threadpool_stall_limit)
 );
+static Sys_var_uint Sys_threadpool_high_prio_tickets(
+  "thread_pool_high_prio_tickets",
+  "Number of tickets to enter the high priority event queue for each "
+  "transaction.",
+  GLOBAL_VAR(threadpool_high_prio_tickets), CMD_LINE(REQUIRED_ARG),
+  VALID_RANGE(0, UINT_MAX), DEFAULT(0), BLOCK_SIZE(1)
+);
 #endif /* !WIN32 */
 static Sys_var_uint Sys_threadpool_max_threads(
   "thread_pool_max_threads",

=== added file 'Percona-Server/sql/threadpool.h'
--- Percona-Server/sql/threadpool.h	1970-01-01 00:00:00 +0000
+++ Percona-Server/sql/threadpool.h	2013-05-27 12:16:38 +0000
@@ -0,0 +1,72 @@
+/* Copyright (C) 2012 Monty Program Ab
+
+   This program is free software; you can redistribute it and/or modify
+   it under the terms of the GNU General Public License as published by
+   the Free Software Foundation; version 2 of the License.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */
+
+#define MAX_THREAD_GROUPS 128
+
+/* Threadpool parameters */
+extern uint threadpool_min_threads;  /* Minimum threads in pool */
+extern uint threadpool_idle_timeout; /* Shut down idle worker threads after this timeout */
+extern uint threadpool_size; /* Number of parallel executing threads */
+extern uint threadpool_stall_limit;  /* Time interval in 10 ms units for stall checks */
+extern uint threadpool_max_threads;  /* Maximum threads in pool */
+extern uint threadpool_oversubscribe;  /* Maximum active threads in group */
+#ifndef _WIN32
+extern uint threadpool_high_prio_tickets;      /* High prio queue tickets */
+#endif
+
+
+
+/* Common thread pool routines, suitable for different implementations */
+extern void threadpool_remove_connection(THD *thd);
+extern int  threadpool_process_request(THD *thd);
+extern int  threadpool_add_connection(THD *thd);
+
+/*
+  Functions used by scheduler. 
+  OS-specific implementations are in
+  threadpool_unix.cc or threadpool_win.cc
+*/
+extern bool tp_init();
+extern void tp_add_connection(THD*);
+extern void tp_wait_begin(THD *, int);
+extern void tp_wait_end(THD*);
+extern void tp_post_kill_notification(THD *thd);
+extern void tp_end(void);
+
+/* Used in SHOW for threadpool_idle_thread_count */
+extern int  tp_get_idle_thread_count();
+
+/*
+  Threadpool statistics
+*/
+struct TP_STATISTICS
+{
+  /* Current number of worker threads. */
+  volatile int32 num_worker_threads;
+};
+
+extern TP_STATISTICS tp_stats;
+
+
+/* Functions to set threadpool parameters */
+extern void tp_set_min_threads(uint val);
+extern void tp_set_max_threads(uint val);
+extern void tp_set_threadpool_size(uint val);
+extern void tp_set_threadpool_stall_limit(uint val);
+
+/* Activate threadpool scheduler */
+extern void tp_scheduler(void);
+
+extern int show_threadpool_idle_threads(THD *thd, SHOW_VAR *var, char *buff);

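The tp_set_*() setters declared above are intended to be driven from the ON_UPDATE hooks in sys_vars.cc (fix_threadpool_stall_limit is referenced earlier in this diff). A hedged sketch of such a hook follows; the name update_threadpool_stall_limit_sketch is hypothetical, and the body is only the expected shape of that hook, not the literal code in the patch.

/*
  Illustration only: propagate a changed global variable into the running
  pool, using the usual sys_vars.cc on_update callback signature.
*/
static bool update_threadpool_stall_limit_sketch(sys_var *, THD *, enum_var_type)
{
  tp_set_threadpool_stall_limit(threadpool_stall_limit);
  return false;   /* false = accept the new value, no error */
}
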
=== added file 'Percona-Server/sql/threadpool_common.cc'
--- Percona-Server/sql/threadpool_common.cc	1970-01-01 00:00:00 +0000
+++ Percona-Server/sql/threadpool_common.cc	2013-05-27 12:16:38 +0000
@@ -0,0 +1,367 @@
+/* Copyright (C) 2012 Monty Program Ab
+
+   This program is free software; you can redistribute it and/or modify
+   it under the terms of the GNU General Public License as published by
+   the Free Software Foundation; version 2 of the License.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */
+
+#include <my_global.h>
+#include <violite.h>
+#include <sql_priv.h>
+#include <sql_class.h>
+#include <my_pthread.h>
+#include <scheduler.h>
+#include <sql_connect.h>
+#include <sql_audit.h>
+#include <debug_sync.h>
+#include <threadpool.h>
+#include <global_threads.h>
+
+
+/* Threadpool parameters */
+
+uint threadpool_min_threads;
+uint threadpool_idle_timeout;
+uint threadpool_size;
+uint threadpool_stall_limit;
+uint threadpool_max_threads;
+uint threadpool_oversubscribe;
+#ifndef _WIN32
+uint threadpool_high_prio_tickets;
+#endif
+
+/* Stats */
+TP_STATISTICS tp_stats;
+
+
+extern "C" pthread_key(struct st_my_thread_var*, THR_KEY_mysys);
+extern bool do_command(THD*);
+
+/*
+  Worker thread contexts and THD contexts
+  ========================================
+
+  Both worker threads and connections have their own sets of thread-local
+  variables. At the moment these are mysys_var (which holds data specific to
+  dbug, my_error and similar facilities) and the per-client PSI structure.
+
+  Whenever a query is executed, the following needs to be done:
+
+  1. Save the worker thread context.
+  2. Change TLS variables to connection-specific ones using thread_attach(THD*).
+     This function does some additional work, e.g. setting up the
+     thread_stack/thread_ends_here pointers.
+  3. Process the query.
+  4. Restore the worker thread context.
+
+  Connection login and termination follow a similar scheme w.r.t. saving and
+  restoring contexts.
+
+  For both the worker thread and the connection, mysys variables are created
+  using my_thread_init() and freed with my_thread_end().
+
+*/
+struct Worker_thread_context
+{
+  PSI_thread *psi_thread;
+  st_my_thread_var* mysys_var;
+
+  void save()
+  {
+    psi_thread=  PSI_server?PSI_server->get_thread():0;
+    mysys_var= (st_my_thread_var *)pthread_getspecific(THR_KEY_mysys);
+  }
+
+  void restore()
+  {
+    if (PSI_server)
+      PSI_server->set_thread(psi_thread);
+    pthread_setspecific(THR_KEY_mysys,mysys_var);
+    pthread_setspecific(THR_THD, 0);
+    pthread_setspecific(THR_MALLOC, 0);
+  }
+};
+
+
+/*
+  Attach/associate the connection with the OS thread.
+*/
+static bool thread_attach(THD* thd)
+{
+  pthread_setspecific(THR_KEY_mysys,thd->mysys_var);
+  thd->thread_stack=(char*)&thd;
+  thd->store_globals();
+  if (PSI_server)
+    PSI_server->set_thread(thd->event_scheduler.m_psi);
+  mysql_socket_set_thread_owner(thd->net.vio->mysql_socket);
+  return 0;
+}
+
+#ifdef HAVE_PSI_STATEMENT_INTERFACE
+extern PSI_statement_info stmt_info_new_packet;
+#endif
+
+void threadpool_net_before_header_psi_noop(struct st_net * /* net */,
+                                           void * /* user_data */,
+                                           size_t /* count */)
+{ }
+
+void threadpool_net_after_header_psi(struct st_net *net, void *user_data,
+                                     size_t /* count */, my_bool rc)
+{
+  THD *thd;
+  thd= static_cast<THD*> (user_data);
+  DBUG_ASSERT(thd != NULL);
+
+  if (thd->m_server_idle)
+  {
+    /*
+      The server just got data for a network packet header,
+      from the network layer.
+      The IDLE event is now complete, since we now have a message to process.
+      We need to:
+      - start a new STATEMENT event
+      - start a new STAGE event, within this statement,
+      - start recording SOCKET WAITS events, within this stage.
+      The proper order is critical to get events numbered correctly,
+      and nested in the proper parent.
+    */
+    MYSQL_END_IDLE_WAIT(thd->m_idle_psi);
+
+    if (! rc)
+    {
+      thd->m_statement_psi= MYSQL_START_STATEMENT(&thd->m_statement_state,
+                                                  stmt_info_new_packet.m_key,
+                                                  thd->db, thd->db_length,
+                                                  thd->charset());
+
+      THD_STAGE_INFO(thd, stage_init);
+    }
+
+    /*
+      TODO: consider recording a SOCKET event for the bytes just read,
+      by also passing count here.
+    */
+    MYSQL_SOCKET_SET_STATE(net->vio->mysql_socket, PSI_SOCKET_STATE_ACTIVE);
+
+    thd->m_server_idle = false;
+  }
+}
+
+void threadpool_init_net_server_extension(THD *thd)
+{
+#ifdef HAVE_PSI_INTERFACE
+  /* Start with a clean state for connection events. */
+  thd->m_idle_psi= NULL;
+  thd->m_statement_psi= NULL;
+  thd->m_server_idle= false;
+  /* Hook up the NET_SERVER callback in the net layer. */
+  thd->m_net_server_extension.m_user_data= thd;
+  thd->m_net_server_extension.m_before_header= threadpool_net_before_header_psi_noop;
+  thd->m_net_server_extension.m_after_header= threadpool_net_after_header_psi;
+  /* Activate this private extension for the mysqld server. */
+  thd->net.extension= & thd->m_net_server_extension;
+#else
+  thd->net.extension= NULL;
+#endif
+}
+
+int threadpool_add_connection(THD *thd)
+{
+  int retval=1;
+  Worker_thread_context worker_context;
+  worker_context.save();
+
+  /*
+    Create a new connection context: mysys_thread_var and PSI thread
+    Store them in THD.
+  */
+
+  pthread_setspecific(THR_KEY_mysys, 0);
+  my_thread_init();
+  thd->mysys_var= (st_my_thread_var *)pthread_getspecific(THR_KEY_mysys);
+  if (!thd->mysys_var)
+  {
+    /* Out of memory? */
+    worker_context.restore();
+    return 1;
+  }
+
+  /* Create new PSI thread for use with the THD. */
+  if (PSI_server)
+  {
+    thd->event_scheduler.m_psi=
+      PSI_server->new_thread(key_thread_one_connection, thd, thd->thread_id);
+  }
+
+
+  /* Login. */
+  thread_attach(thd);
+  ulonglong now= my_micro_time();
+  thd->prior_thr_create_utime= now;
+  thd->start_utime= now;
+  thd->thr_create_utime= now;
+
+  if (!setup_connection_thread_globals(thd))
+  {
+    if (!login_connection(thd))
+    {
+      prepare_new_connection_state(thd);
+      
+      /* 
+        Check if THD is ok, as prepare_new_connection_state()
+        can fail, for example if init command failed.
+      */
+      if (thd_is_connection_alive(thd))
+      {
+        retval= 0;
+        thd->net.reading_or_writing= 1;
+        thd->skip_wait_timeout= true;
+        MYSQL_SOCKET_SET_STATE(thd->net.vio->mysql_socket, PSI_SOCKET_STATE_IDLE);
+        threadpool_init_net_server_extension(thd);
+      }
+    }
+  }
+  worker_context.restore();
+  return retval;
+}
+
+
+void threadpool_remove_connection(THD *thd)
+{
+
+  Worker_thread_context worker_context;
+  worker_context.save();
+
+  thread_attach(thd);
+  thd->net.reading_or_writing= 0;
+
+  end_connection(thd);
+  close_connection(thd, 0);
+
+  thd->release_resources();
+  dec_connection_count(thd);
+
+  mysql_mutex_lock(&LOCK_thread_count);
+  /*
+    Used by binlog_reset_master.  It would be cleaner to use
+    DEBUG_SYNC here, but that's not possible because the THD's debug
+    sync feature has been shut down at this point.
+  */
+  DBUG_EXECUTE_IF("sleep_after_lock_thread_count_before_delete_thd", sleep(5););
+  remove_global_thread(thd);
+  mysql_mutex_unlock(&LOCK_thread_count);
+  delete thd;
+
+  /*
+    Free resources associated with this connection: 
+    mysys thread_var and PSI thread.
+  */
+  my_thread_end();
+
+  mysql_cond_broadcast(&COND_thread_count);
+
+  worker_context.restore();
+}
+
+/**
+ Process a single client request or a single batch.
+*/
+int threadpool_process_request(THD *thd)
+{
+  int retval= 0;
+  Worker_thread_context  worker_context;
+  worker_context.save();
+
+  thread_attach(thd);
+
+  if (thd->killed == THD::KILL_CONNECTION)
+  {
+    /* 
+      killed flag was set by timeout handler 
+      or KILL command. Return error.
+    */
+    retval= 1;
+    goto end;
+  }
+
+
+  /*
+    In the loop below, the flow is essentially a copy of the thread-per-connection
+    logic, see do_handle_one_connection() in sql_connect.c
+
+    The goal is to execute a single query, thus the loop is normally executed 
+    only once. However, for SSL connections it can be executed multiple times 
+    (SSL can preread and cache incoming data, and vio->has_data() checks whether 
+    that was the case).
+  */
+  for(;;)
+  {
+    Vio *vio;
+    thd->net.reading_or_writing= 0;
+    mysql_audit_release(thd);
+
+    if ((retval= do_command(thd)) != 0)
+      goto end;
+
+    if (!thd_is_connection_alive(thd))
+    {
+      retval= 1;
+      goto end;
+    }
+
+    vio= thd->net.vio;
+    if (!vio->has_data(vio))
+    { 
+      /* More info on this debug sync is in sql_parse.cc */
+      DEBUG_SYNC(thd, "before_do_command_net_read");
+      thd->net.reading_or_writing= 1;
+      goto end;
+    }
+  }
+
+end:
+  if (!thd->m_server_idle) {
+    MYSQL_SOCKET_SET_STATE(thd->net.vio->mysql_socket, PSI_SOCKET_STATE_IDLE);
+    MYSQL_START_IDLE_WAIT(thd->m_idle_psi, &thd->m_idle_state);
+    thd->m_server_idle = true;
+  }
+
+  worker_context.restore();
+  return retval;
+}
+
+
+static scheduler_functions tp_scheduler_functions=
+{
+  0,                                  // max_threads
+  NULL,
+  NULL,
+  tp_init,                            // init
+  NULL,                               // init_new_connection_thread
+  tp_add_connection,                  // add_connection
+  tp_wait_begin,                      // thd_wait_begin
+  tp_wait_end,                        // thd_wait_end
+  tp_post_kill_notification,          // post_kill_notification
+  NULL,                               // end_thread
+  tp_end                              // end
+};
+
+void pool_of_threads_scheduler(struct scheduler_functions *func,
+    ulong *arg_max_connections,
+    uint *arg_connection_count)
+{
+  *func = tp_scheduler_functions;
+  func->max_threads= threadpool_max_threads;
+  func->max_connections= arg_max_connections;
+  func->connection_count= arg_connection_count;
+  scheduler_init();
+}

=== added file 'Percona-Server/sql/threadpool_unix.cc'
--- Percona-Server/sql/threadpool_unix.cc	1970-01-01 00:00:00 +0000
+++ Percona-Server/sql/threadpool_unix.cc	2013-05-27 12:16:38 +0000
@@ -0,0 +1,1699 @@
+/* Copyright (C) 2012 Monty Program Ab
+
+   This program is free software; you can redistribute it and/or modify
+   it under the terms of the GNU General Public License as published by
+   the Free Software Foundation; version 2 of the License.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */
+
+#include <my_global.h>
+#include <violite.h>
+#include <sql_priv.h>
+#include <sql_class.h>
+#include <my_pthread.h>
+#include <scheduler.h>
+#include <sql_connect.h>
+#include <mysqld.h>
+#include <debug_sync.h>
+#include <time.h>
+#include <sql_plist.h>
+#include <threadpool.h>
+#include <global_threads.h>
+#include <mysql/thread_pool_priv.h>             // thd_is_transaction_active()
+#include <time.h>
+#ifdef __linux__
+#include <sys/epoll.h>
+typedef struct epoll_event native_event;
+#endif
+#if defined (__FreeBSD__) || defined (__APPLE__)
+#include <sys/event.h>
+typedef struct kevent native_event;
+#endif
+#if defined (__sun)
+#include <port.h>
+typedef port_event_t native_event;
+#endif
+
+/** Maximum number of native events a listener can read in one go */
+#define MAX_EVENTS 1024
+
+/** Indicates that the threadpool was initialized */
+static bool threadpool_started= false; 
+
+/* 
+  Define PSI keys for the performance schema. 
+  We have a mutex per group, worker threads, a condition per worker thread, 
+  and a timer thread with its own mutex and condition.
+*/
+ 
+ 
+static PSI_mutex_key key_group_mutex;
+static PSI_mutex_key key_timer_mutex;
+static PSI_mutex_info mutex_list[]=
+{
+  { &key_group_mutex, "group_mutex", 0},
+  { &key_timer_mutex, "timer_mutex", PSI_FLAG_GLOBAL}
+};
+
+static PSI_cond_key key_worker_cond;
+static PSI_cond_key key_timer_cond;
+static PSI_cond_info cond_list[]=
+{
+  { &key_worker_cond, "worker_cond", 0},
+  { &key_timer_cond, "timer_cond", PSI_FLAG_GLOBAL}
+};
+
+static PSI_thread_key key_worker_thread;
+static PSI_thread_key key_timer_thread;
+static PSI_thread_info	thread_list[] =
+{
+ {&key_worker_thread, "worker_thread", 0},
+ {&key_timer_thread, "timer_thread", PSI_FLAG_GLOBAL}
+};
+
+/* Macro to simplify performance schema registration */ 
+#define PSI_register(X) \
+ if(PSI_server) PSI_server->register_ ## X("threadpool", X ## _list, array_elements(X ## _list))
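+
+/*
+  For example (illustration only), PSI_register(mutex) expands to:
+    if(PSI_server) PSI_server->register_mutex("threadpool", mutex_list,
+                                              array_elements(mutex_list));
+*/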
+
+
+struct thread_group_t;
+
+/* Per-thread structure for workers */
+struct worker_thread_t
+{
+  ulonglong  event_count; /* number of requests handled by this thread */
+  thread_group_t* thread_group;   
+  worker_thread_t *next_in_list;
+  worker_thread_t **prev_in_list;
+  
+  mysql_cond_t  cond;
+  bool          woken;
+};
+
+typedef I_P_List<worker_thread_t, I_P_List_adapter<worker_thread_t,
+                 &worker_thread_t::next_in_list,
+                 &worker_thread_t::prev_in_list> 
+                 >
+worker_list_t;
+
+struct connection_t
+{
+
+  THD *thd;
+  thread_group_t *thread_group;
+  connection_t *next_in_queue;
+  connection_t **prev_in_queue;
+  ulonglong abs_wait_timeout;
+  bool logged_in;
+  bool bound_to_poll_descriptor;
+  bool waiting;
+  uint tickets;
+};
+
+typedef I_P_List<connection_t,
+                     I_P_List_adapter<connection_t,
+                                      &connection_t::next_in_queue,
+                                      &connection_t::prev_in_queue>,
+                     I_P_List_null_counter,
+                     I_P_List_fast_push_back<connection_t> >
+connection_queue_t;
+
+struct thread_group_t 
+{
+  mysql_mutex_t mutex;
+  connection_queue_t queue;
+  connection_queue_t high_prio_queue;
+  worker_list_t waiting_threads; 
+  worker_thread_t *listener;
+  pthread_attr_t *pthread_attr;
+  int  pollfd;
+  int  thread_count;
+  int  active_thread_count;
+  int  connection_count;
+  /* Stats for the deadlock detection timer routine.*/
+  int io_event_count;
+  int queue_event_count;
+  ulonglong last_thread_creation_time;
+  int  shutdown_pipe[2];
+  bool shutdown;
+  bool stalled;
+  
+} MY_ALIGNED(512);
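+
+/*
+  Note: each thread_group_t is aligned/padded via MY_ALIGNED(512) above,
+  presumably so that different groups do not share CPU cache lines (avoiding
+  false sharing between groups updated by different threads).
+*/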
+
+static thread_group_t all_groups[MAX_THREAD_GROUPS];
+static uint group_count;
+
+/**
+ Used for printing "pool blocked" message, see
+ print_pool_blocked_message();
+*/
+static ulonglong pool_block_start;
+
+/* Global timer for all groups  */
+struct pool_timer_t
+{
+  mysql_mutex_t mutex;
+  mysql_cond_t cond;
+  volatile uint64 current_microtime;
+  volatile uint64 next_timeout_check;
+  int  tick_interval;
+  bool shutdown;
+};
+
+static pool_timer_t pool_timer;
+
+static void queue_put(thread_group_t *thread_group, connection_t *connection);
+static int  wake_thread(thread_group_t *thread_group);
+static void handle_event(connection_t *connection);
+static int  wake_or_create_thread(thread_group_t *thread_group);
+static int  create_worker(thread_group_t *thread_group);
+static void *worker_main(void *param);
+static void check_stall(thread_group_t *thread_group);
+static void connection_abort(connection_t *connection);
+void tp_post_kill_notification(THD *thd);
+static void set_wait_timeout(connection_t *connection);
+static void set_next_timeout_check(ulonglong abstime);
+static void print_pool_blocked_message(bool);
+
+/**
+ Asynchronous network IO.
+ 
+ We use the native edge-triggered network IO multiplexing facility. 
+ This maps to different APIs on different Unixes.
+ 
+ Currently supported are Linux with epoll, Solaris with event ports, and
+ OSX and BSD with kevent. All of those APIs are used with one-shot flags
+ (the event is signalled once the client has written something into the socket;
+ the socket is then removed from the "poll-set" until the command is finished,
+ and we need to re-arm/re-register the socket).
+ 
+ No implementation for poll/select/AIO is currently provided.
+ 
+ The API closely resembles all of the above-mentioned platform APIs 
+ and consists of the following functions. 
+ 
+ - io_poll_create()
+ Creates an io_poll descriptor 
+ On Linux: epoll_create()
+ 
+ - io_poll_associate_fd(int poll_fd, int fd, void *data)
+ Associate a file descriptor with the io poll descriptor 
+ On Linux : epoll_ctl(..EPOLL_CTL_ADD)
+ 
+ - io_poll_disassociate_fd(int pollfd, int fd)
+  Disassociate a file descriptor from the io poll descriptor 
+  On Linux: epoll_ctl(..EPOLL_CTL_DEL)
+ 
+ 
+ - io_poll_start_read(int poll_fd,int fd, void *data)
+ The same as io_poll_associate_fd(), but can only be used after 
+ io_poll_associate_fd() has been called (re-arms the descriptor).
+ On Linux : epoll_ctl(..EPOLL_CTL_MOD)
+ 
+ - io_poll_wait (int pollfd, native_event *native_events, int maxevents, 
+   int timeout_ms)
+ 
+ Wait until one or more descriptors added with io_poll_associate_fd() 
+ or io_poll_start_read() become readable. Data associated with the 
+ descriptors can be retrieved from the native_events array using the 
+ native_event_get_userdata() function.
+
+ 
+ On Linux: epoll_wait()
+*/
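+
+/*
+  Usage sketch (illustration only; simplified from listener() and start_io()
+  below, with "fd" standing for a connection's socket descriptor):
+
+    int pollfd= io_poll_create();
+    io_poll_associate_fd(pollfd, fd, connection);           // register once
+
+    native_event events[MAX_EVENTS];
+    int cnt= io_poll_wait(pollfd, events, MAX_EVENTS, -1);  // block until ready
+    for (int i= 0; i < cnt; i++)
+    {
+      connection_t *c= (connection_t *)native_event_get_userdata(&events[i]);
+      ... handle the request for c ...
+      io_poll_start_read(pollfd, fd, c);                    // re-arm one-shot event
+    }
+*/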
+
+#if defined (__linux__)
+#ifndef EPOLLRDHUP
+/* Early 2.6 kernels did not have EPOLLRDHUP */
+#define EPOLLRDHUP 0
+#endif
+static int io_poll_create()
+{
+  return epoll_create(1);
+}
+
+
+int io_poll_associate_fd(int pollfd, int fd, void *data)
+{
+  struct epoll_event ev;
+  ev.data.u64= 0; /* Keep valgrind happy */
+  ev.data.ptr= data;
+  ev.events=  EPOLLIN|EPOLLET|EPOLLERR|EPOLLRDHUP|EPOLLONESHOT;
+  return epoll_ctl(pollfd, EPOLL_CTL_ADD,  fd, &ev);
+}
+
+
+
+int io_poll_start_read(int pollfd, int fd, void *data)
+{
+  struct epoll_event ev;
+  ev.data.u64= 0; /* Keep valgrind happy */
+  ev.data.ptr= data;
+  ev.events=  EPOLLIN|EPOLLET|EPOLLERR|EPOLLRDHUP|EPOLLONESHOT;
+  return epoll_ctl(pollfd, EPOLL_CTL_MOD,  fd, &ev); 
+}
+
+int io_poll_disassociate_fd(int pollfd, int fd)
+{
+  struct epoll_event ev;
+  return epoll_ctl(pollfd, EPOLL_CTL_DEL,  fd, &ev);
+}
+
+
+/*
+ Wrapper around epoll_wait.
+ NOTE - in case of EINTR, it restarts with original timeout. Since we use
+ either infinite or 0 timeouts, this is not critical
+*/
+int io_poll_wait(int pollfd, native_event *native_events, int maxevents, 
+              int timeout_ms)
+{
+  int ret;
+  do 
+  {
+    ret = epoll_wait(pollfd, native_events, maxevents, timeout_ms);
+  }
+  while(ret == -1 && errno == EINTR);
+  return ret;
+}
+
+
+static void *native_event_get_userdata(native_event *event)
+{
+  return event->data.ptr;
+}
+
+#elif defined (__FreeBSD__) || defined (__APPLE__)
+int io_poll_create()
+{
+  return kqueue();
+}
+
+int io_poll_start_read(int pollfd, int fd, void *data)
+{
+  struct kevent ke;
+  EV_SET(&ke, fd, EVFILT_READ, EV_ADD|EV_ONESHOT, 
+         0, 0, data);
+  return kevent(pollfd, &ke, 1, 0, 0, 0); 
+}
+
+
+int io_poll_associate_fd(int pollfd, int fd, void *data)
+{
+  /* On kqueue, initial association uses the same EV_ADD|EV_ONESHOT registration. */
+  return io_poll_start_read(pollfd, fd, data); 
+}
+
+
+int io_poll_disassociate_fd(int pollfd, int fd)
+{
+  struct kevent ke;
+  EV_SET(&ke,fd, EVFILT_READ, EV_DELETE, 0, 0, NULL);
+  return kevent(pollfd, &ke, 1, 0, 0, 0);
+}
+
+
+int io_poll_wait(int pollfd, struct kevent *events, int maxevents, int timeout_ms)
+{
+  struct timespec ts;
+  int ret;
+  if (timeout_ms >= 0)
+  {
+    ts.tv_sec= timeout_ms/1000;
+    ts.tv_nsec= (timeout_ms%1000)*1000000;
+  }
+  do
+  {
+    ret= kevent(pollfd, 0, 0, events, maxevents, 
+               (timeout_ms >= 0)?&ts:NULL);
+  }
+  while (ret == -1 && errno == EINTR);
+  return ret;
+}
+
+static void* native_event_get_userdata(native_event *event)
+{
+  return event->udata;
+}
+
+#elif defined (__sun)
+
+static int io_poll_create()
+{
+  return port_create();
+}
+
+int io_poll_start_read(int pollfd, int fd, void *data)
+{
+  return port_associate(pollfd, PORT_SOURCE_FD, fd, POLLIN, data);
+}
+
+static int io_poll_associate_fd(int pollfd, int fd, void *data)
+{
+  return io_poll_start_read(pollfd, fd, data);
+}
+
+int io_poll_disassociate_fd(int pollfd, int fd)
+{
+  return port_dissociate(pollfd, PORT_SOURCE_FD, fd);
+}
+
+int io_poll_wait(int pollfd, native_event *events, int maxevents, int timeout_ms)
+{
+  struct timespec ts;
+  int ret;
+  uint_t nget= 1;
+  if (timeout_ms >= 0)
+  {
+    ts.tv_sec= timeout_ms/1000;
+    ts.tv_nsec= (timeout_ms%1000)*1000000;
+  }
+  do
+  {
+    ret= port_getn(pollfd, events, maxevents, &nget,
+            (timeout_ms >= 0)?&ts:NULL);
+  }
+  while (ret == -1 && errno == EINTR);
+  DBUG_ASSERT(nget < INT_MAX);
+  return (int)nget;
+}
+
+static void* native_event_get_userdata(native_event *event)
+{
+  return event->portev_user;
+}
+#else
+#error not ported yet to this OS
+#endif
+
+
+/* Dequeue element from a workqueue */
+
+static connection_t *queue_get(thread_group_t *thread_group)
+{
+  DBUG_ENTER("queue_get");
+  thread_group->queue_event_count++;
+  connection_t *c;
+
+  if ((c= thread_group->high_prio_queue.front()))
+  {
+    thread_group->high_prio_queue.remove(c);
+  }
+  else if ((c= thread_group->queue.front()))
+  {
+    thread_group->queue.remove(c);
+  }
+  DBUG_RETURN(c);  
+}
+
+
+/* 
+  Handle wait timeout: 
+  find connections that have been idle for too long and kill them.
+  Also, recalculate the time when the next timeout check should run.
+*/
+
+static void timeout_check(pool_timer_t *timer)
+{
+  DBUG_ENTER("timeout_check");
+  
+  mysql_mutex_lock(&LOCK_thread_count);
+  Thread_iterator it= global_thread_list_begin();
+  Thread_iterator end= global_thread_list_end();
+
+  /* Reset next timeout check, it will be recalculated in the loop below */
+  my_atomic_fas64((volatile int64*)&timer->next_timeout_check, ULONGLONG_MAX);
+
+  THD *thd;
+  for ( ; it != end; ++it)
+  {
+    thd= (*it);
+    if (thd->net.reading_or_writing != 1)
+      continue;
+ 
+    connection_t *connection= (connection_t *)thd->event_scheduler.data;
+    if (!connection)
+    {
+      /* 
+        Connection does not have scheduler data. This happens, for example,
+        if the THD belongs to a different scheduler, one that is listening on extra_port.
+      */
+      continue;
+    }
+
+    if(connection->abs_wait_timeout < timer->current_microtime)
+    {
+      /* Wait timeout exceeded, kill connection. */
+      mysql_mutex_lock(&thd->LOCK_thd_data);
+      thd->killed = THD::KILL_CONNECTION;
+      tp_post_kill_notification(thd);
+      mysql_mutex_unlock(&thd->LOCK_thd_data);
+    }
+    else 
+    {
+      set_next_timeout_check(connection->abs_wait_timeout);
+    }
+  }
+  mysql_mutex_unlock(&LOCK_thread_count);
+  DBUG_VOID_RETURN;
+}
+
+
+/* 
+  Timer thread. 
+ 
+  Periodically checks whether one of the thread groups is stalled. Stalls
+  happen if events are not being dequeued from the queue or from the network.
+  The primary reason for a stall is a long-running request that does not block.
+  It can also happen that a thread is waiting but wait_begin/wait_end was
+  forgotten by the storage engine. The timer thread will create a new thread
+  in the group in case of a stall.
+ 
+  Besides checking for stalls, the timer thread is also responsible for
+  terminating clients that have been idle for longer than wait_timeout seconds.
+
+  TODO: Let the timer sleep for a long time if there is no work to be done.
+  Currently it wakes up rather often on an idle server.
+*/
+
+static void* timer_thread(void *param)
+{
+  uint i;
+  pool_timer_t* timer=(pool_timer_t *)param;
+
+  my_thread_init();
+  DBUG_ENTER("timer_thread");
+  timer->next_timeout_check= ULONGLONG_MAX;
+  timer->current_microtime= my_microsecond_getsystime();
+
+  for(;;)
+  {
+    struct timespec ts;
+    int err;
+
+    set_timespec_nsec(ts,timer->tick_interval*1000000);
+    mysql_mutex_lock(&timer->mutex);
+    err= mysql_cond_timedwait(&timer->cond, &timer->mutex, &ts);
+    if (timer->shutdown)
+    {
+      mysql_mutex_unlock(&timer->mutex);
+      break;
+    }
+    if (err == ETIMEDOUT)
+    {
+      timer->current_microtime= my_microsecond_getsystime();
+      
+      /* Check stalls in thread groups */
+      for(i=0; i< array_elements(all_groups);i++)
+      {
+        if(all_groups[i].connection_count)
+           check_stall(&all_groups[i]);
+      }
+      
+      /* Check if any client exceeded wait_timeout */
+      if (timer->next_timeout_check <= timer->current_microtime)
+        timeout_check(timer);
+    }
+    mysql_mutex_unlock(&timer->mutex);
+  }
+
+  mysql_mutex_destroy(&timer->mutex);
+  my_thread_end();
+  return NULL;
+}
+
+
+
+void check_stall(thread_group_t *thread_group)
+{
+  if (mysql_mutex_trylock(&thread_group->mutex) != 0)
+  {
+    /* Something is happening; don't disturb. */
+    return;
+  }
+
+  /*
+    Check if a listener is present. If not, check whether any IO
+    events were dequeued since the last check. If not, this means the
+    listener is either stuck in a tight loop or thd_wait_begin()
+    was forgotten. Create a new worker (it will make itself the listener).
+  */
+  if (!thread_group->listener && !thread_group->io_event_count)
+  {
+    wake_or_create_thread(thread_group);
+    mysql_mutex_unlock(&thread_group->mutex);
+    return;
+  }
+  
+  /*  Reset io event count */
+  thread_group->io_event_count= 0;
+
+  /* 
+    Check whether requests from the workqueue are being dequeued.
+
+    The stall detection and resolution works as follows:
+
+    1. There is a counter thread_group->queue_event_count for the number of 
+       events removed from the queue. Timer resets the counter to 0 on each run.
+    2. Timer determines stall if this counter remains 0 since last check
+       and the queue is not empty.
+    3. Once the timer has determined a stall, it sets the thread_group->stalled
+       flag and wakes an idle worker (or creates a new one, subject to throttling).
+    4. The stalled flag is reset when an event is dequeued.
+
+    Q: Will this handling lead to unbounded growth of threads, if the queue
+    stalls permanently?
+    A: No. If the queue stalls permanently, it is an indication of many very long
+    simultaneous queries. The maximum number of simultaneous queries is 
+    max_connections; furthermore we have the threadpool_max_threads limit, above
+    which no worker threads are created. So in case of a flood of very long 
+    queries, the threadpool would slowly approach thread-per-connection behavior.
+    NOTE:
+    If long queries never wait, creation of new threads is done by the timer,
+    so it is slower than in real thread-per-connection. However, if long queries 
+    do wait and indicate that via the thd_wait_begin/end callbacks, thread
+    creation will be faster.
+  */
+  if ((!thread_group->high_prio_queue.is_empty() ||
+      !thread_group->queue.is_empty()) && !thread_group->queue_event_count)
+  {
+    thread_group->stalled= true;
+    wake_or_create_thread(thread_group);
+  }
+  
+  /* Reset queue event count */
+  thread_group->queue_event_count= 0;
+  
+  mysql_mutex_unlock(&thread_group->mutex);
+}
+
+
+static void start_timer(pool_timer_t* timer)
+{
+  pthread_t thread_id;
+  DBUG_ENTER("start_timer");
+  mysql_mutex_init(key_timer_mutex,&timer->mutex, NULL);
+  mysql_cond_init(key_timer_cond, &timer->cond, NULL);
+  timer->shutdown = false;
+  mysql_thread_create(key_timer_thread,&thread_id, NULL, timer_thread, timer);
+  DBUG_VOID_RETURN;
+}
+
+
+static void stop_timer(pool_timer_t *timer)
+{
+  DBUG_ENTER("stop_timer");
+  mysql_mutex_lock(&timer->mutex);
+  timer->shutdown = true;
+  mysql_cond_signal(&timer->cond);
+  mysql_mutex_unlock(&timer->mutex);
+  DBUG_VOID_RETURN;
+}
+
+
+/**
+  Poll for socket events and distribute them to worker threads.
+  In many cases the current thread will handle a single event itself.
+  
+  @return a ready connection, or NULL on shutdown
+*/
+static connection_t * listener(worker_thread_t *current_thread, 
+                               thread_group_t *thread_group)
+{
+  DBUG_ENTER("listener");
+  connection_t *retval= NULL;
+
+  for(;;)
+  {
+    native_event ev[MAX_EVENTS];
+    int cnt;
+    
+    if (thread_group->shutdown)
+      break;
+  
+    cnt = io_poll_wait(thread_group->pollfd, ev, MAX_EVENTS, -1);
+    
+    if (cnt <=0)
+    {
+      DBUG_ASSERT(thread_group->shutdown);
+      break;
+    }
+
+    mysql_mutex_lock(&thread_group->mutex);
+
+    if (thread_group->shutdown)
+    {
+      mysql_mutex_unlock(&thread_group->mutex);
+      break;
+    }
+    
+    thread_group->io_event_count += cnt;  
+    
+    /* 
+     We got some network events and need to make decisions: whether the
+     listener should handle events itself, and whether to wake any worker
+     threads so they can handle events.
+     
+     Q1: Should the listener handle an event itself, or put all events into 
+     the queue and let workers handle the events?
+     
+     Solution:
+     Generally, a listener that handles events itself is preferable. We do not 
+     want the listener thread to change its state from waiting to running too 
+     often. Since the listener has just woken from poll, it had better use its
+     time slice and do some work. Besides, events that are not handled go to
+     the queue, and often another worker must be woken up to handle the
+     event. This is not good, as we want to avoid wakeups.
+     
+     The downside of a listener that also handles queries is that we can
+     potentially leave the thread group for a long time without picking up new 
+     network events. It is not a major problem, because this stall will be
+     detected sooner or later by the timer thread. Still, relying on the timer
+     is not always good, because it may "tick" too slowly (large timer_interval).
+     
+     We use the following strategy to solve this problem: if the queue was not
+     empty, we suspect a flood of network events and the listener stays.
+     Otherwise, it handles a query.
+     
+     
+     Q2: If the queue is not empty, how many workers should be woken?
+     
+     Solution:
+     We generally try to keep one thread per group active (threads handling 
+     queries are considered active, unless they are stuck inside some "wait").
+     Thus, we will wake only one worker, and only if there are no active 
+     threads currently and the listener is not going to handle a query. When we 
+     don't wake, we hope that the currently active threads will finish fast and 
+     handle the queue. If this does not happen, the timer thread will detect the
+     stall and wake a worker.
+     
+     NOTE: Currently nothing is done to detect or prevent long queuing times. 
+     A solution for the future would be to give up the "one active thread per 
+     group" principle, if events stay in the queue for too long, and just wake 
+     more workers.
+    */
+    
+    bool listener_picks_event= thread_group->high_prio_queue.is_empty() &&
+      thread_group->queue.is_empty();
+    
+    /* 
+      If listener_picks_event is set, the listener thread will handle the first 
+      event and put the rest into the queue. If listener_picks_event is not set, 
+      all events go to the queue.
+    */
+    for(int i=(listener_picks_event)?1:0; i < cnt ; i++)
+    {
+      connection_t *c= (connection_t *)native_event_get_userdata(&ev[i]);
+      if (c->tickets > 0 && thd_is_transaction_active(c->thd))
+      {
+        c->tickets--;
+        thread_group->high_prio_queue.push_back(c);
+      }
+      else
+      {
+        c->tickets= threadpool_high_prio_tickets;
+        thread_group->queue.push_back(c);
+      }
+    }
+    
+    if (listener_picks_event)
+    {
+      /* Handle the first event. */
+      retval= (connection_t *)native_event_get_userdata(&ev[0]);
+      mysql_mutex_unlock(&thread_group->mutex);
+      break;
+    }
+
+    if(thread_group->active_thread_count==0)
+    {
+      /* We added some work items to queue, now wake a worker. */
+      if(wake_thread(thread_group))
+      {
+        /* 
+          Wake failed, hence the group has no idle threads. Now check if there
+          are any threads in the group except the listener.
+        */ 
+        if(thread_group->thread_count == 1)
+        {
+           /*
+             Currently there is no worker thread in the group, as indicated by
+             thread_count == 1 (this means the listener is the only thread in 
+             the group).
+             The queue is not empty, and the listener is not going to handle
+             events. In order to drain the queue, we create a worker here.
+             Alternatively, we could just rely on the timer to detect the stall
+             and create a thread, but waiting for the timer would be an
+             inefficient and pointless delay.
+           */
+           create_worker(thread_group);
+        }
+      }
+    }
+    mysql_mutex_unlock(&thread_group->mutex);
+  }
+
+  DBUG_RETURN(retval);
+}
+
+/**
+  Adjust thread counters in the group and globally 
+  whenever a thread is created or is about to exit.
+
+  @param thread_group
+  @param count -  1, when a new thread is created
+                 -1, when a thread is about to exit
+*/
+
+static void add_thread_count(thread_group_t *thread_group, int32 count)
+{
+  thread_group->thread_count += count;
+  /* Workers start out and end up in the "active" state. */
+  thread_group->active_thread_count += count;
+  my_atomic_add32(&tp_stats.num_worker_threads, count);
+}
+
+
+/**
+  Creates a new worker thread. 
+  thread_group->mutex must be held when calling this function.
+
+  NOTE: in rare cases, the number of threads can exceed
+  threadpool_max_threads, because we need at least 2 threads
+  per group to prevent deadlocks (one listener + one worker)
+*/
+
+static int create_worker(thread_group_t *thread_group)
+{
+  pthread_t thread_id;
+  bool max_threads_reached= false;
+  int err;
+  
+  DBUG_ENTER("create_worker");
+  if (tp_stats.num_worker_threads >= (int)threadpool_max_threads
+     && thread_group->thread_count >= 2)
+  {
+    err= 1;
+    max_threads_reached= true;
+    goto end;
+  }
+
+  
+  err= mysql_thread_create(key_worker_thread, &thread_id, 
+         thread_group->pthread_attr, worker_main, thread_group);
+  if (!err)
+  {
+    thread_group->last_thread_creation_time=my_microsecond_getsystime();
+    thread_created++;
+    add_thread_count(thread_group, 1);
+  }
+  else
+  {
+    my_errno= errno;
+  }
+
+end:
+  if (err)
+    print_pool_blocked_message(max_threads_reached);
+  else
+    pool_block_start= 0; /* Reset pool blocked timer, if it was set */
+    
+  DBUG_RETURN(err);
+}
+
+
+/**
+ Calculate the microsecond throttling delay for thread creation.
+ 
+ The value depends on how many threads are already in the group:
+ a small number of threads means no delay; the more threads, the larger
+ the delay.
+ 
+ The actual values were not calculated using any scientific methods.
+ They just look right, and behave well in practice.
+ 
+ TODO: Should throttling depend on thread_pool_stall_limit?
+*/
+static ulonglong microsecond_throttling_interval(thread_group_t *thread_group)
+{
+  int count= thread_group->thread_count;
+  
+  if (count < 4)
+    return 0;
+  
+  if (count < 8)
+    return 50*1000; 
+  
+  if(count < 16)
+    return 100*1000;
+  
+  return 200*1000;
+}
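+
+/*
+  Summary of the delays above: fewer than 4 threads - no delay; 4 to 7
+  threads - 50 ms; 8 to 15 threads - 100 ms; 16 or more threads - 200 ms.
+*/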
+
+
+/**
+  Wakes a worker thread, or creates a new one. 
+  
+  Worker creation is throttled, so that we avoid creating too many threads
+  during a short period of time.
+*/
+static int wake_or_create_thread(thread_group_t *thread_group)
+{
+  DBUG_ENTER("wake_or_create_thread");
+  
+  if (thread_group->shutdown)
+   DBUG_RETURN(0);
+
+  if (wake_thread(thread_group) == 0)
+    DBUG_RETURN(0);
+
+  if (thread_group->thread_count > thread_group->connection_count)
+    DBUG_RETURN(-1);
+
+ 
+  if (thread_group->active_thread_count == 0)
+  {
+    /*
+     We're better off creating a new thread here with no delay: either there 
+     are no workers at all, or they are all blocked and there was no 
+     idle thread to wake up. Smells like a potential deadlock or very slowly 
+     executing requests, e.g. sleeps or user locks.
+    */
+    DBUG_RETURN(create_worker(thread_group));
+  }
+
+  ulonglong now = my_microsecond_getsystime();
+  ulonglong time_since_last_thread_created =
+    (now - thread_group->last_thread_creation_time);
+  
+  /* Throttle thread creation. */  
+  if (time_since_last_thread_created >
+       microsecond_throttling_interval(thread_group))
+  {
+    DBUG_RETURN(create_worker(thread_group));
+  }
+  
+  DBUG_RETURN(-1);
+}
+
+
+
+int thread_group_init(thread_group_t *thread_group, pthread_attr_t* thread_attr)
+{
+  DBUG_ENTER("thread_group_init");
+  thread_group->pthread_attr = thread_attr;
+  mysql_mutex_init(key_group_mutex, &thread_group->mutex, NULL);
+  thread_group->pollfd= -1;
+  thread_group->shutdown_pipe[0]= -1;
+  thread_group->shutdown_pipe[1]= -1;
+  DBUG_RETURN(0);
+}
+
+
+void thread_group_destroy(thread_group_t *thread_group)
+{
+  mysql_mutex_destroy(&thread_group->mutex);
+  if (thread_group->pollfd != -1)
+  {
+    close(thread_group->pollfd);
+    thread_group->pollfd= -1;
+  }
+  for(int i=0; i < 2; i++)
+  {
+    if(thread_group->shutdown_pipe[i] != -1)
+    {
+      close(thread_group->shutdown_pipe[i]);
+      thread_group->shutdown_pipe[i]= -1;
+    }
+  }
+}
+
+/**
+  Wake a sleeping thread from the waiting list
+*/
+
+static int wake_thread(thread_group_t *thread_group)
+{
+  DBUG_ENTER("wake_thread");
+  worker_thread_t *thread = thread_group->waiting_threads.front();
+  if(thread)
+  {
+    thread->woken= true;
+    thread_group->waiting_threads.remove(thread);
+    mysql_cond_signal(&thread->cond);
+    DBUG_RETURN(0);
+  }
+  DBUG_RETURN(1); /* no thread in waiter list => missed wakeup */
+}
+
+
+/**
+  Initiate shutdown for thread group.
+
+  The shutdown is asynchronous; we only need to wake all threads here so 
+  they can finish. We do not wait here until the threads terminate. Final cleanup 
+  of the group (thread_group_destroy) will be done by the last exiting thread.
+*/
+
+static void thread_group_close(thread_group_t *thread_group)
+{
+  DBUG_ENTER("thread_group_close");
+
+  mysql_mutex_lock(&thread_group->mutex);
+  if (thread_group->thread_count == 0) 
+  {
+    mysql_mutex_unlock(&thread_group->mutex);
+    thread_group_destroy(thread_group);
+    DBUG_VOID_RETURN;
+  }
+
+  thread_group->shutdown= true; 
+  thread_group->listener= NULL;
+
+  if (pipe(thread_group->shutdown_pipe))
+  {
+    DBUG_VOID_RETURN;
+  }
+  
+  /* Wake listener */
+  if (io_poll_associate_fd(thread_group->pollfd, 
+      thread_group->shutdown_pipe[0], NULL))
+  {
+    DBUG_VOID_RETURN;
+  }
+  char c= 0;
+  if (write(thread_group->shutdown_pipe[1], &c, 1) < 0)
+    DBUG_VOID_RETURN;
+
+  /* Wake all workers. */
+  while(wake_thread(thread_group) == 0) 
+  { 
+  }
+  
+  mysql_mutex_unlock(&thread_group->mutex);
+
+  DBUG_VOID_RETURN;
+}
+
+
+/* 
+  Add work to the queue. Maybe wake a worker if they all sleep.
+  
+  Currently, this function is only used when new connections need to
+  perform login (this is done in worker threads).
+
+*/
+
+static void queue_put(thread_group_t *thread_group, connection_t *connection)
+{
+  DBUG_ENTER("queue_put");
+
+  mysql_mutex_lock(&thread_group->mutex);
+  connection->tickets= threadpool_high_prio_tickets;
+  thread_group->queue.push_back(connection);
+
+  if (thread_group->active_thread_count == 0)
+    wake_or_create_thread(thread_group);
+
+  mysql_mutex_unlock(&thread_group->mutex);
+
+  DBUG_VOID_RETURN;
+}
+
+
+/* 
+  Prevent too many threads from executing at the same time, if the workload is 
+  not CPU bound.
+*/
+
+static bool too_many_threads(thread_group_t *thread_group)
+{
+  return (thread_group->active_thread_count >= 1+(int)threadpool_oversubscribe 
+   && !thread_group->stalled);
+}
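+
+/*
+  For illustration: with threadpool_oversubscribe= 3 (an example value), a
+  group counts as oversubscribed once more than 1 + 3 = 4 threads are actively
+  executing, unless the group has been flagged as stalled.
+*/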
+
+
+/**
+  Retrieve a connection with a pending event.
+  
+  A pending event in our case means that there is either a pending login request 
+  (if the connection is not yet logged in), or there are unread bytes on the socket.
+
+  If there are currently no pending events, the thread will wait. 
+  If the timeout specified in the abstime parameter passes, the function 
+  returns NULL.
+ 
+  @param current_thread - current worker thread
+  @param thread_group - current thread group
+  @param abstime - absolute wait timeout
+  
+  @return
+  connection with a pending event, or NULL if the timeout has expired or on shutdown.
+*/
+
+connection_t *get_event(worker_thread_t *current_thread, 
+  thread_group_t *thread_group,  struct timespec *abstime)
+{ 
+  DBUG_ENTER("get_event");
+  connection_t *connection = NULL;
+  int err=0;
+
+  mysql_mutex_lock(&thread_group->mutex);
+  DBUG_ASSERT(thread_group->active_thread_count >= 0);
+
+  for(;;) 
+  {
+    bool oversubscribed = too_many_threads(thread_group); 
+    if (thread_group->shutdown)
+     break;
+
+    /* Check if queue is not empty */
+    if (!oversubscribed)
+    {
+      connection = queue_get(thread_group);
+      if(connection)
+        break;
+    }
+
+    /* If there is  currently no listener in the group, become one. */
+    if(!thread_group->listener)
+    {
+      thread_group->listener= current_thread;
+      thread_group->active_thread_count--;
+      mysql_mutex_unlock(&thread_group->mutex);
+
+      connection = listener(current_thread, thread_group);
+
+      mysql_mutex_lock(&thread_group->mutex);
+      thread_group->active_thread_count++;
+      /* There is no listener anymore, it just returned. */
+      thread_group->listener= NULL;
+      break;
+    }
+    
+    /* 
+      Last thing we try before going to sleep is to 
+      pick a single event via epoll, without waiting (timeout 0)
+    */
+    if (!oversubscribed)
+    {
+      native_event nev;
+      if (io_poll_wait(thread_group->pollfd,&nev,1, 0) == 1)
+      {
+        thread_group->io_event_count++;
+        connection = (connection_t *)native_event_get_userdata(&nev);
+        break;
+      }
+    }
+
+    /* And now, finally sleep */ 
+    current_thread->woken = false; /* wake_thread() sets this to true */
+
+    /* 
+      Add the current thread to the head of the waiting list and wait.
+      It is important to add the thread to the head rather than the tail,
+      as it ensures LIFO wakeup order (hot caches, and a working inactivity timeout).
+    */
+    thread_group->waiting_threads.push_front(current_thread);
+    
+    thread_group->active_thread_count--;
+    if (abstime)
+    {
+      err = mysql_cond_timedwait(&current_thread->cond, &thread_group->mutex, 
+                                 abstime);
+    }
+    else
+    {
+      err = mysql_cond_wait(&current_thread->cond, &thread_group->mutex);
+    }
+    thread_group->active_thread_count++;
+    
+    if (!current_thread->woken)
+    {
+      /*
+        The thread was not signalled by wake_thread(); it might be a spurious
+        wakeup or a timeout. Either way, we need to remove ourselves from the
+        list now. If the thread was explicitly woken, the caller removed us
+        from the list.
+      */
+      thread_group->waiting_threads.remove(current_thread);
+    }
+
+    if (err)
+      break;
+  }
+
+  thread_group->stalled= false;
+  mysql_mutex_unlock(&thread_group->mutex);
+ 
+  DBUG_RETURN(connection);
+}
+
+
+
+/**
+  Tells the pool that a worker starts waiting on IO, lock, condition, 
+  sleep() or similar.
+*/
+
+void wait_begin(thread_group_t *thread_group)
+{
+  DBUG_ENTER("wait_begin");
+  mysql_mutex_lock(&thread_group->mutex);
+  thread_group->active_thread_count--;
+  
+  DBUG_ASSERT(thread_group->active_thread_count >=0);
+  DBUG_ASSERT(thread_group->connection_count > 0);
+ 
+  if ((thread_group->active_thread_count == 0) && 
+      (thread_group->high_prio_queue.is_empty() ||
+       thread_group->queue.is_empty() || !thread_group->listener))
+  {
+    /* 
+      Group might stall while this thread waits, thus wake 
+      or create a worker to prevent stall.
+    */
+    wake_or_create_thread(thread_group);
+  }
+  
+  mysql_mutex_unlock(&thread_group->mutex);
+  DBUG_VOID_RETURN;
+}
+
+/**
+  Tells the pool that a worker has finished waiting.
+*/
+
+void wait_end(thread_group_t *thread_group)
+{
+  DBUG_ENTER("wait_end");
+  mysql_mutex_lock(&thread_group->mutex);
+  thread_group->active_thread_count++;
+  mysql_mutex_unlock(&thread_group->mutex);
+  DBUG_VOID_RETURN;
+}
+
+
+/**
+  Allocate/initialize a new connection structure.
+*/
+
+connection_t *alloc_connection(THD *thd)
+{
+  DBUG_ENTER("alloc_connection");
+  
+  connection_t* connection = (connection_t *)my_malloc(sizeof(connection_t),0);
+  if (connection)
+  {
+    connection->thd = thd;
+    connection->waiting= false;
+    connection->logged_in= false;
+    connection->bound_to_poll_descriptor= false;
+    connection->abs_wait_timeout= ULONGLONG_MAX;
+    connection->tickets = 0;
+  }
+  DBUG_RETURN(connection);
+}
+
+
+
+/**
+  Add a new connection to the thread pool.
+*/
+
+void tp_add_connection(THD *thd)
+{
+  DBUG_ENTER("tp_add_connection");
+  
+  add_global_thread(thd);
+  mysql_mutex_unlock(&LOCK_thread_count);
+  connection_t *connection= alloc_connection(thd);
+  if (connection)
+  {
+    thd->event_scheduler.data= connection;
+      
+    /* Assign connection to a group. */
+    thread_group_t *group= 
+      &all_groups[thd->thread_id%group_count];
+    
+    connection->thread_group=group;
+      
+    mysql_mutex_lock(&group->mutex);
+    group->connection_count++;
+    mysql_mutex_unlock(&group->mutex);
+    
+    /*
+       Add the connection to the work queue. The actual login 
+       will be done by a worker thread.
+    */
+    queue_put(group, connection);
+  }
+  else
+  {
+    /* Allocation failed */
+    threadpool_remove_connection(thd);
+  } 
+  DBUG_VOID_RETURN;
+}
+
+
+/**
+  Terminate connection.
+*/
+
+static void connection_abort(connection_t *connection)
+{
+  DBUG_ENTER("connection_abort");
+  thread_group_t *group= connection->thread_group;
+
+  threadpool_remove_connection(connection->thd); 
+  
+  mysql_mutex_lock(&group->mutex);
+  group->connection_count--;
+  mysql_mutex_unlock(&group->mutex);
+
+  my_free(connection);
+  DBUG_VOID_RETURN;
+}
+
+
+/**
+  MySQL scheduler callback : kill connection
+*/
+
+void tp_post_kill_notification(THD *thd)
+{
+  DBUG_ENTER("tp_post_kill_notification");
+  if (current_thd == thd || thd->system_thread)
+    DBUG_VOID_RETURN;
+  
+  if (thd->net.vio)
+    vio_shutdown(thd->net.vio, SHUT_RD);
+  DBUG_VOID_RETURN;
+}
+
+/**
+  MySQL scheduler callback: wait begin
+*/
+
+void tp_wait_begin(THD *thd, int type)
+{
+  DBUG_ENTER("tp_wait_begin");
+  DBUG_ASSERT(thd);
+  connection_t *connection = (connection_t *)thd->event_scheduler.data;
+  if (connection)
+  {
+    DBUG_ASSERT(!connection->waiting);
+    connection->waiting= true;
+    wait_begin(connection->thread_group);
+  }
+  DBUG_VOID_RETURN;
+}
+
+
+/**
+  MySQL scheduler callback: wait end
+*/
+
+void tp_wait_end(THD *thd) 
+{ 
+  DBUG_ENTER("tp_wait_end");
+  DBUG_ASSERT(thd);
+
+  connection_t *connection = (connection_t *)thd->event_scheduler.data;
+  if (connection)
+  {
+    DBUG_ASSERT(connection->waiting);
+    connection->waiting = false;
+    wait_end(connection->thread_group);
+  }
+  DBUG_VOID_RETURN;
+}
+
+
+static void set_next_timeout_check(ulonglong abstime)
+{
+  DBUG_ENTER("set_next_timeout_check");
+  while(abstime < pool_timer.next_timeout_check)
+  {
+    longlong old= (longlong)pool_timer.next_timeout_check;
+    my_atomic_cas64((volatile int64*)&pool_timer.next_timeout_check,
+          &old, abstime);
+  }
+  DBUG_VOID_RETURN;
+}
+
+
+/**
+  Set wait timeout for connection. 
+*/
+
+static void set_wait_timeout(connection_t *c)
+{
+  DBUG_ENTER("set_wait_timeout");
+  /* 
+    Calculate wait deadline for this connection.
+    Instead of using my_microsecond_getsystime() which has a syscall 
+    overhead, use pool_timer.current_microtime and take 
+    into account that its value could be off by at most 
+    one tick interval.
+  */
+
+  c->abs_wait_timeout= pool_timer.current_microtime +
+    1000LL*pool_timer.tick_interval +
+    1000000LL*c->thd->variables.net_wait_timeout;
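+
+  /*
+    Worked example (assumed values): with a 500 ms tick interval and
+    net_wait_timeout= 28800 s, the deadline becomes
+    current_microtime + 500,000 + 28,800,000,000 microseconds.
+  */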
+
+  set_next_timeout_check(c->abs_wait_timeout);
+  DBUG_VOID_RETURN;
+}
+
+
+
+/**
+  Handle a (rare) special case, where a connection needs to 
+  migrate to a different group because group_count has changed
+  after the thread_pool_size setting was modified. 
+*/
+
+static int change_group(connection_t *c, 
+ thread_group_t *old_group,
+ thread_group_t *new_group)
+{ 
+  int ret= 0;
+  int fd = mysql_socket_getfd(c->thd->net.vio->mysql_socket);
+
+  DBUG_ASSERT(c->thread_group == old_group);
+
+  /* Remove connection from the old group. */
+  mysql_mutex_lock(&old_group->mutex);
+  if (c->bound_to_poll_descriptor)
+  {
+    io_poll_disassociate_fd(old_group->pollfd,fd);
+    c->bound_to_poll_descriptor= false;
+  }
+  c->thread_group->connection_count--;
+  mysql_mutex_unlock(&old_group->mutex);
+  
+  /* Add connection to the new group. */
+  mysql_mutex_lock(&new_group->mutex);
+  c->thread_group= new_group;
+  new_group->connection_count++;
+  /* Ensure that there is a listener in the new group. */
+  if (!new_group->thread_count)
+    ret= create_worker(new_group);
+  mysql_mutex_unlock(&new_group->mutex);
+  return ret;
+}
+
+
+static int start_io(connection_t *connection)
+{ 
+  int fd = mysql_socket_getfd(connection->thd->net.vio->mysql_socket);
+
+  /*
+    Usually, a connection stays in the same group for its entire
+    life. However, we do allow group_count to change at runtime,
+    which means that in the rare case when it changes, a connection
+    may need to migrate to another group in order to keep the load
+    between groups equal.
+
+    So we recalculate in which group the connection should be, based
+    on thread_id and the current group count, and migrate if necessary.
+  */ 
+  thread_group_t *group = 
+    &all_groups[connection->thd->thread_id%group_count];
+
+  if (group != connection->thread_group)
+  {
+    if (change_group(connection, connection->thread_group, group))
+      return -1;
+  }
+    
+  /* 
+    Bind to poll descriptor if not yet done. 
+  */ 
+  if (!connection->bound_to_poll_descriptor)
+  {
+    connection->bound_to_poll_descriptor= true;
+    return io_poll_associate_fd(group->pollfd, fd, connection);
+  }
+  
+  return io_poll_start_read(group->pollfd, fd, connection);
+}
+
+
+
+static void handle_event(connection_t *connection)
+{
+
+  DBUG_ENTER("handle_event");
+  int err;
+
+  if (!connection->logged_in)
+  {
+    err= threadpool_add_connection(connection->thd);
+    connection->logged_in= true;
+  }
+  else 
+  {
+    err= threadpool_process_request(connection->thd);
+  }
+
+  if(err)
+    goto end;
+
+  set_wait_timeout(connection);
+  err= start_io(connection);
+
+end:
+  if (err)
+    connection_abort(connection);
+
+  DBUG_VOID_RETURN;
+}
+
+
+
+/**
+  Worker thread's main
+*/
+
+static void *worker_main(void *param)
+{
+  
+  worker_thread_t this_thread;
+  pthread_detach_this_thread();
+  my_thread_init();
+  
+  DBUG_ENTER("worker_main");
+  
+  thread_group_t *thread_group = (thread_group_t *)param;
+
+  /* Init per-thread structure */
+  mysql_cond_init(key_worker_cond, &this_thread.cond, NULL);
+  this_thread.thread_group= thread_group;
+  this_thread.event_count=0;
+
+#ifdef HAVE_PSI_THREAD_INTERFACE
+    PSI_THREAD_CALL(set_thread_user_host)
+      (NULL, 0, NULL, 0);
+#endif
+
+  /* Run event loop */
+  for(;;)
+  {
+    connection_t *connection;
+    struct timespec ts;
+    set_timespec(ts,threadpool_idle_timeout);
+    connection = get_event(&this_thread, thread_group, &ts);
+    if (!connection)
+      break;
+    this_thread.event_count++;
+    handle_event(connection);
+  }
+
+  /* Thread shutdown: cleanup per-worker-thread structure. */
+  mysql_cond_destroy(&this_thread.cond);
+
+  bool last_thread;                    /* last thread in group exits */
+  mysql_mutex_lock(&thread_group->mutex);
+  add_thread_count(thread_group, -1);
+  last_thread= ((thread_group->thread_count == 0) && thread_group->shutdown);
+  mysql_mutex_unlock(&thread_group->mutex);
+
+  /* Last thread in group exits and pool is terminating, destroy group.*/
+  if (last_thread)
+    thread_group_destroy(thread_group);
+
+  my_thread_end();
+  return NULL;
+}
+
+
+bool tp_init()
+{
+  DBUG_ENTER("tp_init");
+  threadpool_started= true;
+  scheduler_init();
+
+  for(uint i=0; i < array_elements(all_groups); i++)
+  {
+    thread_group_init(&all_groups[i], get_connection_attrib());  
+  }
+  tp_set_threadpool_size(threadpool_size);
+  if(group_count == 0)
+  {
+    /* Something went wrong */
+    sql_print_error("Can't set threadpool size to %d",threadpool_size);
+    DBUG_RETURN(1);
+  }
+  PSI_register(mutex);
+  PSI_register(cond);
+  PSI_register(thread);
+  
+  pool_timer.tick_interval= threadpool_stall_limit;
+  start_timer(&pool_timer);
+  DBUG_RETURN(0);
+}
+
+
+void tp_end()
+{
+  DBUG_ENTER("tp_end");
+  
+  if (!threadpool_started)
+    DBUG_VOID_RETURN;
+
+  stop_timer(&pool_timer);
+  for(uint i=0; i< array_elements(all_groups); i++)
+  {
+    thread_group_close(&all_groups[i]);
+  }
+  threadpool_started= false;
+  DBUG_VOID_RETURN;
+}
+
+
+/** Ensure that poll descriptors are created when threadpool_size changes */
+
+void tp_set_threadpool_size(uint size)
+{
+  bool success= true;
+  if (!threadpool_started)
+    return;
+
+  for(uint i=0; i< size; i++)
+  {
+    thread_group_t *group= &all_groups[i];
+    mysql_mutex_lock(&group->mutex);
+    if (group->pollfd == -1)
+    {
+      group->pollfd= io_poll_create();
+      success= (group->pollfd >= 0);
+      if(!success)
+      {
+        sql_print_error("io_poll_create() failed, errno=%d\n", errno);
+        break;
+      }
+    }  
+    mysql_mutex_unlock(&all_groups[i].mutex);
+    if (!success)
+    {
+      group_count= i;
+      return;
+    }
+  }
+  group_count= size;
+}
+
+void tp_set_threadpool_stall_limit(uint limit)
+{
+  if (!threadpool_started)
+    return;
+  mysql_mutex_lock(&(pool_timer.mutex));
+  pool_timer.tick_interval= limit;
+  mysql_mutex_unlock(&(pool_timer.mutex));
+  mysql_cond_signal(&(pool_timer.cond));
+}
+
+
+/**
+ Calculate number of idle/waiting threads in the pool.
+ 
+ Sum idle threads over all groups. 
+ Don't do any locking, it is not required for stats.
+*/
+
+int tp_get_idle_thread_count()
+{
+  int sum=0;
+  for(uint i= 0; 
+      i< array_elements(all_groups) && (all_groups[i].pollfd >= 0); 
+      i++)
+  {
+    sum+= (all_groups[i].thread_count - all_groups[i].active_thread_count);
+  }
+  return sum;
+}
+
+
+/* Report threadpool problems */
+
+/** 
+   Delay in microseconds, after which "pool blocked" message is printed.
+   (30 sec == 30,000,000 usec)
+*/
+#define BLOCK_MSG_DELAY 30*1000000
+
+#define MAX_THREADS_REACHED_MSG \
+"Threadpool could not create additional thread to handle queries, because the \
+number of allowed threads was reached. Increasing 'thread_pool_max_threads' \
+parameter can help in this situation.\n \
+If 'extra_port' parameter is set, you can still connect to the database with \
+superuser account (it must be TCP connection using extra_port as TCP port) \
+and troubleshoot the situation. \
+A likely cause of pool blocks are clients that lock resources for long time. \
+'show processlist' or 'show engine innodb status' can give additional hints."
+
+#define CREATE_THREAD_ERROR_MSG "Can't create threads in threadpool (errno=%d)."
+
+/**
+ Write a message when a blocking situation in the threadpool occurs.
+ The message is written only when the pool has been blocked for BLOCK_MSG_DELAY
+ (30) seconds. It will be just a single message for each blocking situation (to
+ prevent log flood).
+*/
+
+static void print_pool_blocked_message(bool max_threads_reached)
+{
+  ulonglong now;
+  static bool msg_written;
+  
+  now= my_microsecond_getsystime();
+  if (pool_block_start == 0)
+  {
+    pool_block_start= now;
+    msg_written = false;
+    return;
+  }
+  
+  if (now > pool_block_start + BLOCK_MSG_DELAY && !msg_written)
+  {
+    if (max_threads_reached)
+      sql_print_error(MAX_THREADS_REACHED_MSG);
+    else
+      sql_print_error(CREATE_THREAD_ERROR_MSG, my_errno);
+    
+    sql_print_information("Threadpool has been blocked for %u seconds\n",
+      (uint)((now- pool_block_start)/1000000));
+    /* avoid repeated messages for the same blocking situation */
+    msg_written= true;
+  }
+}

=== added file 'Percona-Server/sql/threadpool_win.cc'
--- Percona-Server/sql/threadpool_win.cc	1970-01-01 00:00:00 +0000
+++ Percona-Server/sql/threadpool_win.cc	2013-05-27 12:16:38 +0000
@@ -0,0 +1,763 @@
+/* Copyright (C) 2012 Monty Program Ab
+
+   This program is free software; you can redistribute it and/or modify
+   it under the terms of the GNU General Public License as published by
+   the Free Software Foundation; version 2 of the License.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */
+
+#ifdef _WIN32_WINNT
+#undef _WIN32_WINNT
+#endif
+
+#define _WIN32_WINNT 0x0601
+
+#include <my_global.h>
+#include <violite.h>
+#include <sql_priv.h>
+#include <sql_class.h>
+#include <my_pthread.h>
+#include <scheduler.h>
+#include <sql_connect.h>
+#include <mysqld.h>
+#include <debug_sync.h>
+#include <threadpool.h>
+#include <windows.h>
+
+
+/*
+  Threadpool API is not available on XP. We still want to compile a single 
+  version on Windows, but use the latest functionality if available.
+  We cannot use the threadpool functionality directly, since the executable 
+  won't start on XP and the loader will complain about missing symbols.
+
+  We solve this the usual way it is done on Windows, i.e. with dynamic loading.
+  We'll need to load a lot of functions, and we make this less painful with the
+  WEAK_SYMBOL macro below.
+*/
+
+/*
+ WEAK_SYMBOL(return_type, function_name, argument_type1,..,argument_typeN)
+
+ Declare and load a function pointer from kernel32. The name of the static 
+ variable that holds the function pointer is my_<original function name>.
+ This should be combined with 
+ #define <original function name> my_<original function name>
+ so that one can use Windows APIs transparently, without worrying whether
+ they are present in a particular version or not.
+
+ Of course, prior to using any function there should be a check for the correct
+ Windows version, or a check that the function pointer is not NULL.
+*/
+#define WEAK_SYMBOL(return_type, function, ...) \
+  typedef return_type (WINAPI *pFN_##function)(__VA_ARGS__); \
+  static pFN_##function my_##function = (pFN_##function) \
+    (GetProcAddress(GetModuleHandle("kernel32"),#function))
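+
+/*
+  For example (illustration only), WEAK_SYMBOL(VOID, StartThreadpoolIo, PTP_IO)
+  expands to:
+
+    typedef VOID (WINAPI *pFN_StartThreadpoolIo)(PTP_IO);
+    static pFN_StartThreadpoolIo my_StartThreadpoolIo= (pFN_StartThreadpoolIo)
+      (GetProcAddress(GetModuleHandle("kernel32"), "StartThreadpoolIo"));
+
+  which, combined with "#define StartThreadpoolIo my_StartThreadpoolIo" below,
+  lets callers use the API name transparently.
+*/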
+
+WEAK_SYMBOL(VOID, CancelThreadpoolIo, PTP_IO);
+#define CancelThreadpoolIo my_CancelThreadpoolIo
+
+WEAK_SYMBOL(VOID, CloseThreadpool, PTP_POOL);
+#define CloseThreadpool my_CloseThreadpool
+
+WEAK_SYMBOL(VOID, CloseThreadpoolIo, PTP_IO);
+#define CloseThreadpoolIo my_CloseThreadpoolIo
+
+WEAK_SYMBOL(VOID, CloseThreadpoolTimer,PTP_TIMER);
+#define CloseThreadpoolTimer my_CloseThreadpoolTimer
+
+WEAK_SYMBOL(VOID, CloseThreadpoolWait,PTP_WAIT);
+#define CloseThreadpoolWait my_CloseThreadpoolWait
+
+WEAK_SYMBOL(PTP_POOL, CreateThreadpool,PVOID);
+#define CreateThreadpool my_CreateThreadpool
+
+WEAK_SYMBOL(PTP_IO, CreateThreadpoolIo, HANDLE, PTP_WIN32_IO_CALLBACK, PVOID ,
+  PTP_CALLBACK_ENVIRON);
+#define CreateThreadpoolIo my_CreateThreadpoolIo
+
+WEAK_SYMBOL(PTP_TIMER, CreateThreadpoolTimer, PTP_TIMER_CALLBACK ,
+ PVOID pv, PTP_CALLBACK_ENVIRON pcbe);
+#define CreateThreadpoolTimer my_CreateThreadpoolTimer
+
+WEAK_SYMBOL(PTP_WAIT, CreateThreadpoolWait, PTP_WAIT_CALLBACK, PVOID, 
+  PTP_CALLBACK_ENVIRON);
+#define CreateThreadpoolWait my_CreateThreadpoolWait
+
+WEAK_SYMBOL(VOID, DisassociateCurrentThreadFromCallback, PTP_CALLBACK_INSTANCE);
+#define DisassociateCurrentThreadFromCallback my_DisassociateCurrentThreadFromCallback
+
+WEAK_SYMBOL(DWORD, FlsAlloc, PFLS_CALLBACK_FUNCTION);
+#define FlsAlloc my_FlsAlloc
+
+WEAK_SYMBOL(PVOID, FlsGetValue, DWORD);
+#define FlsGetValue my_FlsGetValue
+
+WEAK_SYMBOL(BOOL, FlsSetValue, DWORD, PVOID);
+#define FlsSetValue my_FlsSetValue
+
+WEAK_SYMBOL(VOID, SetThreadpoolThreadMaximum, PTP_POOL, DWORD);
+#define SetThreadpoolThreadMaximum my_SetThreadpoolThreadMaximum
+
+WEAK_SYMBOL(BOOL, SetThreadpoolThreadMinimum, PTP_POOL, DWORD);
+#define SetThreadpoolThreadMinimum my_SetThreadpoolThreadMinimum
+
+WEAK_SYMBOL(VOID, SetThreadpoolTimer, PTP_TIMER, PFILETIME,DWORD,DWORD);
+#define SetThreadpoolTimer my_SetThreadpoolTimer
+
+WEAK_SYMBOL(VOID, SetThreadpoolWait, PTP_WAIT,HANDLE,PFILETIME);
+#define SetThreadpoolWait my_SetThreadpoolWait
+
+WEAK_SYMBOL(VOID, StartThreadpoolIo, PTP_IO);
+#define StartThreadpoolIo my_StartThreadpoolIo
+
+WEAK_SYMBOL(VOID, WaitForThreadpoolIoCallbacks,PTP_IO, BOOL);
+#define WaitForThreadpoolIoCallbacks my_WaitForThreadpoolIoCallbacks
+
+WEAK_SYMBOL(VOID, WaitForThreadpoolTimerCallbacks, PTP_TIMER, BOOL);
+#define WaitForThreadpoolTimerCallbacks my_WaitForThreadpoolTimerCallbacks
+
+WEAK_SYMBOL(VOID, WaitForThreadpoolWaitCallbacks, PTP_WAIT, BOOL);
+#define WaitForThreadpoolWaitCallbacks my_WaitForThreadpoolWaitCallbacks
+
+WEAK_SYMBOL(BOOL, SetFileCompletionNotificationModes, HANDLE, UCHAR);
+#define SetFileCompletionNotificationModes my_SetFileCompletionNotificationModes
+
+WEAK_SYMBOL(BOOL, TrySubmitThreadpoolCallback, PTP_SIMPLE_CALLBACK pfns, 
+  PVOID pv,PTP_CALLBACK_ENVIRON pcbe);
+#define TrySubmitThreadpoolCallback my_TrySubmitThreadpoolCallback
+
+WEAK_SYMBOL(PTP_WORK, CreateThreadpoolWork, PTP_WORK_CALLBACK pfnwk, PVOID pv,
+  PTP_CALLBACK_ENVIRON pcbe);
+#define CreateThreadpoolWork my_CreateThreadpoolWork
+
+WEAK_SYMBOL(VOID, SubmitThreadpoolWork,PTP_WORK pwk);
+#define SubmitThreadpoolWork my_SubmitThreadpoolWork
+
+WEAK_SYMBOL(VOID, CloseThreadpoolWork, PTP_WORK pwk);
+#define CloseThreadpoolWork my_CloseThreadpoolWork 
+
+WEAK_SYMBOL(BOOL, CallbackMayRunLong, PTP_CALLBACK_INSTANCE pci);
+#define CallbackMayRunLong my_CallbackMayRunLong
+
+#if _MSC_VER >= 1600
+/* Stack size manipulation is available only on Win7+; declarations are in VS10 */
+WEAK_SYMBOL(BOOL, SetThreadpoolStackInformation, PTP_POOL, 
+  PTP_POOL_STACK_INFORMATION);
+#define SetThreadpoolStackInformation my_SetThreadpoolStackInformation
+#else /* _MSC_VER < 1600 */
+#define SetThreadpoolCallbackPriority(env,prio)
+typedef enum _TP_CALLBACK_PRIORITY {
+    TP_CALLBACK_PRIORITY_HIGH,
+    TP_CALLBACK_PRIORITY_NORMAL,
+    TP_CALLBACK_PRIORITY_LOW,
+    TP_CALLBACK_PRIORITY_INVALID
+} TP_CALLBACK_PRIORITY;
+#endif
+
+
+/* Log a warning */
+static void tp_log_warning(const char *msg, const char *fct)
+{
+  sql_print_warning("Threadpool: %s. %s failed (last error %d)",msg, fct,
+    GetLastError());
+}
+
+
+PTP_POOL pool;
+DWORD fls;
+
+static bool skip_completion_port_on_success = false;
+
+/*
+  Threadpool callbacks.
+
+  io_completion_callback  - handle client request
+  timer_callback - handle wait timeout (kill connection)
+  shm_read_callback, shm_close_callback - shared memory stuff
+  login_callback - user login (submitted as threadpool work)
+
+*/
+
+static void CALLBACK timer_callback(PTP_CALLBACK_INSTANCE instance, 
+  PVOID context, PTP_TIMER timer);
+
+static void CALLBACK io_completion_callback(PTP_CALLBACK_INSTANCE instance, 
+  PVOID context,  PVOID overlapped,  ULONG io_result, ULONG_PTR nbytes, PTP_IO io);
+
+static void CALLBACK shm_read_callback(PTP_CALLBACK_INSTANCE instance,
+  PVOID Context, PTP_WAIT wait,TP_WAIT_RESULT wait_result);
+
+static void CALLBACK shm_close_callback(PTP_CALLBACK_INSTANCE instance,
+  PVOID Context, PTP_WAIT wait,TP_WAIT_RESULT wait_result);
+
+static void check_thread_init();
+
+/* Get current time as Windows time */
+static ulonglong now()
+{
+  ulonglong current_time;
+  GetSystemTimeAsFileTime((PFILETIME)&current_time);
+  return current_time;
+}
+
+/* 
+  Connection structure, encapsulates THD + structures for asynchronous
+  IO and pool.
+*/
+
+struct connection_t
+{
+  THD *thd;
+  HANDLE handle;
+  OVERLAPPED overlapped;
+  /* absolute time for wait timeout (as Windows time) */
+  volatile ulonglong timeout; 
+  TP_CALLBACK_ENVIRON callback_environ;
+  PTP_IO  io;
+  PTP_TIMER timer;
+  PTP_WAIT shm_read;
+  /* Callback instance, used to inform the threadpool about long callbacks */
+  PTP_CALLBACK_INSTANCE callback_instance;
+  bool logged_in;
+};
+
+
+void init_connection(connection_t *connection)
+{
+  connection->logged_in = false;
+  connection->handle= 0;
+  connection->io= 0;
+  connection->shm_read= 0;
+  connection->timer= 0;
+  connection->logged_in = false;
+  connection->timeout= ULONGLONG_MAX;
+  connection->callback_instance= 0;
+  memset(&connection->overlapped, 0, sizeof(OVERLAPPED));
+  InitializeThreadpoolEnvironment(&connection->callback_environ);
+  SetThreadpoolCallbackPool(&connection->callback_environ, pool);
+  connection->thd = 0;
+}
+
+
+int init_io(connection_t *connection, THD *thd)
+{
+  connection->thd= thd;
+  Vio *vio = thd->net.vio;
+  switch(vio->type)
+  {
+    case VIO_TYPE_SSL:
+    case VIO_TYPE_TCPIP:
+      connection->handle= (HANDLE)vio->sd;
+      break;
+    case VIO_TYPE_NAMEDPIPE:
+      connection->handle= (HANDLE)vio->hPipe;
+      break;
+    case VIO_TYPE_SHARED_MEMORY:
+      connection->shm_read=  CreateThreadpoolWait(shm_read_callback, connection, 
+        &connection->callback_environ);
+      if (!connection->shm_read)
+      {
+        tp_log_warning("Allocation failed", "CreateThreadpoolWait");
+        return -1;
+      }
+      break;
+    default:
+      abort();
+  }
+
+  if (connection->handle)
+  {
+    /* Performance tweaks (see MSDN documentation) */
+    UCHAR flags= FILE_SKIP_SET_EVENT_ON_HANDLE;
+    if (skip_completion_port_on_success)
+    {
+      flags |= FILE_SKIP_COMPLETION_PORT_ON_SUCCESS;
+    }
+    (void)SetFileCompletionNotificationModes(connection->handle, flags);
+
+    /* Assign io completion callback */
+    connection->io= CreateThreadpoolIo(connection->handle, 
+      io_completion_callback, connection, &connection->callback_environ);
+    if(!connection->io)
+    {
+      tp_log_warning("Allocation failed", "CreateThreadpoolWait");
+      return -1;
+    }
+  }
+  connection->timer= CreateThreadpoolTimer(timer_callback, connection, 
+    &connection->callback_environ);
+  if (!connection->timer)
+  {
+    tp_log_warning("Allocation failed", "CreateThreadpoolWait");
+    return -1;
+  }
+
+  return 0;
+}
+
+
+/*
+  Start asynchronous read
+*/
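+/*
+  A zero-length read is posted, so the completion callback fires as soon as
+  the client sends data, without consuming anything from the stream; the
+  request itself is read later when the connection is processed.
+*/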
+int start_io(connection_t *connection, PTP_CALLBACK_INSTANCE instance)
+{
+  /* Start async read */
+  DWORD num_bytes = 0;
+  static char c;
+  WSABUF buf;
+  buf.buf= &c;
+  buf.len= 0;
+  DWORD flags=0;
+  DWORD last_error= 0;
+
+  int retval;
+  Vio *vio= connection->thd->net.vio;
+
+  if (vio->type == VIO_TYPE_SHARED_MEMORY)
+  {
+      SetThreadpoolWait(connection->shm_read, vio->event_server_wrote, NULL);
+      return 0;
+  }
+  if (vio->type == VIO_CLOSED)
+  {
+    return -1;
+  }
+
+  DBUG_ASSERT(vio->type == VIO_TYPE_TCPIP || 
+    vio->type == VIO_TYPE_SSL ||
+    vio->type == VIO_TYPE_NAMEDPIPE);
+
+  OVERLAPPED *overlapped= &connection->overlapped;
+  PTP_IO io= connection->io;
+  StartThreadpoolIo(io);
+
+  if (vio->type == VIO_TYPE_TCPIP || vio->type == VIO_TYPE_SSL)
+  {
+    /* Start async io (sockets). */
+    if (WSARecv(vio->sd , &buf, 1, &num_bytes, &flags,
+          overlapped,  NULL) == 0)
+    {
+        retval= last_error= 0;
+    }
+    else
+    {
+      retval= -1;
+      last_error=  WSAGetLastError();
+    }
+  }
+  else
+  {
+    /* Start async io (named pipe) */
+    if (ReadFile(vio->hPipe, &c, 0, &num_bytes ,overlapped))
+    {
+      retval= last_error= 0;
+    }
+    else
+    {
+      retval= -1;
+      last_error= GetLastError();
+    }
+  }
+
+  if (retval == 0 || last_error == ERROR_MORE_DATA)
+  {
+    /*
+      IO successfully finished (synchronously). 
+      If skip_completion_port_on_success is set, we need to handle it right 
+      here, because completion callback would not be executed by the pool.
+    */
+    if(skip_completion_port_on_success)
+    {
+      CancelThreadpoolIo(io);
+      io_completion_callback(instance, connection, overlapped, last_error, 
+        num_bytes, io);
+    }
+    return 0;
+  }
+
+  if(last_error == ERROR_IO_PENDING)
+  {
+    return 0;
+  }
+
+  /* Some error occurred */
+  CancelThreadpoolIo(io);
+  return -1;
+}
+
+
+int login(connection_t *connection, PTP_CALLBACK_INSTANCE instance)
+{
+  if (threadpool_add_connection(connection->thd) == 0
+      && init_io(connection, connection->thd) == 0 
+      && start_io(connection, instance) == 0)
+  {
+    return 0;
+  }
+  return -1;
+}
+
+/*
+  Recalculate wait timeout, maybe reset timer. 
+*/
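+/*
+  net_wait_timeout is in seconds, while Windows time (FILETIME) is counted
+  in 100-nanosecond intervals, hence the 10000000LL factor below.
+*/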
+void set_wait_timeout(connection_t *connection, ulonglong old_timeout)
+{
+  ulonglong new_timeout = now() + 
+    10000000LL*connection->thd->variables.net_wait_timeout;
+
+  if (new_timeout < old_timeout)
+  {
+    SetThreadpoolTimer(connection->timer, (PFILETIME) &new_timeout, 0, 1000);
+  }
+  connection->timeout = new_timeout;
+}
+
+
+/* Connection destructor */
+void destroy_connection(connection_t *connection, PTP_CALLBACK_INSTANCE instance)
+{
+  if (instance)
+    DisassociateCurrentThreadFromCallback(instance);
+  if (connection->io)
+  {
+     WaitForThreadpoolIoCallbacks(connection->io, TRUE); 
+     CloseThreadpoolIo(connection->io);
+  }
+
+  if(connection->shm_read)
+  {
+    WaitForThreadpoolWaitCallbacks(connection->shm_read, TRUE);
+    CloseThreadpoolWait(connection->shm_read);
+  }
+
+  if(connection->timer)
+  {
+    SetThreadpoolTimer(connection->timer, 0, 0, 0);
+    WaitForThreadpoolTimerCallbacks(connection->timer, TRUE);
+    CloseThreadpoolTimer(connection->timer);
+  }
+  
+  if (connection->thd)
+  {
+    threadpool_remove_connection(connection->thd);
+  }
+
+  DestroyThreadpoolEnvironment(&connection->callback_environ);
+}
+
+
+
+/*
+  This function should be called first whenever a callback is invoked in the
+  threadpool; it performs my_thread_init() if not yet done.
+*/
+extern ulong thread_created;
+static void check_thread_init()
+{
+  if (FlsGetValue(fls) == NULL)
+  {
+    FlsSetValue(fls, (void *)1);
+    thread_created++;
+    InterlockedIncrement((volatile long *)&tp_stats.num_worker_threads);
+  }
+}
+
+
+/*
+  Decrement the number of worker threads when a thread exits.
+  On Windows, FlsAlloc() provides the thread destruction callbacks.
+*/
+static VOID WINAPI thread_destructor(void *data)
+{
+  if(data)
+  {
+    InterlockedDecrement((volatile long *)&tp_stats.num_worker_threads);
+  }
+}
+
+
+/* Scheduler callback : init */
+bool tp_init(void)
+{
+  fls= FlsAlloc(thread_destructor);
+  pool= CreateThreadpool(NULL);
+  if(!pool)
+  {
+    sql_print_error("Can't create threadpool. "
+      "CreateThreadpool() failed with %d. Likely cause is memory pressure", 
+      GetLastError());
+    exit(1);
+  }
+
+  if (threadpool_max_threads)
+  {
+    SetThreadpoolThreadMaximum(pool,threadpool_max_threads);
+  }
+
+  if (threadpool_min_threads)
+  {
+    if (!SetThreadpoolThreadMinimum(pool, threadpool_min_threads))
+    {
+      tp_log_warning( "Can't set threadpool minimum threads", 
+        "SetThreadpoolThreadMinimum");
+    }
+  }
+
+  /*
+    Control stack size (OS must be Win7 or later, plus corresponding SDK)
+  */
+#if _MSC_VER >=1600
+  if (SetThreadpoolStackInformation)
+  {
+    TP_POOL_STACK_INFORMATION stackinfo;
+    stackinfo.StackCommit = 0;
+    stackinfo.StackReserve = (SIZE_T)my_thread_stack_size;
+    if (!SetThreadpoolStackInformation(pool, &stackinfo))
+    {
+      tp_log_warning("Can't set threadpool stack size", 
+        "SetThreadpoolStackInformation");
+    }
+  }
+#endif
+
+  return 0;
+}
+
+
+/**
+  Scheduler callback : Destroy the scheduler.
+*/
+void tp_end(void)
+{
+  if(pool)
+  {
+    SetThreadpoolThreadMaximum(pool, 0);
+    CloseThreadpool(pool);
+  }
+}
+
+/**
+  Notify pool about connection being killed.
+*/
+void tp_post_kill_notification(THD *thd)
+{
+  if (current_thd == thd)
+    return; /* There is nothing to do.*/
+
+  if (thd->system_thread)
+    return; /* Will crash if we attempt to kill system thread. */
+
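+  /*
+    Shutting down the vio wakes up the connection's pending read, so the
+    completion callback can notice the killed connection and tear it down.
+  */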
+  Vio *vio= thd->net.vio;
+
+  vio_shutdown(vio, SD_BOTH);
+
+}
+
+/*
+  Handle read completion/notification.
+*/
+static VOID CALLBACK io_completion_callback(PTP_CALLBACK_INSTANCE instance, 
+  PVOID context,  PVOID overlapped,  ULONG io_result, ULONG_PTR nbytes, PTP_IO io)
+{
+  if(instance)
+  {
+    check_thread_init();
+  }
+
+  connection_t *connection = (connection_t*)context;
+
+  if (io_result != ERROR_SUCCESS)
+    goto error;
+
+  THD *thd= connection->thd;
+  ulonglong old_timeout = connection->timeout;
+  connection->timeout = ULONGLONG_MAX;
+  connection->callback_instance= instance;
+  if (threadpool_process_request(connection->thd))
+    goto error;
+
+  set_wait_timeout(connection, old_timeout);
+  if(start_io(connection, instance))
+    goto error;
+
+  return;
+
+error:
+  /* Some error has occurred. */
+
+  destroy_connection(connection, instance);
+  free(connection);
+}
+
+
+/* Simple callback for login */
+static void CALLBACK login_callback(PTP_CALLBACK_INSTANCE instance, 
+  PVOID context, PTP_WORK work)
+{
+  if(instance)
+  {
+    check_thread_init();
+  }
+
+  connection_t *connection =(connection_t *)context;
+  if (login(connection, instance) != 0)
+  {
+    destroy_connection(connection, instance);
+    free(connection);
+  }
+}
+
+/*
+  Timer callback.
+  Invoked when connection times out (wait_timeout)
+*/
+static VOID CALLBACK timer_callback(PTP_CALLBACK_INSTANCE instance, 
+  PVOID parameter, PTP_TIMER timer)
+{
+  check_thread_init();
+
+  connection_t *con= (connection_t*)parameter;
+  ulonglong timeout= con->timeout;
+
+  if (timeout <= now())
+  {
+    con->thd->killed = KILL_CONNECTION;
+    if(con->thd->net.vio)
+      vio_shutdown(con->thd->net.vio, SD_BOTH);
+  }
+  else if(timeout != ULONGLONG_MAX)
+  {
+    /* 
+      Reset timer. 
+      There is a tiny possibility of a race condition, since the value of timeout 
+      could have changed to smaller value in the thread doing io callback. 
+
+      Given the relative unimportance of the wait timeout, we accept race 
+      condition.
+    */
+    SetThreadpoolTimer(timer, (PFILETIME)&timeout, 0, 1000);
+  }
+}
+
+
+/*
+  Shared memory read callback.
+  Invoked when read event is set on connection.
+*/
+static void CALLBACK shm_read_callback(PTP_CALLBACK_INSTANCE instance,
+  PVOID context, PTP_WAIT wait,TP_WAIT_RESULT wait_result)
+{
+  connection_t *con= (connection_t *)context;
+  /* Disarm wait. */
+  SetThreadpoolWait(wait, NULL, NULL);
+
+  /* 
+    This is an auto-reset event, and one wakeup has already been consumed by
+    the threadpool, so the current state is "not set". Thus we need to set
+    the event again, or vio_read will hang.
+  */
+  HANDLE h = con->thd->net.vio->event_server_wrote;
+  SetEvent(h);
+  io_completion_callback(instance, context, NULL, 0, 0 , 0);
+}
+
+
+/*
+  Notify the thread pool about a new connection.
+  NOTE: LOCK_thread_count is locked on entry. This function must unlock it.
+*/
+void tp_add_connection(THD *thd)
+{
+  threads.append(thd);
+  mysql_mutex_unlock(&LOCK_thread_count);
+
+  connection_t *con = (connection_t *)malloc(sizeof(connection_t));
+  if(!con)
+  {
+    tp_log_warning("Allocation failed", "tp_add_connection");
+    threadpool_remove_connection(thd);
+    return;
+  }
+
+  init_connection(con);
+  con->thd= thd;
+  thd->event_scheduler.data= con;
+
+  /* Try to login asynchronously, using threads in the pool */
+  PTP_WORK wrk =  CreateThreadpoolWork(login_callback,con, &con->callback_environ);
+  if (wrk)
+  {
+    SubmitThreadpoolWork(wrk);
+    CloseThreadpoolWork(wrk);
+  }
+  else
+  {
+    /* Likely memory pressure */
+    login_callback(NULL, con, NULL); /* deletes connection if something goes wrong */
+  }
+}
+
+
+/**
+  Sets the number of idle threads the thread pool maintains in anticipation of new
+  requests.
+*/
+void tp_set_min_threads(uint val)
+{
+  if (pool)
+    SetThreadpoolThreadMinimum(pool, val);
+}
+
+void tp_set_max_threads(uint val)
+{
+  if (pool)
+    SetThreadpoolThreadMaximum(pool, val);
+}
+
+void tp_wait_begin(THD *thd, int type)
+{
+  DBUG_ASSERT(thd);
+
+  /*
+    Signal to the threadpool whenever a callback can run long. Currently,
+    binlog waits are a good candidate, as they can be really long.
+  if (type == THD_WAIT_BINLOG)
+  {
+    connection_t *connection= (connection_t *)thd->event_scheduler.data;
+    if(connection && connection->callback_instance)
+    {
+      CallbackMayRunLong(connection->callback_instance);
+      /* 
+        Reset instance, to avoid calling CallbackMayRunLong  twice within 
+        the same callback (it is an error according to docs).
+      */
+      connection->callback_instance= 0;
+    }
+  }
+}
+
+void tp_wait_end(THD *thd) 
+{
+  /* Do we need to do anything ? */
+}
+
+
+/**
+ Number of idle threads in the pool.
+ This information is not available in the Windows implementation,
+ so this function always returns 0.
+*/
+int tp_get_idle_thread_count()
+{
+  return 0;
+}
+

=== modified file 'Percona-Server/storage/innobase/handler/ha_innodb.cc'
--- Percona-Server/storage/innobase/handler/ha_innodb.cc	2013-05-26 10:55:00 +0000
+++ Percona-Server/storage/innobase/handler/ha_innodb.cc	2013-05-27 12:16:38 +0000
@@ -743,6 +743,10 @@
   (char*) &export_vars.innodb_rows_updated,		  SHOW_LONG},
   {"num_open_files",
   (char*) &export_vars.innodb_num_open_files,		  SHOW_LONG},
+  {"read_views_memory",
+  (char*) &export_vars.innodb_read_views_memory,	  SHOW_LONG},
+  {"descriptors_memory",
+  (char*) &export_vars.innodb_descriptors_memory,	  SHOW_LONG},
   {"s_lock_os_waits",
   (char*) &export_vars.innodb_s_lock_os_waits,		  SHOW_LONGLONG},
   {"s_lock_spin_rounds",

=== modified file 'Percona-Server/storage/innobase/include/srv0srv.h'
--- Percona-Server/storage/innobase/include/srv0srv.h	2013-05-22 17:21:32 +0000
+++ Percona-Server/storage/innobase/include/srv0srv.h	2013-05-27 12:16:38 +0000
@@ -406,6 +406,9 @@
 
 /*-------------------------------------------*/
 
+extern ulint	srv_read_views_memory;
+extern ulint	srv_descriptors_memory;
+
 extern ibool	srv_print_innodb_monitor;
 extern ibool	srv_print_innodb_lock_monitor;
 extern ibool	srv_print_innodb_tablespace_monitor;
@@ -960,6 +963,8 @@
 	ulint innodb_num_open_files;		/*!< fil_n_file_opened */
 	ulint innodb_truncated_status_writes;	/*!< srv_truncated_status_writes */
 	ulint innodb_available_undo_logs;       /*!< srv_available_undo_logs */
+	ulint innodb_read_views_memory;		/*!< srv_read_views_memory */
+	ulint innodb_descriptors_memory;	/*!< srv_descriptors_memory */
 	ib_int64_t innodb_s_lock_os_waits;
 	ib_int64_t innodb_s_lock_spin_rounds;
 	ib_int64_t innodb_s_lock_spin_waits;

=== modified file 'Percona-Server/storage/innobase/log/log0log.cc'
--- Percona-Server/storage/innobase/log/log0log.cc	2013-05-10 13:29:38 +0000
+++ Percona-Server/storage/innobase/log/log0log.cc	2013-05-27 12:16:38 +0000
@@ -285,6 +285,8 @@
 
 		os_thread_sleep(10000);
 
+		mutex_enter(&(log->mutex));
+
 		goto loop;
 	}
 
@@ -440,9 +442,13 @@
 
 		if (tracked_lsn_age >= log->log_group_capacity) {
 
-			fprintf(stderr, " InnoDB: Error: the age of the "
+			fprintf(stderr, "InnoDB: Error: the age of the "
 				"oldest untracked record exceeds the log "
 				"group capacity!\n");
+			fprintf(stderr, "InnoDB: Error: stopping the log "
+				"tracking thread at LSN " LSN_PF "\n",
+				tracked_lsn);
+			srv_track_changed_pages = FALSE;
 		}
 	}
 

=== modified file 'Percona-Server/storage/innobase/read/read0read.cc'
--- Percona-Server/storage/innobase/read/read0read.cc	2013-05-24 09:42:22 +0000
+++ Percona-Server/storage/innobase/read/read0read.cc	2013-05-27 12:16:38 +0000
@@ -189,6 +189,8 @@
 	if (view == NULL) {
 		view = static_cast<read_view_t*>(
 			ut_malloc(sizeof(read_view_t)));
+		os_atomic_increment_ulint(&srv_read_views_memory,
+					  sizeof(read_view_t));
 		view->max_descr = 0;
 		view->descriptors = NULL;
 	}
@@ -198,6 +200,9 @@
 		/* avoid frequent re-allocations by extending the array to the
 		desired size + 10% */
 
+		os_atomic_increment_ulint(&srv_read_views_memory,
+					  (n + n / 10 - view->max_descr) *
+					  sizeof(trx_id_t));
 		view->max_descr = n + n / 10;
 		view->descriptors = static_cast<trx_id_t*>(
 			ut_realloc(view->descriptors,
@@ -575,6 +580,10 @@
 		return;
 	}
 
+	os_atomic_decrement_lint(&srv_read_views_memory,
+				 sizeof(read_view_t) +
+				 view->max_descr * sizeof(trx_id_t));
+
 	if (view->descriptors != NULL) {
 		ut_free(view->descriptors);
 	}

=== modified file 'Percona-Server/storage/innobase/row/row0sel.cc'
--- Percona-Server/storage/innobase/row/row0sel.cc	2013-05-24 09:42:22 +0000
+++ Percona-Server/storage/innobase/row/row0sel.cc	2013-05-27 12:16:38 +0000
@@ -5205,9 +5205,9 @@
 		if (trx->isolation_level >= TRX_ISO_REPEATABLE_READ
 		    && !trx->read_view) {
 
-			trx->read_view = read_view_open_now(trx->id,
-							    trx->prebuilt_view);
-
+			trx->read_view =
+				read_view_open_now(trx->id,
+						   trx->prebuilt_view);
 			trx->global_read_view = trx->read_view;
 		}
 	}

=== modified file 'Percona-Server/storage/innobase/srv/srv0srv.cc'
--- Percona-Server/storage/innobase/srv/srv0srv.cc	2013-05-26 10:50:45 +0000
+++ Percona-Server/storage/innobase/srv/srv0srv.cc	2013-05-27 12:16:38 +0000
@@ -410,6 +410,20 @@
 UNIV_INTERN ulint	srv_truncated_status_writes	= 0;
 UNIV_INTERN ulint	srv_available_undo_logs         = 0;
 
+/* Ensure status variables are on separate cache lines */
+
+#define CACHE_LINE_SIZE 64
+#define CACHE_ALIGNED __attribute__ ((aligned (CACHE_LINE_SIZE)))
+
+UNIV_INTERN byte
+counters_pad_start[CACHE_LINE_SIZE] __attribute__((unused)) = {0};
+
+UNIV_INTERN ulint		srv_read_views_memory CACHE_ALIGNED	= 0;
+UNIV_INTERN ulint		srv_descriptors_memory CACHE_ALIGNED	= 0;
+
+UNIV_INTERN byte
+counters_pad_end[CACHE_LINE_SIZE] __attribute__((unused)) = {0};
+
 /* Set the following to 0 if you want InnoDB to write messages on
 stderr on startup/shutdown. */
 UNIV_INTERN ibool	srv_print_verbose_log		= TRUE;
@@ -1288,6 +1302,11 @@
 			"; in additional pool allocated " ULINTPF "\n",
 			ut_total_allocated_memory,
 			mem_pool_get_reserved(mem_comm_pool));
+
+	fprintf(file,
+		"Total memory allocated by read views " ULINTPF "\n",
+		os_atomic_increment_lint(&srv_read_views_memory, 0));
+
 	/* Calculate reserved memories */
 	if (btr_search_sys && btr_search_sys->hash_index->heap) {
 		btr_search_sys_subtotal
@@ -1383,6 +1402,15 @@
 	fprintf(file, "%lu read views open inside InnoDB\n",
 		UT_LIST_GET_LEN(trx_sys->view_list));
 
+	fprintf(file, "%lu RW transactions active inside InnoDB\n",
+		UT_LIST_GET_LEN(trx_sys->rw_trx_list));
+
+	fprintf(file, "%lu RO transactions active inside InnoDB\n",
+		UT_LIST_GET_LEN(trx_sys->ro_trx_list));
+
+	fprintf(file, "%lu out of %lu descriptors used\n",
+		trx_sys->descr_n_used, trx_sys->descr_n_max);
+
 	if (UT_LIST_GET_LEN(trx_sys->view_list)) {
 		read_view_t*	view = UT_LIST_GET_LAST(trx_sys->view_list);
 
@@ -1714,6 +1742,10 @@
 		srv_truncated_status_writes;
 
 	export_vars.innodb_available_undo_logs = srv_available_undo_logs;
+	export_vars.innodb_read_views_memory
+		= os_atomic_increment_lint(&srv_read_views_memory, 0);
+	export_vars.innodb_descriptors_memory
+		= os_atomic_increment_lint(&srv_descriptors_memory, 0);
 
 #ifdef UNIV_DEBUG
 	rw_lock_s_lock(&purge_sys->latch);

=== modified file 'Percona-Server/storage/innobase/trx/trx0sys.cc'
--- Percona-Server/storage/innobase/trx/trx0sys.cc	2013-05-24 09:42:22 +0000
+++ Percona-Server/storage/innobase/trx/trx0sys.cc	2013-05-27 12:16:38 +0000
@@ -512,6 +512,8 @@
 			  TRX_DESCR_ARRAY_INITIAL_SIZE));
 	trx_sys->descr_n_max = TRX_DESCR_ARRAY_INITIAL_SIZE;
 	trx_sys->descr_n_used = 0;
+	srv_descriptors_memory = TRX_DESCR_ARRAY_INITIAL_SIZE *
+		sizeof(trx_id_t);
 
 	sys_header = trx_sysf_get(&mtr);
 

=== modified file 'Percona-Server/storage/innobase/trx/trx0trx.cc'
--- Percona-Server/storage/innobase/trx/trx0trx.cc	2013-05-24 09:42:22 +0000
+++ Percona-Server/storage/innobase/trx/trx0trx.cc	2013-05-27 12:16:38 +0000
@@ -134,6 +134,7 @@
 				   n_max * sizeof(trx_id_t)));
 
 		trx_sys->descr_n_max = n_max;
+		srv_descriptors_memory = n_max * sizeof(trx_id_t);
 	}
 
 	descr = trx_sys->descriptors + n_used - 1;

=== modified file 'UDF/configure.ac'
--- UDF/configure.ac	2013-05-27 12:16:36 +0000
+++ UDF/configure.ac	2013-05-27 12:16:38 +0000
@@ -1,4 +1,5 @@
 AC_INIT([maatkit-udf], [0.1], [http://code.google.com/p/maatkit/issues/list])
+m4_ifdef([AM_PROG_AR], [AM_PROG_AR])
 AM_INIT_AUTOMAKE([foreign -Wall -Werror])
 LT_INIT
 AC_PROG_LIBTOOL

=== modified file 'build/build-binary.sh'
--- build/build-binary.sh	2013-05-13 04:25:56 +0000
+++ build/build-binary.sh	2013-05-27 12:16:38 +0000
@@ -148,6 +148,7 @@
         -DWITH_EMBEDDED_SERVER=OFF \
         -DFEATURE_SET=community \
         -DENABLE_DTRACE=OFF \
+        -DWITH_SSL=system \
         -DCMAKE_INSTALL_PREFIX="/usr/local/$PRODUCT_FULL" \
         -DMYSQL_DATADIR="/usr/local/$PRODUCT_FULL/data" \
         -DMYSQL_SERVER_SUFFIX="-$PERCONA_SERVER_VERSION" \

=== removed directory 'build/debian/additions'
=== removed file 'build/debian/additions/debian-start'
--- build/debian/additions/debian-start	2013-05-27 12:16:36 +0000
+++ build/debian/additions/debian-start	1970-01-01 00:00:00 +0000
@@ -1,32 +0,0 @@
-#!/bin/bash
-#
-# This script is executed by "/etc/init.d/mysql" on every (re)start.
-# 
-# Changes to this file will be preserved when updating the Debian package.
-#
-
-PERCONA_PREFIX=/usr
-source "${PERCONA_PREFIX}"/share/mysql/debian-start.inc.sh
-
-MYSQL="${PERCONA_PREFIX}/bin/mysql --defaults-file=/etc/mysql/debian.cnf"
-MYADMIN="${PERCONA_PREFIX}/bin/mysqladmin --defaults-file=/etc/mysql/debian.cnf"
-MYUPGRADE="${PERCONA_PREFIX}/bin/mysql_upgrade --defaults-extra-file=/etc/mysql/debian.cnf"
-MYCHECK="${PERCONA_PREFIX}/bin/mysqlcheck --defaults-file=/etc/mysql/debian.cnf"
-MYCHECK_SUBJECT="WARNING: mysqlcheck has found corrupt tables"
-MYCHECK_PARAMS="--all-databases --fast --silent"
-MYCHECK_RCPT="root"
-
-# The following commands should be run when the server is up but in background
-# where they do not block the server start and in one shell instance so that
-# they run sequentially. They are supposed not to echo anything to stdout.
-# If you want to disable the check for crashed tables comment
-# "check_for_crashed_tables" out.  
-# (There may be no output to stdout inside the background process!)
-echo "Checking for corrupt, not cleanly closed and upgrade needing tables."
-(
-  upgrade_system_tables_if_necessary;
-  check_root_accounts;
-  check_for_crashed_tables;
-) >&2 &
-
-exit 0

=== removed file 'build/debian/additions/debian-start.inc.sh'
--- build/debian/additions/debian-start.inc.sh	2013-05-27 12:16:36 +0000
+++ build/debian/additions/debian-start.inc.sh	1970-01-01 00:00:00 +0000
@@ -1,72 +0,0 @@
-#!/bin/bash
-#
-# This file is included by /etc/mysql/debian-start
-#
-
-## Check all unclosed tables.
-# - Requires the server to be up.
-# - Is supposed to run silently in background. 
-function check_for_crashed_tables() {
-  set -e
-  set -u
-
-  # But do it in the background to not stall the boot process.
-  logger -p daemon.info -i -t$0 "Triggering myisam-recover for all MyISAM tables"
-
-  # Checking for $? is unreliable so the size of the output is checked.
-  # Some table handlers like HEAP do not support CHECK TABLE.
-  tempfile=`tempfile`
-  # We have to use xargs in this case, because a for loop barfs on the 
-  # spaces in the thing to be looped over. 
-  LC_ALL=C $MYSQL --skip-column-names --batch -e  '
-      select concat("select count(*) into @discard from `",
-                    TABLE_SCHEMA, "`.`", TABLE_NAME, "`") 
-      from information_schema.TABLES where ENGINE="MyISAM"' | \
-    xargs -i $MYSQL --skip-column-names --silent --batch \
-                    --force -e "{}" >$tempfile 
-  if [ -s $tempfile ]; then
-    (
-      /bin/echo -e "\n" \
-        "Improperly closed tables are also reported if clients are accessing\n" \
- 	"the tables *now*. A list of current connections is below.\n";
-       $MYADMIN processlist status
-    ) >> $tempfile
-    # Check for presence as a dependency on mailx would require an MTA.
-    if [ -x /usr/bin/mailx ]; then 
-      mailx -e -s"$MYCHECK_SUBJECT" $MYCHECK_RCPT < $tempfile 
-    fi
-    (echo "$MYCHECK_SUBJECT"; cat $tempfile) | logger -p daemon.warn -i -t$0
-  fi
-  rm $tempfile
-}
-
-## Check for tables needing an upgrade.
-# - Requires the server to be up.
-# - Is supposed to run silently in background. 
-function upgrade_system_tables_if_necessary() {
-  set -e
-  set -u
-
-  logger -p daemon.info -i -t$0 "Upgrading MySQL tables if necessary."
-
-  # Filter all "duplicate column", "duplicate key" and "unknown column"
-  # errors as the script is designed to be idempotent.
-  LC_ALL=C $MYUPGRADE \
-    2>&1 \
-    | egrep -v '^(1|@had|ERROR (1054|1060|1061))' \
-    | logger -p daemon.warn -i -t$0
-}
-
-## Check for the presence of both, root accounts with and without password.
-# This might have been caused by a bug related to mysql_install_db (#418672).
-function check_root_accounts() {
-  set -e
-  set -u
-  
-  logger -p daemon.info -i -t$0 "Checking for insecure root accounts."
-
-  ret=$( echo "SELECT count(*) FROM mysql.user WHERE user='root' and password='';" | $MYSQL --skip-column-names )
-  if [ "$ret" -ne "0" ]; then
-    logger -p daemon.warn -i -t$0 "WARNING: mysql.user contains $ret root accounts without password!"
-  fi
-}

=== removed file 'build/debian/compat'
--- build/debian/compat	2013-05-27 12:16:36 +0000
+++ build/debian/compat	1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
-7

=== modified file 'build/debian/rules'
--- build/debian/rules	2013-05-13 04:25:56 +0000
+++ build/debian/rules	2013-05-27 12:16:38 +0000
@@ -28,6 +28,7 @@
 	$(CMAKE) ../ -DBUILD_CONFIG=mysql_release \
 	   -DINSTALL_LAYOUT=DEB \
 	   -DCMAKE_BUILD_TYPE=Debug \
+	   -DWITH_SSL=system \
 	   -DWITH_EMBEDDED_SERVER=OFF \
 	   -DMYSQL_UNIX_ADDR="/var/run/mysqld/mysqld.sock" \
 	   -DFEATURE_SET=$(feature_set) \
@@ -40,13 +41,14 @@
 	cd release && $(CMAKE) ../ -DBUILD_CONFIG=mysql_release \
 	   -DINSTALL_LAYOUT=DEB \
 	    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
+	    -DWITH_SSL=system \
 	    -DWITH_EMBEDDED_SERVER=OFF \
 	    -DMYSQL_UNIX_ADDR="/var/run/mysqld/mysqld.sock" \
 	    -DFEATURE_SET=$(feature_set) \
 	    -DCOMPILATION_COMMENT=$(compilation_comment_release) \
 	    -DMYSQL_SERVER_SUFFIX=$(server_suffix) \
 	    -DWITH_PAM=ON
-	
+
 override_dh_auto_build:
 ifeq ($(SKIP_DEBUG_BINARY),)
 	cd debug && make $(MAKE_JFLAG)

=== modified file 'build/percona-server.spec'
--- build/percona-server.spec	2013-05-26 10:55:00 +0000
+++ build/percona-server.spec	2013-05-27 12:16:38 +0000
@@ -25,7 +25,7 @@
 %define mysql_vendor            Oracle and/or its affiliates
 %define percona_server_vendor	Percona, Inc
 
-%define mysql_version   5.6.10
+%define mysql_version   5.6.11
 %define redhatversion %(lsb_release -rs | awk -F. '{ print $1}')
 %define majorversion 60
 %define minorversion 2
@@ -236,7 +236,7 @@
 Packager:       Percona MySQL Development Team <mysqldev@percona.com>
 Vendor:         %{percona_server_vendor}
 Provides:       mysql-server
-BuildRequires:  %{distro_buildreq}
+BuildRequires:  %{distro_buildreq} pam-devel
 
 # Think about what you use here since the first step is to
 # run a rm -rf
@@ -404,6 +404,7 @@
            -DCMAKE_BUILD_TYPE=Debug \
            -DENABLE_DTRACE=OFF \
            -DWITH_EMBEDDED_SERVER=OFF \
+           -DWITH_SSL=system \
            -DMYSQL_UNIX_ADDR="/var/lib/mysql/mysql.sock" \
            -DFEATURE_SET="%{feature_set}" \
            -DCOMPILATION_COMMENT="%{compilation_comment_debug}" \
@@ -421,6 +422,7 @@
            -DCMAKE_BUILD_TYPE=RelWithDebInfo \
            -DENABLE_DTRACE=OFF \
            -DWITH_EMBEDDED_SERVER=OFF \
+           -DWITH_SSL=system \
            -DMYSQL_UNIX_ADDR="/var/lib/mysql/mysql.sock" \
            -DFEATURE_SET="%{feature_set}" \
            -DCOMPILATION_COMMENT="%{compilation_comment_release}" \
@@ -712,6 +714,12 @@
 NEW_VERSION=%{mysql_version}-%{release}
 STATUS_FILE=$mysql_datadir/RPM_UPGRADE_MARKER
 
+if [ -f $STATUS_FILE ] ; then
+	SERVER_TO_START=`grep '^SERVER_TO_START=' $STATUS_FILE | cut -c17-`
+else
+	SERVER_TO_START=''
+fi
+
 if [ $1 -eq 1 ]; then
 # ----------------------------------------------------------------------
 # Create a MySQL user and group. Do not report any problems if it already
@@ -727,11 +735,6 @@
 # Create data directory if needed, check whether upgrade or install
 # ----------------------------------------------------------------------
 if [ ! -d $mysql_datadir ] ; then mkdir -m 755 $mysql_datadir; fi
-if [ -f $STATUS_FILE ] ; then
-	SERVER_TO_START=`grep '^SERVER_TO_START=' $STATUS_FILE | cut -c17-`
-else
-	SERVER_TO_START=''
-fi
 # echo "Analyzed: SERVER_TO_START=$SERVER_TO_START"
 if [ ! -d $mysql_datadir/mysql ] ; then
 	mkdir $mysql_datadir/mysql;
@@ -743,8 +746,13 @@
 if [ ! -d $mysql_datadir/test ]; then 
         mkdir $mysql_datadir/test; 
 fi
+
+# ----------------------------------------------------------------------
+# Initiate databases if needed
+# ----------------------------------------------------------------------
 %{_bindir}/mysql_install_db --rpm --user=%{mysqld_user}
 fi 
+
 # ----------------------------------------------------------------------
 # Make MySQL start/shutdown automatically when the machine does it.
 # ----------------------------------------------------------------------
@@ -759,9 +767,6 @@
 fi
 
 # ----------------------------------------------------------------------
-# Initiate databases if needed
-# ----------------------------------------------------------------------
-# ----------------------------------------------------------------------
 # Upgrade databases if needed would go here - but it cannot be automated yet
 # ----------------------------------------------------------------------
 

=== modified file 'build/percona-shared-compat.spec'
--- build/percona-shared-compat.spec	2013-03-05 16:39:46 +0000
+++ build/percona-shared-compat.spec	2013-05-27 12:16:38 +0000
@@ -28,7 +28,7 @@
 #
 # Change this to match the version of the shared libs you want to include
 #
-%define version55 5.5.29
+%define version55 5.5.30
 %define version51 5.1.66
 %define version50 5.0.91
 %define version41 4.1.22

=== removed directory 'build/rpm'
=== removed file 'build/rpm/mysql-dubious-exports.patch'
--- build/rpm/mysql-dubious-exports.patch	2013-04-15 07:19:08 +0000
+++ build/rpm/mysql-dubious-exports.patch	1970-01-01 00:00:00 +0000
@@ -1,127 +0,0 @@
-=== modified file 'Percona-Server/client/mysqladmin.cc'
---- Percona-Server/client/mysqladmin.cc	2013-02-12 07:47:19 +0000
-+++ Percona-Server/client/mysqladmin.cc	2013-04-15 07:08:10 +0000
-@@ -23,6 +23,7 @@
- #include <my_pthread.h>				/* because of signal()	*/
- #include <sys/stat.h>
- #include <mysql.h>
-+#include <password.h>       /* my_make_scrambled_password_323, my_make_scrambled_password */
- #include <sql_common.h>
- #include <welcome_copyright_notice.h>           /* ORACLE_WELCOME_COPYRIGHT_NOTICE */
- #include <mysqld_error.h>                       /* to check server error codes */
-@@ -1045,9 +1046,9 @@
-           we will give one more try with old format.
-         */
-         if (old)
--          make_scrambled_password_323(crypted_pw, typed_password);
-+          my_make_scrambled_password_323(crypted_pw, typed_password, strlen(typed_password));
-         else
--          make_scrambled_password(crypted_pw, typed_password);
-+          my_make_scrambled_password_sha1(crypted_pw, typed_password, strlen(typed_password));
-       }
-       else
- 	crypted_pw[0]=0;			/* No password */
-
-=== modified file 'Percona-Server/include/errmsg.h'
---- Percona-Server/include/errmsg.h	2013-04-15 06:52:16 +0000
-+++ Percona-Server/include/errmsg.h	2013-04-15 07:08:31 +0000
-@@ -25,6 +25,7 @@
- void	init_client_errs(void);
- void	finish_client_errs(void);
- extern const char *client_errors[];	/* Error messages */
-+extern const char **mysql_client_errors;	/* Error messages */
- #ifdef	__cplusplus
- }
- #endif
-
-=== modified file 'Percona-Server/include/my_sys.h'
---- Percona-Server/include/my_sys.h	2013-03-05 12:46:43 +0000
-+++ Percona-Server/include/my_sys.h	2013-04-15 07:08:10 +0000
-@@ -260,6 +260,7 @@
- 
- /* charsets */
- #define MY_ALL_CHARSETS_SIZE 2048
-+#define default_charset_info mysql_default_charset_info  /* namespace sanity */
- extern MYSQL_PLUGIN_IMPORT CHARSET_INFO *default_charset_info;
- extern MYSQL_PLUGIN_IMPORT CHARSET_INFO *all_charsets[MY_ALL_CHARSETS_SIZE];
- extern CHARSET_INFO compiled_charsets[];
-
-=== modified file 'Percona-Server/include/mysql.h.pp'
---- Percona-Server/include/mysql.h.pp	2013-02-12 07:47:19 +0000
-+++ Percona-Server/include/mysql.h.pp	2013-04-15 07:08:10 +0000
-@@ -90,7 +90,7 @@
- void my_net_local_init(NET *net);
- void net_end(NET *net);
- void net_clear(NET *net, my_bool check_buffer);
--my_bool net_realloc(NET *net, size_t length);
-+my_bool mysql_net_realloc(NET *net, size_t length);
- my_bool net_flush(NET *net);
- my_bool my_net_write(NET *net,const unsigned char *packet, size_t len);
- my_bool net_write_command(NET *net,unsigned char command,
-@@ -129,13 +129,11 @@
- double my_rnd(struct rand_struct *);
- void create_random_string(char *to, unsigned int length, struct rand_struct *rand_st);
- void hash_password(unsigned long *to, const char *password, unsigned int password_len);
--void make_scrambled_password_323(char *to, const char *password);
- void scramble_323(char *to, const char *message, const char *password);
- my_bool check_scramble_323(const unsigned char *reply, const char *message,
-                            unsigned long *salt);
- void get_salt_from_password_323(unsigned long *res, const char *password);
- void make_password_from_salt_323(char *to, const unsigned long *salt);
--void make_scrambled_password(char *to, const char *password);
- void scramble(char *to, const char *message, const char *password);
- my_bool check_scramble(const unsigned char *reply, const char *message,
-                        const unsigned char *hash_stage2);
-
-=== modified file 'Percona-Server/include/mysql_com.h'
---- Percona-Server/include/mysql_com.h	2013-03-05 12:46:43 +0000
-+++ Percona-Server/include/mysql_com.h	2013-04-15 07:08:10 +0000
-@@ -482,6 +482,7 @@
- void my_net_local_init(NET *net);
- void net_end(NET *net);
- void net_clear(NET *net, my_bool check_buffer);
-+#define net_realloc mysql_net_realloc    /* namespace sanity */
- my_bool net_realloc(NET *net, size_t length);
- my_bool	net_flush(NET *net);
- my_bool	my_net_write(NET *net,const unsigned char *packet, size_t len);
-@@ -559,14 +560,12 @@
- void create_random_string(char *to, unsigned int length, struct rand_struct *rand_st);
- 
- void hash_password(unsigned long *to, const char *password, unsigned int password_len);
--void make_scrambled_password_323(char *to, const char *password);
- void scramble_323(char *to, const char *message, const char *password);
- my_bool check_scramble_323(const unsigned char *reply, const char *message,
-                            unsigned long *salt);
- void get_salt_from_password_323(unsigned long *res, const char *password);
- void make_password_from_salt_323(char *to, const unsigned long *salt);
- 
--void make_scrambled_password(char *to, const char *password);
- void scramble(char *to, const char *message, const char *password);
- my_bool check_scramble(const unsigned char *reply, const char *message,
-                        const unsigned char *hash_stage2);
-
-=== modified file 'Percona-Server/libmysql/CMakeLists.txt'
---- Percona-Server/libmysql/CMakeLists.txt	2013-04-15 06:52:16 +0000
-+++ Percona-Server/libmysql/CMakeLists.txt	2013-04-15 07:08:10 +0000
-@@ -221,7 +221,7 @@
-       SET(libmysql_link_flags
-         "${libmysql_link_flags} ${LINK_FLAG_NO_UNDEFINED}")
-       SET(libmysql_link_flags
--        "${libmysql_link_flags} -Wl,--version-script=libmysql.ver")
-+        "${libmysql_link_flags} -Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/libmysql.map")
-       SET_TARGET_PROPERTIES(libmysql
-         PROPERTIES LINK_FLAGS "${libmysql_link_flags}")
-     ENDIF() 
-
-=== modified file 'Percona-Server/libmysql/errmsg.c'
---- Percona-Server/libmysql/errmsg.c	2013-04-15 06:52:16 +0000
-+++ Percona-Server/libmysql/errmsg.c	2013-04-15 07:08:10 +0000
-@@ -23,6 +23,7 @@
- #include <my_sys.h>
- #include "errmsg.h"
- 
-+const char **mysql_client_errors = client_errors;
- const char *client_errors[]=
- {
-   "Unknown MySQL error",
-

=== added directory 'doc/source/_static'
=== modified file 'doc/source/conf.py'
--- doc/source/conf.py	2013-05-12 09:13:00 +0000
+++ doc/source/conf.py	2013-05-27 12:16:38 +0000
@@ -54,7 +54,7 @@
 # The short X.Y version.
 version = '5.6'
 # The full version, including alpha/beta/rc tags.
-release = '5.6.10-60.2'
+release = '5.6.11-60.3'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.

=== removed file 'doc/source/development.rst'
--- doc/source/development.rst	2013-05-27 12:16:36 +0000
+++ doc/source/development.rst	1970-01-01 00:00:00 +0000
@@ -1,266 +0,0 @@
-=============================
-Development of Percona Server
-=============================
-
-|Percona Server| is an open source project to produce a distribution
-of the |MySQL| server with improved performance, scalability and
-diagnostics.
-
-Submitting Changes
-==================
-This process is very much modeled on what is being used by
-`Drizzle <http://www.drizzle.org>`_. The Drizzle project went through
-several iterations and refinements before settling on this process. It
-has been found to both keep trunk in a constant state of stability
-(allowing for a release at any time) and minimizing wasted time by
-developers due to broken code from somebody else interfering with their day.
-
-You should also be familiar with our |Jenkins| setup.
-
-Overview
-~~~~~~~~
-At Percona we use `Bazaar <http://www.bazaar-vcs.org>`_ for source
-control and `launchpad <http://www.launchpad.net>`_ for both
-code hosting and release management.
-
-Changes to our software projects could be because of a new feature
-(blueprint) or fixing a bug (bug). Projects such as refactoring could
-be classed as a blueprint or a bug depending on the scope of the work.
-
-Blueprints and bugs are targeted to specific milestones (releases). A
-milestone is part of a series - e.g. 1.6 is a series in Percona
-XtraBackup and 1.6.1, 1.6.2 and 1.6.3 are milestones in the 1.6 series.
-
-Code is proposed for merging in the form of merge requests on launchpad.
-
-Some software (such as Percona Xtrabackup) we maintain both a
-development branch and a stable branch. For example: Xtrabackup 1.6 is
-the current stable series, and changes that should make it into bugfix
-releases of 1.6 should be proposed for the 1.6 tree. However, most new
-features or more invasive (or smaller) bug fixes should be targeted to
-the next release, currently 1.7. If submitting something to 1.6, you
-should also propose a branch that has these changes merged to the
-development release (1.7). This way somebody else doesn't have to
-attempt to merge your code and we get to run any extra tests that may
-be in the tree (and check compatibility with all platforms).
-
-For Percona Server, we have two current bzr branches on which
-development occurs: 5.1 and 5.5. As Percona Server is not a
-traditional project, instead being a set of patches against an
-existing product, these two branches are not related. That is, we do
-not merge from one to the other. To have your changes in both, you
-must propose two branches: one for 5.1 version of patch and one for
-5.5.
-
-Making a change to a project
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-In this case we're going to use percona-xtrabackup as an
-example. workflow is similar for Percona Server, but patch will need
-to be modified both in 5.1 and 5.5 branches.
-
-* ``bzr branch lp:percona-xtrabackup featureX`` (where 'featureX' is a
-  sensible name for the task at hand)
-* (developer makes changes in featureX, testing locally)
-* Developer pushes to ``lp:~username/percona-xtrabackup/featureX``
-* When the developer thinks the branch may be ready to be merged, they
-  will run the branch through param build.
-* If there are any build or test failures, developer fixes them (in
-  the case of failing tests in trunk... no more tests should
-  fail. Eventually all tests will pass in trunk)
-* Developer can then submit a merge proposal to lp:percona-xtrabackup,
-  referencing URL for the param build showing that build and test
-  passes
-* Code undergoes review
-* Once code is accepted, it can be merged (see other section)
-
-If the change also applies to a stable release (e.g. 1.6) then changes
-should be made on a branch of 1.6 and merged to a branch of trunk. In
-this case there should be two branches run through param build and two
-merge proposals (one for 1.6 and one with the changes merged to
-trunk). This prevents somebody else having to guess how to merge your
-changes.
-
-Merging approved branches
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Before code hits trunk, it goes through a "staging" branch, where some
-extra tests may be run (e.g. valgrind) along with testing that all
-branches behave well together (build and test) before pushing to
-trunk.
-
-To ensure quality, **DO NOT push directly to trunk!** everything must go through adequate testing first. This ensures that at any point trunk is in a releasable state.
-
-Please note that **ALL changes must go through staging first** This is to ensure that several approved merge requests do not interact badly with each
-other.
-
-* Merge captain (for lack of a better term for the person merging
-  approved code into trunk) may collate several approved branches that
-  have individually passed param-build as run by the original
-  developers.
-
-  * Workflow would look something like this:
-
-    * ``bzr branch lp:percona-xtrabackup staging``
-    * ``bzr merge lp:~user/percona-xtrabackup/featureX``
-    * ``bzr commit -m "merge feature X"``
-    * ``bzr merge lp:~user/percona-xtrabackup/featureY``
-    * ``bzr commit -m "merge feature Y"``
-    * ``bzr push --overwrite lp:percona-xtrabackup/staging'``
-    * Run ``lp:percona-xtrabackup/staging`` through param build (in
-      future, we'll likely have a Jenkins job specifically for this)
-    * If build succeeds, ``bzr push lp:percona-server`` (and branches
-      will be automatically marked as 'merged'.. although bug reports
-      will need to be manually changed to 'Fix Released')
-    * If build or test fails, attempt to find which branch may be the
-      cause, and repeat process but without that branch.
-
-* Any failing branch will be set to 'Work in Progress' with a 'Needs
-  fixing' review with the URL of the build in jenkins where the
-  failure occured. This will allow developers to fix their code.
-
-Resubmitting a merge request
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In the event of a merge request being marked as 'Work In Progress' due
-to build/test failures when merging, the developer should fix up the
-branch, run through param build and then 'Resubmit' the merge
-proposal.
-
-There is a link on launchpad to resubmit the merge proposal, this means it appears in the list of merge requests to review again rather than off in the "work in progress" section.
-
-
-Percona Server
-~~~~~~~~~~~~~~
-
-The same process for Percona Server, but we have different branches (and merge requests) for 5.1 and 5.5 series.
-
-Upgrading MySQL base version
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-* Same process as other modifications.
-* create local branch
-* make changes
-* param build
-* merge request
-
-We will need some human processes to ensure that we do not merge extra
-things during the time when base MySQL version is being updated to
-avoid making life harder for the person doing the update.
-
-
-
-Making a release
-================
-
-* ``bzr branch lp:project release-project-VERSION``
-* build packages
-* perform any final tests (as we transition, this will already have
-  been done by jenkins)
-* ``bzr tag project-version``
-* merge request back to lp:project including the tag (TODO: write
-  exact bzr commands for this)
-
-This way anybody can easily check out an old release by just using bzr
-to branch the specific tag.
-
-Jenkins
-=======
-
-Our Jenkins instance uses a mixture of VMs on physical hosts that
-Percona runs and Virtual Machines in Amazon EC2 that are launched on
-demand.
-
-Basic Concepts
-~~~~~~~~~~~~~~
-We have some jobs that are activated based on source control changes
-(new commits in a bzr repository). We have some that are "param
-build" - that is, a user specifies parameters for the build (e.g. the
-bzr tree). A param-build allows developers to ensure their branch
-compiles and passes tests on all supported platforms *before*
-submitting a merge request. This helps us maintain the quality of the
-main bzr branches and not block other developers work.
-
-Jenkins is a Master/Slave system and the jenkins master schedules the
-builds across available machines (and may launch new VMs in EC2 to
-meet demand).
-
-Most of our jobs are what's known as "matrix builds". That is, a job
-that will be run with several different configurations of the project
-(e.g. release, debug) across several platforms (e.g. on a host
-matching the label of "centos5-32" and a host matching label of
-"ubuntu-natty-32bit"). Matrix builds show a table of lights to
-indicate their status. Clicking "build now" on one of these queues up
-builds for all of the combinations.
-
-We have some integration of our regression test suites (currently
-xtrabackup) with Jenkins ability to parse JUnitXML, presenting a nice
-user interface to any test failures.
-
-Because building some projects is non-trivial, in order to not
-duplicate the list of compile instructions for each job, we use
-template builds. You'll see builds such as percona-xtrabackup-template
-which is a disabled job, but all current xtrabackup jobs point to it
-for the commands to build and run the test suite.
-
-Percona Xtrabackup
-~~~~~~~~~~~~~~~~~~
-
-`<http://jenkins.percona.com/view/Percona%20Xtrabackup/>`_
-
-We currently build both xtrabackup 1.6 and xtrabackup trunk (will become 1.7).
-
-There are param-builds for 1.6 and trunk too. These should be run for each merge request (and before any collection of merged branches is pushed to trunk)
-
-Percona Server
-~~~~~~~~~~~~~~
-
-We have separate jobs for Percona Server 5.1 and Percona Server 5.5 due to the different build systems that MySQL 5.1 and 5.5 use.
-
-The ``mysql-test-run.pl`` test suite is integrated with Jenkins through `subunit <http://launchpad.net/subunit>`_ and ``subunit2junitxml`` allowing us to easily see which tests passed/failed on any particular test run.
-
-Percona Server 5.1
-------------------
-
-`<http://jenkins.percona.com/view/Percona%20Server%205.1/>`_
-
-We have trunk and param jobs. We also have a valgrind job that will run after a successful trunk build.
-
-Percona Server 5.5
-------------------
-
-`<http://jenkins.percona.com/view/Percona%20Server%205.5/>`_
-
-Similar to 5.1, but for PS5.5 instead.
-
-MySQL Builds
-~~~~~~~~~~~~
-
-`<http://jenkins.percona.com/view/MySQL/>`_
-
-I've set up a few jobs in Jenkins that should help us predict the future
-for Percona Server. Namely, if upstream MySQL may cause us any problems.
-
-I wanted to see if some test failures were possibly upstream, so I set
-up two jobs:
-
-`<http://jenkins.percona.com/view/MySQL/job/mysql-5.1-url-param/>`_
-`<http://jenkins.percona.com/view/MySQL/job/mysql-5.5-url-param/>`_
-
-both of which ask for a URL to a MySQL source tarball and then do a full
-build and test across the platforms we have in jenkins.
-
-But my next thought was that we could try and do this *before* the
-source tarballs come out - hopefully then being able to have MySQL
-release source tarballs that do in fact pass build and test everywhere
-where we're wanting to support Percona Server.
-
-`<http://jenkins.percona.com/view/MySQL/job/mysql-5.1-trunk/>`_
-`<http://jenkins.percona.com/view/MySQL/job/mysql-5.5-trunk/>`_
-
-are scheduled to just try once per week (we can change the frequency if
-we want to) to build and test from the MySQL bzr trees.
-
-I also have a valgrind build (same configuration as for Percona Server) to help us see if there's any new valgrind warnings (or missed suppressions).
-
-I'm hoping that these jobs will help us catch any future problems before
-they become our problem. (e.g. we can easily see that the sporadic test failures we see in Percona Server are actually in upstream MySQL).

=== removed file 'doc/source/diagnostics/user_stats.rst'
--- doc/source/diagnostics/user_stats.rst	2013-05-27 12:16:36 +0000
+++ doc/source/diagnostics/user_stats.rst	1970-01-01 00:00:00 +0000
@@ -1,242 +0,0 @@
-.. _user_stats:
-
-=================
- User Statistics
-=================
-
-This feature adds several ``INFORMATION_SCHEMA`` tables, several commands, and the userstat variable. The tables and commands can be used to understand the server activity better and identify the source of the load.
-
-The functionality is disabled by default, and must be enabled by setting ``userstat`` to ``ON``. It works by keeping several hash tables in memory. To avoid contention over global mutexes, each connection has its own local statistics, which are occasionally merged into the global statistics, and the local statistics are then reset to 0.
-
-
-Version Specific Information
-============================
-
-  * :rn:`5.5.10-20.1`:
-     Renamed variable :variable:`userstat_running` to :variable:`userstat`.
-
-Other Information
-=================
-
-  * Author/Origin:
-     *Google*; *Percona* added the ``INFORMATION_SCHEMA`` tables and the :variable:`userstat_running` variable.
-
-System Variables
-================
-
-.. variable:: userstat_running
-
-     :version 5.5.10-20.1: Renamed to :variable:`userstat`
-     :cli: Yes
-     :conf: Yes
-     :scope: Global
-     :dyn: Yes
-     :vartype: BOOLEAN
-     :default: OFF
-     :range: ON/OFF
-
-Enables or disables collection of statistics. The default is ``OFF``, meaning no statistics are gathered. This is to ensure that the statistics collection doesn't cause any extra load on the server unless desired.
-
-
-INFORMATION_SCHEMA Tables
-=========================
-
-.. table:: INFORMATION_SCHEMA.CLIENT_STATISTICS
-
-  :column CLIENT: The IP address or hostname from which the connection originated.
-  :column TOTAL_CONNECTIONS: The number of connections created for this client.
-  :column CONCURRENT_CONNECTIONS: The number of concurrent connections for this client.
-  :column CONNECTED_TIME: The cumulative number of seconds elapsed while there were connections from this client.
-  :column BUSY_TIME: The cumulative number of seconds there was activity on connections from this client.
-  :column CPU_TIME: The cumulative CPU time elapsed, in seconds, while servicing this client's connections.
-  :column BYTES_RECEIVED: The number of bytes received from this client's connections.
-  :column BYTES_SENT: The number of bytes sent to this client's connections.
-  :column BINLOG_BYTES_WRITTEN:	The number of bytes written to the binary log from this client's connections.
-  :column ROWS_FETCHED: The number of rows fetched by this client's connections.
-  :column ROWS_UPDATED: The number of rows updated by this client's connections.
-  :column TABLE_ROWS_READ: The number of rows read from tables by this client's connections. (It may be different from ``ROWS_FETCHED``.)
-  :column SELECT_COMMANDS: The number of ``SELECT`` commands executed from this client's connections.
-  :column UPDATE_COMMANDS: The number of ``UPDATE`` commands executed from this client's connections.
-  :column OTHER_COMMANDS: The number of other commands executed from this client's connections.
-  :column COMMIT_TRANSACTIONS: The number of ``COMMIT`` commands issued by this client's connections.
-  :column ROLLBACK_TRANSACTIONS: The number of ``ROLLBACK`` commands issued by this client's connections.
-  :column DENIED_CONNECTIONS: The number of connections denied to this client.
-  :column LOST_CONNECTIONS: The number of this client's connections that were terminated uncleanly.
-  :column ACCESS_DENIED: The number of times this client's connections issued commands that were denied.
-  :column EMPTY_QUERIES: The number of times this client's connections sent empty queries to the server.
-
-This table holds statistics about client connections. The Percona version of the feature restricts this table's visibility to users who have the ``SUPER`` or ``PROCESS`` privilege.
-
-Example: ::
-
-  mysql> SELECT * FROM INFORMATION_SCHEMA.CLIENT_STATISTICS\G
-  *************************** 1. row ***************************
-                  CLIENT: 10.1.12.30
-       TOTAL_CONNECTIONS: 20
-  CONCURRENT_CONNECTIONS: 0
-          CONNECTED_TIME: 0
-               BUSY_TIME: 93
-                CPU_TIME: 48
-          BYTES_RECEIVED: 5031
-              BYTES_SENT: 276926
-    BINLOG_BYTES_WRITTEN: 217
-            ROWS_FETCHED: 81
-            ROWS_UPDATED: 0
-         TABLE_ROWS_READ: 52836023
-         SELECT_COMMANDS: 26
-         UPDATE_COMMANDS: 1
-          OTHER_COMMANDS: 145
-     COMMIT_TRANSACTIONS: 1
-   ROLLBACK_TRANSACTIONS: 0
-      DENIED_CONNECTIONS: 0
-        LOST_CONNECTIONS: 0
-           ACCESS_DENIED: 0
-           EMPTY_QUERIES: 0
-
-
-.. table:: INFORMATION_SCHEMA.INDEX_STATISTICS
-
-  :column TABLE_SCHEMA: The schema (database) name.
-  :column TABLE_NAME: The table name.
-  :column INDEX_NAME: The index name (as visible in ``SHOW CREATE TABLE``).
-  :column ROWS_READ: The number of rows read from this index.
-
-This table shows statistics on index usage. An older version of the feature contained a single column that had the ``TABLE_SCHEMA``, ``TABLE_NAME`` and ``INDEX_NAME`` columns concatenated together. The |Percona| version of the feature separates these into three columns. Users can see entries only for tables to which they have ``SELECT`` access.
-
-This table makes it possible to do many things that were difficult or impossible previously. For example, you can use it to find unused indexes and generate DROP commands to remove them.
-
-Example: ::
-
-  mysql> SELECT * FROM INFORMATION_SCHEMA.INDEX_STATISTICS
-     WHERE TABLE_NAME='tables_priv';
-  +--------------+-----------------------+--------------------+-----------+
-  | TABLE_SCHEMA | TABLE_NAME            | INDEX_NAME         | ROWS_READ |
-  +--------------+-----------------------+--------------------+-----------+
-  | mysql        | tables_priv           | PRIMARY            |         2 |
-  +--------------+-----------------------+--------------------+-----------+
-
-
-
-.. table:: INFORMATION_SCHEMA.TABLE_STATISTICS
-
-  :column TABLE_SCHEMA: The schema (database) name.
-  :column TABLE_NAME: The table name.
-  :column ROWS_READ: The number of rows read from the table.
-  :column ROWS_CHANGED: The number of rows changed in the table.
-  :column ROWS_CHANGED_X_INDEXES: The number of rows changed in the table, multiplied by the number of indexes changed.
-
-This table is similar in function to the ``INDEX_STATISTICS`` table.
-
-Example: ::
-
-  mysql> SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS
-     WHERE TABLE_NAME='tables_priv';
-  +--------------+-------------------------------+-----------+--------------+------------------------+
-  | TABLE_SCHEMA | TABLE_NAME                    | ROWS_READ | ROWS_CHANGED | ROWS_CHANGED_X_INDEXES |
-  +--------------+-------------------------------+-----------+--------------+------------------------+
-  | mysql        | tables_priv                   |         2 |            0 |                      0 | 
-  +--------------+-------------------------------+-----------+--------------+------------------------+
-
-
-.. table:: INFORMATION_SCHEMA.THREAD_STATISTICS
-
-  :column THREAD_ID: int(21)
-  :column TOTAL_CONNECTIONS: int(21)
-  :column CONCURRENT_CONNECTIONS: int(21)
-  :column CONNECTED_TIME: int(21)
-  :column BUSY_TIME: int(21)
-  :column CPU_TIME: int(21)
-  :column BYTES_RECEIVED: int(21)
-  :column BYTES_SENT: int(21)
-  :column BINLOG_BYTES_WRITTEN: int(21)
-  :column ROWS_FETCHED: int(21)
-  :column ROWS_UPDATED: int(21)
-  :column TABLE_ROWS_READ: int(21)
-  :column SELECT_COMMANDS: int(21)
-  :column UPDATE_COMMANDS: int(21)
-  :column OTHER_COMMANDS: int(21)
-  :column COMMIT_TRANSACTIONS: int(21)
-  :column ROLLBACK_TRANSACTIONS: int(21)
-  :column DENIED_CONNECTIONS: int(21)
-  :column LOST_CONNECTIONS: int(21)
-  :column ACCESS_DENIED: int(21)
-  :column EMPTY_QUERIES: int(21)
-
-.. table:: INFORMATION_SCHEMA.USER_STATISTICS
-
-  :column USER: The username. The value ``#mysql_system_user#`` appears when there is no username (such as for the slave SQL thread).
-  :column TOTAL_CONNECTIONS: The number of connections created for this user.
-  :column CONCURRENT_CONNECTIONS: The number of concurrent connections for this user.
-  :column CONNECTED_TIME: The cumulative number of seconds elapsed while there were connections from this user.
-  :column BUSY_TIME: The cumulative number of seconds there was activity on connections from this user.
-  :column CPU_TIME: The cumulative CPU time elapsed, in seconds, while servicing this user's connections.
-  :column BYTES_RECEIVED: The number of bytes received from this user's connections.
-  :column BYTES_SENT: The number of bytes sent to this user's connections.
-  :column BINLOG_BYTES_WRITTEN: The number of bytes written to the binary log from this user's connections.
-  :column ROWS_FETCHED: The number of rows fetched by this user's connections.
-  :column ROWS_UPDATED: The number of rows updated by this user's connections.
-  :column TABLE_ROWS_READ: The number of rows read from tables by this user's connections. (It may be different from ``ROWS_FETCHED``.)
-  :column SELECT_COMMANDS: The number of ``SELECT`` commands executed from this user's connections.
-  :column UPDATE_COMMANDS: The number of ``UPDATE`` commands executed from this user's connections.
-  :column OTHER_COMMANDS: The number of other commands executed from this user's connections.
-  :column COMMIT_TRANSACTIONS: The number of ``COMMIT`` commands issued by this user's connections.
-  :column ROLLBACK_TRANSACTIONS: The number of ``ROLLBACK`` commands issued by this user's connections.
-  :column DENIED_CONNECTIONS: The number of connections denied to this user.
-  :column LOST_CONNECTIONS: The number of this user's connections that were terminated uncleanly.
-  :column ACCESS_DENIED: The number of times this user's connections issued commands that were denied.
-  :column EMPTY_QUERIES: The number of times this user's connections sent empty queries to the server.
-
-This table contains information about user activity. The |Percona| version of the patch restricts this table's visibility to users who have the ``SUPER`` or ``PROCESS`` privilege.
-
-The table gives answers to questions such as which users cause the most load, and whether any users are being abusive. It also lets you measure how close to capacity the server may be. For example, you can use it to find out whether replication is likely to start falling behind.
-
-Example: ::
-
-  mysql> SELECT * FROM INFORMATION_SCHEMA.USER_STATISTICS\G
-  *************************** 1. row ***************************
-                    USER: root
-       TOTAL_CONNECTIONS: 5592
-  CONCURRENT_CONNECTIONS: 0
-          CONNECTED_TIME: 6844
-               BUSY_TIME: 179
-                CPU_TIME: 72
-          BYTES_RECEIVED: 603344
-              BYTES_SENT: 15663832
-    BINLOG_BYTES_WRITTEN: 217
-            ROWS_FETCHED: 9793
-            ROWS_UPDATED: 0
-         TABLE_ROWS_READ: 52836023
-         SELECT_COMMANDS: 9701
-         UPDATE_COMMANDS: 1
-          OTHER_COMMANDS: 2614
-     COMMIT_TRANSACTIONS: 1
-   ROLLBACK_TRANSACTIONS: 0
-      DENIED_CONNECTIONS: 0
-        LOST_CONNECTIONS: 0
-           ACCESS_DENIED: 0
-           EMPTY_QUERIES: 0
-
-Commands Provided
-=================
-
-  * ``FLUSH CLIENT_STATISTICS``
-
-  * ``FLUSH INDEX_STATISTICS``
-
-  * ``FLUSH TABLE_STATISTICS``
-
-  * ``FLUSH THREAD_STATISTICS``
-
-  * ``FLUSH USER_STATISTICS``
-
-These commands discard the specified type of stored statistical information.
-
-  * ``SHOW CLIENT_STATISTICS``
-  * ``SHOW INDEX_STATISTICS``
-  * ``SHOW TABLE_STATISTICS``
-  * ``SHOW THREAD_STATISTICS``
-  * ``SHOW USER_STATISTICS``
-
-These commands are another way to display the information you can get from the ``INFORMATION_SCHEMA`` tables. The commands accept ``WHERE`` clauses. They also accept but ignore ``LIKE`` clauses.
-
-

=== added file 'doc/source/flexibility/buff_read_ahead_area.rst'
--- doc/source/flexibility/buff_read_ahead_area.rst	1970-01-01 00:00:00 +0000
+++ doc/source/flexibility/buff_read_ahead_area.rst	2013-05-27 12:16:38 +0000
@@ -0,0 +1,36 @@
+.. _buff_read_ahead_area:
+
+====================================
+ Fixed Size for the Read Ahead Area
+====================================
+
+|InnoDB| dynamically calculates the size of the read-ahead area when it has to trigger its read-ahead algorithm. When the workload involves heavy I/O operations, this size is computed so frequently that it has a non-negligible impact on CPU usage.
+
+This size depends only on the size of the buffer pool, set by the :variable:`innodb_buffer_pool_size` variable, and as soon as the buffer pool is larger than 1024 pages (16 MB), the computed value is always 64. With this change, the value is fixed at 64, thus removing a bottleneck experienced by some users.
+
+Please note that, as a consequence, the minimum allowed value for the |InnoDB| buffer pool is effectively set to 32 MB.
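+
+When configuring a server with this change in place, the buffer pool simply needs to be at least the new minimum. The startup option below is an illustrative sketch only; the value shown is arbitrary, not a recommendation: ::
+
+  --innodb_buffer_pool_size=128M
+
+Configurations that previously used an |InnoDB| buffer pool smaller than 32 MB may need to be raised accordingly.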
+
+This change is a port of the feature from Facebook:
+
+  *  http://bazaar.launchpad.net/~mysqlatfacebook/mysqlatfacebook/5.1/revision/3538
+
+
+Version Specific Information
+============================
+
+  * :rn:`5.5.8-20.0` :
+    Full functionality available.
+
+Other Information
+=================
+
+  * Author/Origin:
+    Facebook
+
+  * Bugs fixed:
+    :bug:`606811`
+
+Other Reading
+=============
+
+  * `BUF_READ_AHEAD_AREA Bottleneck <http://www.facebook.com/notes/mysqlfacebook/using-pmp-to-double-mysql-throughput-part-2/405092575932>`_

=== removed file 'doc/source/management/innodb_expanded_fast_index_creation.rst'
--- doc/source/management/innodb_expanded_fast_index_creation.rst	2013-05-27 12:16:36 +0000
+++ doc/source/management/innodb_expanded_fast_index_creation.rst	1970-01-01 00:00:00 +0000
@@ -1,82 +0,0 @@
-.. _expanded_innodb_fast_index_creation:
-
-============================
-Expanded Fast Index Creation
-============================
-
-Percona has implemented several changes related to |MySQL|'s fast index creation feature. This feature expands the ``ALTER TABLE`` command by adding a new clause that provides online index renaming capability, that is renaming indexes without rebuilding the whole table.
-
-Enabling Expanded Fast Index Creation
-=====================================
-
-Fast index creation was implemented in |MySQL| as a way to speed up the process of adding or dropping indexes on tables with many rows. However, cases have been found in which fast index creation creates an inconsistency between |MySQL| and |InnoDB| data dictionaries.
-
-This feature implements a session variable that enables extended fast index creation. Besides optimizing DDL directly, :variable:`expand_fast_index_creation` may also optimize index access for subsequent DML statements because using it results in much less fragmented indexes.
-
-
-:command:`mysqldump`
---------------------
-
-A new option, ``--innodb-optimize-keys``, was implemented in :command:`mysqldump`. It changes the way |InnoDB| tables are dumped, so that secondary and foreign keys are created after loading the data, thus taking advantage of fast index creation. More specifically:
-
-  * ``KEY``, ``UNIQUE KEY``, and ``CONSTRAINT`` clauses are omitted from ``CREATE TABLE`` statements corresponding to |InnoDB| tables.
-
-  * An additional ``ALTER TABLE`` is issued after dumping the data, in order to create the previously omitted keys.
-
-``ALTER TABLE``
----------------
-
-When ``ALTER TABLE`` requires a table copy, secondary keys are now dropped and recreated later, after copying the data. The following restrictions apply:
-
-  * Only non-unique keys can be involved in this optimization.
-
-  * If the table contains foreign keys, or a foreign key is being added as a part of the current ``ALTER TABLE`` statement, the optimization is disabled for all keys.
-
-``OPTIMIZE TABLE``
-------------------
-
-Internally, ``OPTIMIZE TABLE`` is mapped to ``ALTER TABLE ... ENGINE=innodb`` for |InnoDB| tables. As a consequence, it now also benefits from fast index creation, with the same restrictions as for ``ALTER TABLE``.
-
-
-Caveats
--------
-
-|InnoDB| fast index creation uses temporary files in tmpdir for all indexes being created. So make sure you have enough tmpdir space when using :variable:`expand_fast_index_creation`. It is a session variable, so you can temporarily switch it off if you are short on tmpdir space and/or don’t want this optimization to be used for a specific table. 
-
-There’s also a number of cases when this optimization is not applicable:
-  * ``UNIQUE`` indexes in ``ALTER TABLE`` are ignored to enforce uniqueness where necessary when copying the data to a temporary table;
-
-  * ``ALTER TABLE`` and ``OPTIMIZE TABLE`` always process tables containing foreign keys as if :variable:`expand_fast_index_creation` is OFF to avoid dropping keys that are part of a FOREIGN KEY constraint;
-
-  * :command:`mysqldump --innodb-optimize-keys` ignores foreign keys because |InnoDB| requires a full table rebuild on foreign key changes. So adding them back with a separate ``ALTER TABLE`` after restoring the data from a dump would actually make the restore slower;
-
-  * :command:`mysqldump --innodb-optimize-keys` ignores indexes on ``AUTO_INCREMENT`` columns, because they must be indexed, so it is impossible to temporarily drop the corresponding index;
-
-  * :command:`mysqldump --innodb-optimize-keys` ignores the first UNIQUE index on non-nullable columns when the table has no ``PRIMARY KEY`` defined, because in this case |InnoDB| picks such an index as the clustered one.
-
-Version Specific Information
-============================
-
-  * :rn:`5.6.10-60.2`
-    Variable :variable:`expand_fast_index_creation` implemented.
-    This variable is controlling whether fast index creation optimizations made by Percona are used.
-
-System Variables
-================
-
-.. variable:: expand_fast_index_creation
-
-     :cli: Yes
-     :conf: No
-     :scope: Local/Global
-     :dyn: Yes
-     :vartype: Boolean
-     :default: OFF
-     :range: ON/OFF
-
-Other Reading
-=============
-
-  * `Improved InnoDB fast index creation <http://www.mysqlperformanceblog.com/2011/11/06/improved-innodb-fast-index-creation/>`_
-  * `Thinking about running OPTIMIZE on your InnoDB Table? Stop! <http://www.mysqlperformanceblog.com/2010/12/09/thinking-about-running-optimize-on-your-innodb-table-stop/>`_
-

=== added file 'doc/source/management/innodb_fake_changes.rst'
--- doc/source/management/innodb_fake_changes.rst	1970-01-01 00:00:00 +0000
+++ doc/source/management/innodb_fake_changes.rst	2013-05-27 12:16:38 +0000
@@ -0,0 +1,70 @@
+.. _innodb_fake_changes_page:
+
+==========================
+ Support for Fake Changes
+==========================
+
+Restarting a slave server in a replication environment, or setting up a new slave server, can cause replication reads to be slower. This happens because replication in |MySQL| is single-threaded and needs to read the data before it can execute the queries. The process can be sped up by using prefetch threads to warm the server: replaying statements and then rolling back at commit.
+
+That approach makes prefetching simple, but it has a high overhead from locking rows only to undo the changes at rollback.
+
+To remove that overhead and make prefetching faster, support for *Fake Changes* has been implemented.
+
+By reading the rows for ``INSERT``, ``UPDATE`` and ``DELETE`` statements but not updating them (*Fake Changes*), the rollback is very fast, as in most cases there is nothing to do.
+
+Caveats
+=======
+
+``DML`` operations **are supported**
+------------------------------------
+
+Currently only ``DML`` operations **are supported**, i.e. ``UPDATE``, ``INSERT``, ``REPLACE`` and ``DELETE`` (which only sets the delete flag).
+
+``DDL`` operations **are not supported**
+----------------------------------------
+
+``DDL`` operations **are not supported**, i.e. ``ALTER TABLE`` and ``TRUNCATE TABLE``. Fake Changes should be disabled temporarily if ``DDL`` statements are going to be executed. Otherwise, data may be lost.
+
+Explicit ``COMMIT`` will lead to an error
+-----------------------------------------
+
+From the viewpoint of a transactional RDBMS, a ``COMMIT`` must never be "fake". ``ROLLBACK`` must be used to terminate a fake transaction.
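+
+A typical prefetch pass therefore enables the feature, replays the statements, and always ends with ``ROLLBACK``. The session below is only a sketch; the table and column names are hypothetical: ::
+
+  mysql> SET GLOBAL innodb_fake_changes=1;
+  mysql> BEGIN;
+  mysql> UPDATE orders SET status='shipped' WHERE id=42;  -- rows are read but not modified
+  mysql> ROLLBACK;  -- must be used instead of COMMIT
+  mysql> SET GLOBAL innodb_fake_changes=0;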
+
+System Variables
+================
+
+.. variable:: innodb_fake_changes
+   
+   :version 5.5.16-22.0: Introduced
+   :scope: ``GLOBAL``
+   :type: ``BOOLEAN``
+   :dyn: ``YES``
+   :default: ``FALSE``
+
+   This variable enables the *Fake Changes* feature.
+
+Implementation Details
+======================
+
+  * The fake session is used as a prefetch for replication; it should not affect later replication SQL execution.
+
+  * The effective unit is the transaction. The behavior is decided at the start of each transaction and never changes during it.
+
+  * ``INSERT`` operations do not use the insert buffer; the page is always actually read. ``DELETE`` operations do not use the insert buffer either.
+
+  * It never acquires ``X_LOCK`` on tables or records, only ``S_LOCK``.
+
+  * Auto-increment values behave as usual.
+
+  * It reserves free pages as usual.
+
+  * Only existing ``root ~ leaf`` pages that are accessed by the ``DML`` operation are read.
+
+  * It does not prefetch allocate/free, split/merge, ``INODE``, ``XDES`` or other management pages. The same applies to extern pages (i.e. large ``BLOB`` values).
+
+  * Foreign key constraints are checked (so that the corresponding I/O is performed), but they always pass.
+
+Related Reading
+===============
+
+  * `on MySQL replication prefetching <http://dom.as/2011/12/03/replication-prefetching/>`_

=== modified file 'doc/source/management/innodb_kill_idle_trx.rst'
--- doc/source/management/innodb_kill_idle_trx.rst	2013-05-10 09:39:17 +0000
+++ doc/source/management/innodb_kill_idle_trx.rst	2013-05-27 12:16:38 +0000
@@ -4,8 +4,6 @@
  Kill Idle Transactions
 ========================
 
-**NOTE:** This feature is currently considered **BETA** quality and may not yet be suitable for use in production environments.
-
 This feature limits the age of idle |XtraDB| transactions. If a transaction is idle for more seconds than the threshold specified, it will be killed. This prevents users from blocking purge by mistake.
 
 System Variables

=== added file 'doc/source/percona-xtrabackup-logo.jpg'
Binary files doc/source/percona-xtrabackup-logo.jpg	1970-01-01 00:00:00 +0000 and doc/source/percona-xtrabackup-logo.jpg	2013-05-27 12:16:38 +0000 differ
=== added file 'doc/source/percona_xtradb.rst'
--- doc/source/percona_xtradb.rst	1970-01-01 00:00:00 +0000
+++ doc/source/percona_xtradb.rst	2013-05-27 12:16:38 +0000
@@ -0,0 +1,13 @@
+=====================================
+ The *Percona XtraDB* Storage Engine
+=====================================
+
+.. image:: percona-xtradb.png
+   :alt: Percona XtraDB
+   :align: right
+
+|Percona| |XtraDB| is an enhanced version of the |InnoDB| storage engine, designed to better scale on modern hardware, and including a variety of other features useful in high performance environments. It is fully backwards compatible, and so can be used as a drop-in replacement for standard |InnoDB|.
+
+|Percona| |XtraDB| includes all of |InnoDB| 's robust, reliable ``ACID``-compliant design and advanced ``MVCC`` architecture, and builds on that solid foundation with more features, more tunability, more metrics, and more scalability. In particular, it is designed to scale better on many cores, to use memory more efficiently, and to be more convenient and useful. The new features are especially designed to alleviate some of |InnoDB| 's limitations. We choose features and fixes based on customer requests and on our best judgment of real-world needs as a high-performance consulting company.
+
+The |Percona| |XtraDB| engine will not have further separate binary releases; it is distributed as part of |Percona Server| and *MariaDB*.

=== added file 'doc/source/performance/query_cache_enhance.rst'
--- doc/source/performance/query_cache_enhance.rst	1970-01-01 00:00:00 +0000
+++ doc/source/performance/query_cache_enhance.rst	2013-05-27 12:16:38 +0000
@@ -0,0 +1,123 @@
+.. _query_cache_enhance:
+
+==========================
+ Query Cache Enhancements
+==========================
+
+This page describes the enhancements for the query cache. At the moment three features are available:
+
+  * Disabling the cache completely
+
+  * Diagnosing contention more easily
+
+  * Ignoring comments
+
+Disabling the cache completely
+==============================
+
+This feature allows the user to completely disable use of the query cache. When the server is compiled with the query cache enabled, the query cache is locked during use by the query cache mutex. This lock can cause performance to decrease in some situations. By disabling use of the query cache altogether when the server is started, any possibility of locking it is eliminated, and performance may be improved.
+
+The query cache can now be disabled at server startup or in an option file by: ::
+
+  --query_cache_type=0
+
+The default is 1 (query cache enabled).
+
+**Note:** This variable already exists in standard |MySQL|, but there setting ``query_cache_type=0`` still leaves the query cache mutex in use. Setting ``query_cache_type=0`` in |Percona Server| ensures both that the cache is disabled and that the mutex is not used.
+
+If query caching is off and a user tries to turn it on from within a session, the following error will be reported: ::
+
+  SET GLOBAL query_cache_type=ON;
+  ERROR 1651(HY000): Query cache is disabled; restart the server with query_cache_type=1 to enable it
+
+**Note:** This variable is implemented in standard |MySQL| from version 5.5.0.
+
+
+Diagnosing contention more easily
+=================================
+
+This feature provides a new thread state, ``Waiting on query cache mutex``. It has always been difficult to spot query cache bottlenecks because they usually happen intermittently and are not directly reported by the server. This new thread state appears in the output of ``SHOW PROCESSLIST``, easing diagnostics.
+
+Imagine that we run three queries simultaneously (each one in a separate thread): ::
+
+  > SELECT number from t where id > 0;
+  > SELECT number from t where id > 0;
+  > SELECT number from t where id > 0;
+
+If we experience query cache contention, the output of ``SHOW PROCESSLIST`` will look like this: ::
+
+  > SHOW PROCESSLIST;
+  Id      User    Host            db      Command Time    State                          Info
+  2       root    localhost       test    Sleep   2       NULL
+  3       root    localhost       test    Query   2       Waiting on query cache mutex  SELECT number from t where id > 0;
+  4       root    localhost       test    Query   1       Waiting on query cache mutex   SELECT number from t where id > 0;
+  5       root    localhost       test    Query   0       NULL
+
+Ignoring comments
+=================
+
+This feature adds an option to make the server ignore comments when checking for a query cache hit. For example, consider these two queries: ::
+
+  /* first query  */ select name from users where users.name like 'Bob%';
+  /* retry search */ select name from users where users.name like 'Bob%';
+
+By default (option off), the queries are considered different, so the server will execute them both and cache them both.
+
+If the option is enabled, the queries are considered identical, so the server will execute and cache the first one and will serve the second one directly from the query cache.
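+
+For example, the behaviour can be toggled at runtime with the :variable:`query_cache_strip_comments` variable described below. The session is only a sketch, reusing the queries from the example above: ::
+
+  mysql> SET GLOBAL query_cache_strip_comments=ON;
+  mysql> /* first query  */ select name from users where users.name like 'Bob%';
+  mysql> /* retry search */ select name from users where users.name like 'Bob%';
+
+With the option enabled, the second statement can be served from the query cache even though its leading comment differs.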
+
+
+.. Version Specific Information
+.. ----------------------------
+
+.. Disabling the query cache completely
+
+..  Percona Server Version	 Comments
+.. 5.1.49-12.0	 Full functionality available.
+.. Diagnosing contention more easily
+
+..  Percona Server Version	 Comments
+.. 5.1.49-12.0	 Full functionality available.
+.. Ignoring comments
+
+..  Percona Server Version	 Comments
+.. 5.1.47-11.0	 Critical bug (see MySQL bug 55032). Release was recalled.
+.. 5.1.47-11.1	 Fixed critical bug from previous release. MySQL bug 55032 actual. Bug b603618 actual. Bug 603619 actual.
+.. 5.1.47-11.2	 Full functionality available.
+.. 5.1.48-12.0	 Full functionality available.
+
+.. Other Information
+
+.. Disabling the query cache completely
+
+.. Author/Origin	 Percona
+.. Bugs fixed	LP bug 609027, MySQL bug 38551
+.. Diagnosing contention more easily
+
+.. Author/Origin	 Percona
+.. Bugs fixed	LP bug589484
+
+System Variables
+================
+
+.. variable:: query_cache_strip_comments
+
+   :cli: Yes
+   :conf: Yes
+   :scope: Global
+   :dyn: Yes
+   :vartype: Boolean
+   :default: Off
+
+
+Makes the server ignore comments when checking for a query cache hit.
+
+Other Reading
+=============
+
+  * `MySQL general thread states <http://dev.mysql.com/doc/refman/5.1/en/general-thread-states.html>`_
+
+  * `RAII <http://en.wikibooks.org/wiki/More_C%2B%2B_Idioms/Resource_Acquisition_Is_Initialization>`_
+
+  * `Scope guard <http://en.wikibooks.org/wiki/More_C%2B%2B_Idioms/Scope_Guard>`_
+
+  * `Query cache freezes <http://www.mysqlperformanceblog.com/2009/03/19/mysql-random-freezes-could-be-the-query-cache/>`_

=== modified file 'doc/source/performance/threadpool.rst'
--- doc/source/performance/threadpool.rst	2013-04-02 13:02:39 +0000
+++ doc/source/performance/threadpool.rst	2013-05-27 12:16:38 +0000
@@ -24,12 +24,34 @@
  
   Current implementation of the thread pool is built in the server, unlike the upstream version which is implemented as a plugin. Another significant implementation difference is that this implementation doesn't try to minimize the number of concurrent transactions like the ``MySQL Enterprise Threadpool``. Because of these things this implementation isn't compatible with the upstream one.
 
+Priority connection scheduling
+==============================
+
+In |Percona Server| :rn:`5.6.11-60.3` priority connection scheduling for the thread pool has been implemented. Even though the thread pool puts a limit on the number of concurrently running queries, the number of open transactions may remain high, because connections with already started transactions are put at the end of the queue. A higher number of open transactions has a number of implications for the currently running queries. To improve performance, the new :variable:`thread_pool_high_prio_tickets` variable has been introduced.
+
+This variable controls the high priority queue policy. Each new connection is assigned this many tickets to enter the high priority queue. Whenever a query has to be queued to be executed later because no threads are available, the thread pool puts the connection into the high priority queue if the following conditions apply:
+
+  1. The connection has an open transaction in the server.
+  2. The number of high priority tickets of this connection is non-zero.
+
+If both the above conditions hold, the connection is put into the high priority queue and its tickets value is decremented. Otherwise the connection is put into the common queue with the initial tickets value specified with this option.
+
+Each time the thread pool looks for a new connection to process, first it checks the high priority queue, and picks connections from the common queue only when the high priority one is empty.
+
+The goal is to minimize the number of open transactions in the server. In many cases it is beneficial to give short-running transactions a chance to commit faster and thus deallocate server resources and locks without waiting in the same queue with other connections that are about to start a new transaction, or those that have run out of their high priority tickets.
+
+With the default value of 0, all connections are always put into the common queue, i.e. no priority scheduling is used, as in the original implementation in |MariaDB|. The higher the value, the more chances each transaction gets to enter the high priority queue and commit before it is put into the common queue.
+
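+For example, priority scheduling can be enabled by assigning each connection a small number of tickets. The value below is purely illustrative, not a tuned recommendation: ::
+
+  mysql> SET GLOBAL thread_pool_high_prio_tickets=4;
+
+or, equivalently, in the configuration file: ::
+
+  [mysqld]
+  thread_pool_high_prio_tickets=4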
+
 Version Specific Information
 ============================
 
  * :rn:`5.6.10-60.2`
     ``Thread Pool`` feature implemented. This feature was ported from |MariaDB|.
 
+ * :rn:`5.6.11-60.3`
+    Implemented priority connection scheduling and introduced the new :variable:`thread_pool_high_prio_tickets` variable on top of the original implementation from |MariaDB|.
+
 System Variables
 ================
 
@@ -44,6 +66,17 @@
 
 This variable can be used to limit the time an idle thread should wait before exiting.
 
+.. variable:: thread_pool_high_prio_tickets
+
+     :cli: Yes
+     :conf: Yes
+     :scope: Global
+     :dyn: Yes
+     :vartype: Numeric
+     :default: 0
+
+This variable controls the high priority queue policy. Each new connection is assigned this many tickets to enter the high priority queue. 
+
 .. variable:: thread_pool_max_threads
 
      :cli: Yes

=== removed file 'doc/source/reliability/error_pad.rst'
--- doc/source/reliability/error_pad.rst	2013-05-27 12:16:36 +0000
+++ doc/source/reliability/error_pad.rst	1970-01-01 00:00:00 +0000
@@ -1,55 +0,0 @@
-.. _error_pad:
-
-==========================
- Error Code Compatibility
-==========================
-
-|Percona Server| with |XtraDB| has error code incompatibilities with |MySQL| 5.5. It is important to maintain compatibility in the error codes used by the servers. For example, scripts that may be run on both servers could contain references to error codes.
-
-The reasons for the current incompatibilities are:
-
-  * |Percona Server| with |XtraDB| contains features that have been backported from MyQL 5.5. Some of the |MySQL| 5.5 features added new error codes.
-
-  * Some |Percona Server| with |XtraDB| features have added new error codes.
-
-The solution to the first problem is to preserve |MySQL| 5.5 error codes in the |Percona Server|. An example of where this has been done is |Percona Server| feature Query Cache Enhancements. This feature adds error ``ER_QUERY_CACHE_DISABLED`` to the |Percona Server|, which is defined as error code 1651 in |MySQL| 5.5.
-
-After migrating |Percona Server| / |XtraDB| to |MySQL| 5.5, users might experience troubles because of this.
-
-The solution to the second problem is to insure that unique error codes are chosen, when adding new ones to |Percona Server|, that will never be duplicated during |MySQL| development.
-
-For example, |MySQL| has a tool ``comp_err`` that generates:
-
-  - :file:`errmsg.sys` files
-
-  - header file :file:`include/mysqld_error.h`
-
-  - header file :file:`include/mysqld_ername.h`
-
-from the file :file:`errmsg.txt`.
-
-To keep error numbers consistent, we should add some fictive errors to :file:`errmsg.txt`, because ``comp_err`` assigns error code numbers sequentially, without gaps.
-
-I propose patch to ``comp_err``.
-
-This patch allows usage of a new syntax, with prefix ``PADD``, for example: ::
-
-  PADD_QUERY_CACHE_DISABLED 1651
-    eng "ER_QUERY_CACHE_DISABLED padding to 1651 error"
-  ER_QUERY_CACHE_DISABLED
-    eng "Query cache is disabled; restart the server with query_cache_type=1 to enable it"
-
-comp_err with my patch padds empty intervals (from last error code number to 1651) by error message ``ER_QUERY_CACHE_DISABLED padding to 1651 error``, i.e. and ``ER_QUERY_CACHE_DISABLED`` now has error code 1651 (as desired). I propose to use this patch for Percona errors, for example: ::
-
-  PADD_PERCONA_NEW_ERROR_CODE 4000
-    end "Padd empty space to error code number 4000 (Percona error codes)"
-  ...some percona error codes...
-
-Patch only adds prefix ``PADD_`` and padds error in sys files. All other |MySQL| code (load*.sys files, my_error, etc) works as old one.
-
-
-Version-Specific Information
-============================
-
-  * 5.5.8-20.0
-    Full functionality available.

=== removed file 'doc/source/reliability/innodb_corrupt_table_action.rst'
--- doc/source/reliability/innodb_corrupt_table_action.rst	2013-05-27 12:16:36 +0000
+++ doc/source/reliability/innodb_corrupt_table_action.rst	1970-01-01 00:00:00 +0000
@@ -1,35 +0,0 @@
-.. _innodb_corrupt_table_action_page:
-
-=========================
- Handle Corrupted Tables
-=========================
-
-Instead of crashing the server as they used to do, corrupted |InnoDB| tables are simply disabled, so that the database remains available while the corruption is being fixed.
-
-This feature adds a new system variable.
-
-Version Specific Information
-============================
-
-  * 5.6.10-60.2:
-    Feature ported from |Percona Server| 5.5
-
-System Variables
-================
-
-.. variable:: innodb_corrupt_table_action
-
-     :version 5.6.10-60.2: Introduced.
-     :cli: Yes
-     :conf: Yes
-     :scope: Global
-     :dyn: Yes
-     :vartype: ULONG
-     :range: ``assert``, ``warn``, ``salvage``
-
-* With the default value |XtraDB| will intentionally crash the server with an assertion failure as it would normally do when detecting corrupted data in a single-table tablespace.
-
-* If the ``warn`` value is used it will pass corruption of the table as ``corrupt table`` instead of crashing itself. For this to work :option:`innodb_file_per_table` should be enabled. All file I/O for the datafile after detected as corrupt is disabled, except for the deletion. 
-
-* When the option value is ``salvage``, |XtraDB| allows read access to a corrupted tablespace, but ignores corrupted pages".
-

=== modified file 'doc/source/upstream-bug-fixes.rst'
--- doc/source/upstream-bug-fixes.rst	2013-05-26 10:55:00 +0000
+++ doc/source/upstream-bug-fixes.rst	2013-05-27 12:16:38 +0000
@@ -5,21 +5,45 @@
 =============================================================
 
 +-------------------------------------------------------------------------------------------------------------+
+|:Upstream bug: :mysqlbug:`68845` - Unnecessary log_sys->mutex reacquisition in mtr_log_reserve_and_write()   |
+|:Launchpad bug: :bug:`1163439`                                                                               |
+|:Upstream state: Open (checked on 2013-04-10)                                                                |
+|:Fix Released: :rn:`5.6.11-60.3`                                                                             |
+|:Upstream fix: N/A                                                                                           |
++-------------------------------------------------------------------------------------------------------------+
+|:Upstream bug: :mysqlbug:`62578` - mysql client aborts connection on terminal resize                         |
+|:Launchpad bug: :bug:`925343`                                                                                |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
+|:Fix Released: :rn:`5.6.11-60.3`                                                                             |
+|:Upstream fix: N/A                                                                                           |
++-------------------------------------------------------------------------------------------------------------+
+|:Upstream bug: :mysqlbug:`49169` - read_view_open_now is inefficient with many concurrent sessions           |
+|:Launchpad bug: :bug:`1131187` and :bug:`1131189`                                                            |
+|:Upstream state: Closed                                                                                      |
+|:Fix Released: :rn:`5.6.11-60.3`                                                                             |
+|:Upstream fix: N/A                                                                                           |
++-------------------------------------------------------------------------------------------------------------+
+|:Upstream bug: :mysqlbug:`63144` - CREATE TABLE IF NOT EXISTS metadata lock is too restrictive               |
+|:Launchpad bug: :bug:`1127008`                                                                               |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
+|:Fix Released: :rn:`5.6.11-60.3`                                                                             |
+|:Upstream fix: N/A                                                                                           |
++-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`68477` - Suboptimal code in skip_trailing_space()                                  |
 |:Launchpad bug: :bug:`1132351`                                                                               |
-|:Upstream state: Verified (checked on 2013-03-05)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`68476` - Suboptimal code in my_strnxfrm_simple()                                   |
 |:Launchpad bug: :bug:`1132350`                                                                               |
-|:Upstream state: Verified (checked on 2013-03-05)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`67504` - Duplicate error in replication with slave triggers and auto increment     |
 |:Launchpad bug: :bug:`1068210`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
@@ -49,7 +73,7 @@
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`66237` - Temporary files created by binary log cache are not purged after transa...|
 |:Launchpad bug: :bug:`1070856`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
@@ -61,67 +85,67 @@
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`62856` - Check for "stack overrun" doesn't work with gcc-4.6, server crashes       |
 |:Launchpad bug: :bug:`1042517`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Closed                                                                                      |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`61180` - korr/store macros in my_global.h assume the argument to be a char pointer |
 |:Launchpad bug: :bug:`1042517`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`61179` - Unoptimized versions of korr/store macros in my_global.h are used on ...  |
 |:Launchpad bug: :bug:`1042517`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`61178` - Incorrect implementation of intersect(ulonglong) in non-optimized Bitmap..|
 |:Launchpad bug: :bug:`1042517`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`54127` - mysqld segfaults when built using --with-max-indexes=128                  |
 |:Launchpad bug: :bug:`1042517`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`64800` - mysqldump with --include-master-host-port putting quotes around port no.  | 
 |:Launchpad bug: :bug:`1013432`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`66301` - INSERT ... ON DUPLICATE KEY UPDATE + innodb_autoinc_lock_mode=1 is broken |
 |:Launchpad bug: :bug:`1035225`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`60743` - typo in cmake/dtrace.cmake                                                |
 |:Launchpad bug: :bug:`1013455`                                                                               |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`64663` - Segfault when adding indexes to InnoDB temporary tables                   |
 |:Launchpad bug: :bug:`999147`                                                                                |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`64432` - Bug :mysqlbug:`54330` (Broken fast index creation) was never fixed in 5.5 |
 |:Launchpad bug: :bug:`939485`                                                                                |
-|:Upstream state: Documenting (checked on 2013-02-21)                                                         |
+|:Upstream state: Closed                                                                                      |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`61595` - mysql-test/include/wait_for_slave_param.inc timeout logic is incorrect    |
 |:Launchpad bug: :bug:`800035`                                                                                |
-|:Upstream state: Verified (checked on 2013-02-21)                                                            |
+|:Upstream state: Verified (checked on 2013-04-10)                                                            |
 |:Fix Released: :rn:`5.6.11-60.3`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+
@@ -133,7 +157,7 @@
 +-------------------------------------------------------------------------------------------------------------+
 |:Upstream bug: :mysqlbug:`68116` - InnoDB monitor may hit an assertion error in buf_page_get_gen in debug ...|
 |:Launchpad bug: :bug:`1100178`                                                                               |
-|:Upstream state: Analyzing (checked on 2013-02-21)                                                           |
+|:Upstream state: Analyzing (checked on 2013-04-10)                                                           |
 |:Fix Released: :rn:`5.6.10-60.2`                                                                             |
 |:Upstream fix: N/A                                                                                           |
 +-------------------------------------------------------------------------------------------------------------+

=== removed directory 'python-for-subunit2junitxml'
=== removed file 'python-for-subunit2junitxml/BytesIO.py'
--- python-for-subunit2junitxml/BytesIO.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/BytesIO.py	1970-01-01 00:00:00 +0000
@@ -1,136 +0,0 @@
-
-# http://wiki.python.org/moin/BytesIO
-#
-# A skeleton one used for systems that don't have BytesIO.
-#
-# It's enough for subunit at least....
-
-class BytesIO(object):
-    """ A file-like API for reading and writing bytes objects.
-
-    Mostly like StringIO, but write() calls modify the underlying
-    bytes object.
-
-    >>> b = bytes()
-    >>> f = BytesIO(b, 'w')
-    >>> f.write(bytes.fromhex('ca fe ba be'))
-    >>> f.write(bytes.fromhex('57 41 56 45'))
-    >>> b
-    bytes([202, 254, 186, 190, 87, 65, 86, 69])
-    """
-
-    def __init__(self, buf, mode='r'):
-        """ Create a new BytesIO for reading or writing the given buffer.
-
-        buf - Back-end buffer for this BytesIO.  A bytes object.
-            Actually, anything that supports len(), slice-assignment,
-            and += will work.
-        mode - One of 'r', 'w', 'a'.
-            An optional 'b' is also allowed, but it doesn't do anything.
-        """
-        # XXX many 'mode' possibilities aren't allowed yet: 'rw+Ut'
-        if len(mode) == 2 and mode[-1] == 'b':
-            mode = mode[:-1]  # binary mode goes without saying
-        if mode not in ('r', 'w', 'a'):
-            raise ValueError("mode must be 'r', 'w', or 'a'")
-
-        self._buf = buf
-        self.mode = mode
-        self.closed = False
-        if self.mode == 'w':
-            del buf[:]
-            self._point = 0
-        elif self.mode == 'r':
-            self._point = 0
-        else: # 'a'
-            self._point = len(buf)
-
-    def close(self):
-        self.closed = True
-
-    def _check_closed(self):
-        if self.closed:
-            raise ValueError("file is closed")
-
-    def flush(self):
-        self._check_closed()
-
-    def next(self):
-        line = self.readline()
-        if len(line) == 0:
-            raise StopIteration
-        return line
-
-    def read(self, size=None):
-        self._check_closed()
-        if size is None:
-            e = len(self._buf)
-        else:
-            e = min(self._point + size, len(self._buf))
-        r = self._buf[self._point:e]
-        self._point = e
-        return r
-
-    def readline(self, size=None):
-        self._check_closed()
-        die  # XXX TODO - assume ascii and read a line
-
-    def readlines(self, sizehint=None):
-        # XXX TODO handle sizehint
-        return list(self)
-
-    def seek(self, offset, whence=0):
-        self._check_closed()
-
-        if whence == 0:
-            self._point = offset
-        elif whence == 1:
-            self._point += offset
-        elif whence == 2:
-            self._point = len(self._buf) + offset
-        else:
-            raise ValueError("whence must be 0, 1, or 2")
-
-        if self._point < 0:
-            self._point = 0  # XXX is this right?
-
-    def tell(self):
-        self._check_closed()
-        return self._point
-
-    def truncate(self, size=None):
-        self._check_closed()
-        if size is None:
-            size = self.tell()
-        del self._buf[size:]
-
-    def write(self, data):
-        self._check_closed()
-        amt = len(data)
-        size = len(self._buf)
-        if self.mode == 'a':
-            self._point = size
-
-        if self._point > size:
-            if isinstance(b, bytes):
-                blank = bytes([0])
-            else:
-                # Don't know what default value to insert, unfortunately
-                raise ValueError("can't write past the end of this object")
-            self._buf += blank * (self._point - size) + data
-            self._point = len(self._buf)
-        else:
-            p = self._point
-            self._buf[p:p + amt] = data
-            self._point = min(p + amt, len(self._buf))
-
-    def writelines(self, seq):
-        for line in seq:
-            self.write(line)
-
-    def __iter__(self):
-        return self
-
-    @property
-    def name(self):
-        return repr(self)

=== removed directory 'python-for-subunit2junitxml/iso8601'
=== removed file 'python-for-subunit2junitxml/iso8601/LICENSE'
--- python-for-subunit2junitxml/iso8601/LICENSE	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/iso8601/LICENSE	1970-01-01 00:00:00 +0000
@@ -1,20 +0,0 @@
-Copyright (c) 2007 Michael Twomey
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be included
-in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

=== removed file 'python-for-subunit2junitxml/iso8601/README'
--- python-for-subunit2junitxml/iso8601/README	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/iso8601/README	1970-01-01 00:00:00 +0000
@@ -1,26 +0,0 @@
-A simple package to deal with ISO 8601 date time formats.
-
-ISO 8601 defines a neutral, unambiguous date string format, which also
-has the property of sorting naturally.
-
-e.g. YYYY-MM-DDTHH:MM:SSZ or 2007-01-25T12:00:00Z
-
-Currently this covers only the most common date formats encountered, not
-all of ISO 8601 is handled.
-
-Currently the following formats are handled:
-
-* 2006-01-01T00:00:00Z
-* 2006-01-01T00:00:00[+-]00:00
-
-I'll add more as I encounter them in my day to day life. Patches with 
-new formats and tests will be gratefully accepted of course :)
-
-References:
-
-* http://www.cl.cam.ac.uk/~mgk25/iso-time.html - simple overview
-
-* http://hydracen.com/dx/iso8601.htm - more detailed enumeration of
-  valid formats.
-
-See the LICENSE file for the license this package is released under.

=== removed file 'python-for-subunit2junitxml/iso8601/README.subunit'
--- python-for-subunit2junitxml/iso8601/README.subunit	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/iso8601/README.subunit	1970-01-01 00:00:00 +0000
@@ -1,5 +0,0 @@
-This is a [slightly rearranged] import of http://pypi.python.org/pypi/iso8601/
-version 0.1.4. The OS X hidden files have been stripped, and the package
-turned into a single module, to simplify installation. The remainder of the
-source distribution is included in the subunit source tree at python/iso8601
-for reference.

=== removed file 'python-for-subunit2junitxml/iso8601/setup.py'
--- python-for-subunit2junitxml/iso8601/setup.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/iso8601/setup.py	1970-01-01 00:00:00 +0000
@@ -1,58 +0,0 @@
-try:
-    from setuptools import setup
-except ImportError:
-    from distutils import setup
-
-long_description="""Simple module to parse ISO 8601 dates
-
-This module parses the most common forms of ISO 8601 date strings (e.g.
-2007-01-14T20:34:22+00:00) into datetime objects.
-
->>> import iso8601
->>> iso8601.parse_date("2007-01-25T12:00:00Z")
-datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.iso8601.Utc ...>)
->>>
-
-Changes
-=======
-
-0.1.4
------
-
-* The default_timezone argument wasn't being passed through correctly,
-  UTC was being used in every case. Fixes issue 10.
-
-0.1.3
------
-
-* Fixed the microsecond handling, the generated microsecond values were 
-  way too small. Fixes issue 9.
-
-0.1.2
------
-
-* Adding ParseError to __all__ in iso8601 module, allows people to import it.
-  Addresses issue 7.
-* Be a little more flexible when dealing with dates without leading zeroes.
-  This violates the spec a little, but handles more dates as seen in the 
-  field. Addresses issue 6.
-* Allow date/time separators other than T.
-
-0.1.1
------
-
-* When parsing dates without a timezone the specified default is used. If no
-  default is specified then UTC is used. Addresses issue 4.
-"""
-
-setup(
-    name="iso8601",
-    version="0.1.4",
-    description=long_description.split("\n")[0],
-    long_description=long_description,
-    author="Michael Twomey",
-    author_email="micktwomey+iso8601@gmail.com",
-    url="http://code.google.com/p/pyiso8601/",
-    packages=["iso8601"],
-    license="MIT",
-)

=== removed file 'python-for-subunit2junitxml/iso8601/test_iso8601.py'
--- python-for-subunit2junitxml/iso8601/test_iso8601.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/iso8601/test_iso8601.py	1970-01-01 00:00:00 +0000
@@ -1,111 +0,0 @@
-import iso8601
-
-def test_iso8601_regex():
-    assert iso8601.ISO8601_REGEX.match("2006-10-11T00:14:33Z")
-
-def test_timezone_regex():
-    assert iso8601.TIMEZONE_REGEX.match("+01:00")
-    assert iso8601.TIMEZONE_REGEX.match("+00:00")
-    assert iso8601.TIMEZONE_REGEX.match("+01:20")
-    assert iso8601.TIMEZONE_REGEX.match("-01:00")
-
-def test_parse_date():
-    d = iso8601.parse_date("2006-10-20T15:34:56Z")
-    assert d.year == 2006
-    assert d.month == 10
-    assert d.day == 20
-    assert d.hour == 15
-    assert d.minute == 34
-    assert d.second == 56
-    assert d.tzinfo == iso8601.UTC
-
-def test_parse_date_fraction():
-    d = iso8601.parse_date("2006-10-20T15:34:56.123Z")
-    assert d.year == 2006
-    assert d.month == 10
-    assert d.day == 20
-    assert d.hour == 15
-    assert d.minute == 34
-    assert d.second == 56
-    assert d.microsecond == 123000
-    assert d.tzinfo == iso8601.UTC
-
-def test_parse_date_fraction_2():
-    """From bug 6
-    
-    """
-    d = iso8601.parse_date("2007-5-7T11:43:55.328Z'")
-    assert d.year == 2007
-    assert d.month == 5
-    assert d.day == 7
-    assert d.hour == 11
-    assert d.minute == 43
-    assert d.second == 55
-    assert d.microsecond == 328000
-    assert d.tzinfo == iso8601.UTC
-
-def test_parse_date_tz():
-    d = iso8601.parse_date("2006-10-20T15:34:56.123+02:30")
-    assert d.year == 2006
-    assert d.month == 10
-    assert d.day == 20
-    assert d.hour == 15
-    assert d.minute == 34
-    assert d.second == 56
-    assert d.microsecond == 123000
-    assert d.tzinfo.tzname(None) == "+02:30"
-    offset = d.tzinfo.utcoffset(None)
-    assert offset.days == 0
-    assert offset.seconds == 60 * 60 * 2.5
-
-def test_parse_invalid_date():
-    try:
-        iso8601.parse_date(None)
-    except iso8601.ParseError:
-        pass
-    else:
-        assert 1 == 2
-
-def test_parse_invalid_date2():
-    try:
-        iso8601.parse_date("23")
-    except iso8601.ParseError:
-        pass
-    else:
-        assert 1 == 2
-
-def test_parse_no_timezone():
-    """issue 4 - Handle datetime string without timezone
-    
-    This tests what happens when you parse a date with no timezone. While not
-    strictly correct this is quite common. I'll assume UTC for the time zone
-    in this case.
-    """
-    d = iso8601.parse_date("2007-01-01T08:00:00")
-    assert d.year == 2007
-    assert d.month == 1
-    assert d.day == 1
-    assert d.hour == 8
-    assert d.minute == 0
-    assert d.second == 0
-    assert d.microsecond == 0
-    assert d.tzinfo == iso8601.UTC
-
-def test_parse_no_timezone_different_default():
-    tz = iso8601.FixedOffset(2, 0, "test offset")
-    d = iso8601.parse_date("2007-01-01T08:00:00", default_timezone=tz)
-    assert d.tzinfo == tz
-
-def test_space_separator():
-    """Handle a separator other than T
-    
-    """
-    d = iso8601.parse_date("2007-06-23 06:40:34.00Z")
-    assert d.year == 2007
-    assert d.month == 6
-    assert d.day == 23
-    assert d.hour == 6
-    assert d.minute == 40
-    assert d.second == 34
-    assert d.microsecond == 0
-    assert d.tzinfo == iso8601.UTC

=== removed directory 'python-for-subunit2junitxml/junitxml'
=== removed file 'python-for-subunit2junitxml/junitxml/__init__.py'
--- python-for-subunit2junitxml/junitxml/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/junitxml/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,221 +0,0 @@
-#
-#  junitxml: extensions to Python unittest to get output junitxml
-#  Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
-#
-#  Copying permitted under the LGPL-3 licence, included with this library.
-
-"""unittest compatible JUnit XML output."""
-
-
-import datetime
-import re
-import time
-import unittest
-
-# same format as sys.version_info: "A tuple containing the five components of
-# the version number: major, minor, micro, releaselevel, and serial. All
-# values except releaselevel are integers; the release level is 'alpha',
-# 'beta', 'candidate', or 'final'. The version_info value corresponding to the
-# Python version 2.0 is (2, 0, 0, 'final', 0)."  Additionally we use a
-# releaselevel of 'dev' for unreleased under-development code.
-#
-# If the releaselevel is 'alpha' then the major/minor/micro components are not
-# established at this point, and setup.py will use a version of next-$(revno).
-# If the releaselevel is 'final', then the tarball will be major.minor.micro.
-# Otherwise it is major.minor.micro~$(revno).
-__version__ = (0, 7, 0, 'alpha', 0)
-
-
-def test_suite():
-    import junitxml.tests
-    return junitxml.tests.test_suite()
-
-
-class LocalTimezone(datetime.tzinfo):
-
-    def __init__(self):
-        self._offset = None
-
-    # It seems that the minimal possible implementation is to just return all
-    # None for every function, but then it breaks...
-    def utcoffset(self, dt):
-        if self._offset is None:
-            t = 1260423030 # arbitrary, but doesn't handle dst very well
-            dt = datetime.datetime
-            self._offset = (dt.fromtimestamp(t) - dt.utcfromtimestamp(t))
-        return self._offset
-
-    def dst(self, dt):
-        return datetime.timedelta(0)
-
-    def tzname(self, dt):
-        return None
-
-
-def _error_name(eclass):
-    module = eclass.__module__
-    if module not in ("__main__", "builtins", "exceptions"):
-        return ".".join([module, eclass.__name__])
-    return eclass.__name__
-
-
-_non_cdata = "[\0-\b\x0B-\x1F\uD800-\uDFFF\uFFFE\uFFFF]+"
-if "\\u" in _non_cdata:
-    _non_cdata = _non_cdata.decode("unicode-escape")
-    def _strip_invalid_chars(s, _sub=re.compile(_non_cdata, re.UNICODE).sub):
-        if not isinstance(s, unicode):
-            try:
-                s = s.decode("utf-8")
-            except UnicodeDecodeError:
-                s = s.decode("ascii", "replace")
-        return _sub("", s).encode("utf-8")
-else:
-    def _strip_invalid_chars(s, _sub=re.compile(_non_cdata, re.UNICODE).sub):
-        return _sub("", s)
-def _escape_content(s):
-    return (_strip_invalid_chars(s)
-        .replace("&", "&amp;")
-        .replace("<", "&lt;")
-        .replace("]]>", "]]&gt;"))
-def _escape_attr(s):
-    return (_strip_invalid_chars(s)
-        .replace("&", "&amp;")
-        .replace("<", "&lt;")
-        .replace("]]>", "]]&gt;")
-        .replace('"', "&quot;")
-        .replace("\t", "&#x9;")
-        .replace("\n", "&#xA;"))
-
-
-class JUnitXmlResult(unittest.TestResult):
-    """A TestResult which outputs JUnit compatible XML."""
-    
-    def __init__(self, stream):
-        """Create a JUnitXmlResult.
-
-        :param stream: A stream to write results to. Note that due to the
-            nature of JUnit XML output, nothing will be written to the stream
-            until stopTestRun() is called.
-        """
-        self.__super = super(JUnitXmlResult, self)
-        self.__super.__init__()
-        # GZ 2010-09-03: We have a problem if passed a text stream in Python 3
-        #                as really we want to write raw UTF-8 to ensure that
-        #                the encoding is not mangled later
-        self._stream = stream
-        self._results = []
-        self._set_time = None
-        self._test_start = None
-        self._run_start = None
-        self._tz_info = None
-
-    def startTestRun(self):
-        """Start a test run."""
-        self._run_start = self._now()
-
-    def _get_tzinfo(self):
-        if self._tz_info is None:
-            self._tz_info = LocalTimezone()
-        return self._tz_info
-
-    def _now(self):
-        if self._set_time is not None:
-            return self._set_time
-        else:
-            return datetime.datetime.now(self._get_tzinfo())
-
-    def time(self, a_datetime):
-        self._set_time = a_datetime
-        if (self._run_start is not None and
-            self._run_start > a_datetime):
-            self._run_start = a_datetime
-
-    def startTest(self, test):
-        self.__super.startTest(test)
-        self._test_start = self._now()
-
-    def _duration(self, from_datetime):
-        try:
-            delta = self._now() - from_datetime
-        except TypeError:
-            n = self._now()
-            delta = datetime.timedelta(-1)
-        seconds = delta.days * 3600*24 + delta.seconds
-        return seconds + 0.000001 * delta.microseconds
-
-    def _test_case_string(self, test):
-        duration = self._duration(self._test_start)
-        test_id = test.id()
-        # Split on the last dot not inside a parameter
-        class_end = test_id.rfind(".", 0, test_id.find("("))
-        if class_end == -1:
-            classname, name = "", test_id
-        else:
-            classname, name = test_id[:class_end], test_id[class_end+1:]
-        self._results.append('<testcase classname="%s" name="%s" '
-            'time="%0.3f"' % (_escape_attr(classname), _escape_attr(name), duration))
-
-    def stopTestRun(self):
-        """Stop a test run.
-
-        This allows JUnitXmlResult to output the XML representation of the test
-        run.
-        """
-        duration = self._duration(self._run_start)
-        self._stream.write('<testsuite errors="%d" failures="%d" name="" '
-            'tests="%d" time="%0.3f">\n' % (len(self.errors),
-            len(self.failures) + len(getattr(self, "unexpectedSuccesses", ())),
-            self.testsRun, duration))
-        self._stream.write(''.join(self._results))
-        self._stream.write('</testsuite>\n')
-
-    def addError(self, test, error):
-        self.__super.addError(test, error)
-        self._test_case_string(test)
-        self._results.append('>\n')
-        self._results.append('<error type="%s">%s</error>\n</testcase>\n' % (
-            _escape_attr(_error_name(error[0])),
-            _escape_content(self._exc_info_to_string(error, test))))
-
-    def addFailure(self, test, error):
-        self.__super.addFailure(test, error)
-        self._test_case_string(test)
-        self._results.append('>\n')
-        self._results.append('<failure type="%s">%s</failure>\n</testcase>\n' %
-            (_escape_attr(_error_name(error[0])),
-            _escape_content(self._exc_info_to_string(error, test))))
-
-    def addSuccess(self, test):
-        self.__super.addSuccess(test)
-        self._test_case_string(test)
-        self._results.append('/>\n')
-
-    def addSkip(self, test, reason):
-        try:
-            self.__super.addSkip(test, reason)
-        except AttributeError:
-            # Python < 2.7|3.1
-            pass
-        self._test_case_string(test)
-        self._results.append('>\n')
-        self._results.append('<skip>%s</skip>\n</testcase>\n'% _escape_attr(reason))
-
-    def addUnexpectedSuccess(self, test):
-        try:
-            self.__super.addUnexpectedSuccess(test)
-        except AttributeError:
-            # Python < 2.7|3.1
-            pass
-        self._test_case_string(test)
-        self._results.append('>\n')
-        self._results.append('<failure type="unittest.case._UnexpectedSuccess"/>\n</testcase>\n')
-
-    def addExpectedFailure(self, test, error):
-        try:
-            self.__super.addExpectedFailure(test, error)
-        except AttributeError:
-            # Python < 2.7|3.1
-            pass
-        self._test_case_string(test)
-        self._results.append('/>\n')
-
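A minimal usage sketch of the JUnitXmlResult class above; as its docstring
notes, output is buffered and nothing reaches the stream until stopTestRun()
is called (StringIO stands in for a real file here):

    import unittest
    from io import StringIO

    import junitxml

    class Passes(unittest.TestCase):
        def test_me(self):
            pass

    stream = StringIO()
    result = junitxml.JUnitXmlResult(stream)
    result.startTestRun()
    Passes("test_me").run(result)
    result.stopTestRun()
    # A single <testsuite> element containing one <testcase/> child.
    print(stream.getvalue())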

=== removed directory 'python-for-subunit2junitxml/junitxml/tests'
=== removed file 'python-for-subunit2junitxml/junitxml/tests/__init__.py'
--- python-for-subunit2junitxml/junitxml/tests/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/junitxml/tests/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,16 +0,0 @@
-#
-#  junitxml: extensions to Python unittest to get output junitxml
-#  Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
-#
-#  Copying permitted under the LGPL-3 licence, included with this library.
-
-import unittest
-
-from junitxml.tests import (
-    test_junitxml,
-    )
-
-def test_suite():
-    return unittest.TestLoader().loadTestsFromNames([
-        'junitxml.tests.test_junitxml',
-        ])

=== removed file 'python-for-subunit2junitxml/junitxml/tests/test_junitxml.py'
--- python-for-subunit2junitxml/junitxml/tests/test_junitxml.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/junitxml/tests/test_junitxml.py	1970-01-01 00:00:00 +0000
@@ -1,327 +0,0 @@
-#
-#  junitxml: extensions to Python unittest to get output junitxml
-#  Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
-#
-#  Copying permitted under the LGPL-3 licence, included with this library.
-
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from io import StringIO
-import datetime
-import re
-import sys
-import unittest
-import xml.dom.minidom
-
-import junitxml
-
-class TestImports(unittest.TestCase):
-
-    def test_result(self):
-        from junitxml import JUnitXmlResult
-
-
-class TestJUnitXmlResult__init__(unittest.TestCase):
-
-    def test_with_stream(self):
-        result = junitxml.JUnitXmlResult(StringIO())
-
-
-class TestJUnitXmlResult(unittest.TestCase):
-
-    def setUp(self):
-        self.output = StringIO()
-        self.result = junitxml.JUnitXmlResult(self.output)
-
-    def get_output(self):
-        output = self.output.getvalue()
-        # Collapse detailed regions into specific strings we can match on
-        return re.sub(r'(?s)<failure (.*?)>.*?</failure>',
-            r'<failure \1>failure</failure>', re.sub(
-            r'(?s)<error (.*?)>.*?</error>', r'<error \1>error</error>',
-            re.sub(r'time="\d+\.\d+"', 'time="0.000"', output)))
-
-    def run_test_or_simulate(self, test, method_name, manual_method,
-        *manual_args):
-        if getattr(test, method_name, None):
-            test.run(self.result)
-        else:
-            # older python - manually execute
-            self.result.startTest(test)
-            manual_method(test, *manual_args)
-            self.result.stopTest(test)
-
-    def test_run_duration_handles_datestamping_in_the_past(self):
-        # When used via subunit2junitxml, startTestRun is called before
-        # any tz info in the test stream has been seen.
-        # So, we use the earliest reported timestamp as the start time,
-        # replacing _run_start if needed.
-        self.result.startTestRun() # the time is now.
-        # Lose an hour (peeks inside, a little naughty but not very).
-        self.result.time(self.result._run_start - datetime.timedelta(0, 3600))
-        self.result.stopTestRun()
-        self.assertEqual("""<testsuite errors="0" failures="0" name="" tests="0" time="0.000">
-</testsuite>
-""", self.get_output())
-
-    def test_startTestRun_no_output(self):
-        # startTestRun doesn't output anything, because JUnit wants an up-front
-        # summary.
-        self.result.startTestRun()
-        self.assertEqual('', self.get_output())
-
-    def test_stopTestRun_outputs(self):
-        # When stopTestRun is called, everything is output.
-        self.result.startTestRun()
-        self.result.stopTestRun()
-        self.assertEqual("""<testsuite errors="0" failures="0" name="" tests="0" time="0.000">
-</testsuite>
-""", self.get_output())
-
-    def test_test_count(self):
-        class Passes(unittest.TestCase):
-            def test_me(self):
-                pass
-        self.result.startTestRun()
-        Passes("test_me").run(self.result)
-        Passes("test_me").run(self.result)
-        self.result.stopTestRun()
-        # When tests are run, the number of tests is counted.
-        output = self.get_output()
-        self.assertTrue('tests="2"' in output)
-
-    def test_test_id_with_parameter(self):
-        class Passes(unittest.TestCase):
-            def id(self):
-                return unittest.TestCase.id(self) + '(version_1.6)'
-            def test_me(self):
-                pass
-        self.result.startTestRun()
-        Passes("test_me").run(self.result)
-        self.result.stopTestRun()
-        output = self.get_output()
-        self.assertTrue('Passes" name="test_me(version_1.6)"' in output)
-
-    def test_erroring_test(self):
-        class Errors(unittest.TestCase):
-            def test_me(self):
-                1/0
-        self.result.startTestRun()
-        Errors("test_me").run(self.result)
-        self.result.stopTestRun()
-        self.assertEqual("""<testsuite errors="1" failures="0" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.Errors" name="test_me" time="0.000">
-<error type="ZeroDivisionError">error</error>
-</testcase>
-</testsuite>
-""", self.get_output())
-
-    def test_failing_test(self):
-        class Fails(unittest.TestCase):
-            def test_me(self):
-                self.fail()
-        self.result.startTestRun()
-        Fails("test_me").run(self.result)
-        self.result.stopTestRun()
-        self.assertEqual("""<testsuite errors="0" failures="1" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.Fails" name="test_me" time="0.000">
-<failure type="AssertionError">failure</failure>
-</testcase>
-</testsuite>
-""", self.get_output())
-
-    def test_successful_test(self):
-        class Passes(unittest.TestCase):
-            def test_me(self):
-                pass
-        self.result.startTestRun()
-        Passes("test_me").run(self.result)
-        self.result.stopTestRun()
-        self.assertEqual("""<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.Passes" name="test_me" time="0.000"/>
-</testsuite>
-""", self.get_output())
-
-    def test_skip_test(self):
-        class Skips(unittest.TestCase):
-            def test_me(self):
-                self.skipTest("yo")
-        self.result.startTestRun()
-        test = Skips("test_me")
-        self.run_test_or_simulate(test, 'skipTest', self.result.addSkip, 'yo')
-        self.result.stopTestRun()
-        output = self.get_output()
-        expected = """<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.Skips" name="test_me" time="0.000">
-<skip>yo</skip>
-</testcase>
-</testsuite>
-"""
-        self.assertEqual(expected, output)
-
-    def test_unexpected_success_test(self):
-        class Succeeds(unittest.TestCase):
-            def test_me(self):
-                pass
-            try:
-                test_me = unittest.expectedFailure(test_me)
-            except AttributeError:
-                pass # Older python - just let the test pass
-        self.result.startTestRun()
-        Succeeds("test_me").run(self.result)
-        self.result.stopTestRun()
-        output = self.get_output()
-        expected = """<testsuite errors="0" failures="1" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.Succeeds" name="test_me" time="0.000">
-<failure type="unittest.case._UnexpectedSuccess"/>
-</testcase>
-</testsuite>
-"""
-        expected_old = """<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.Succeeds" name="test_me" time="0.000"/>
-</testsuite>
-"""
-        if output != expected_old:
-            self.assertEqual(expected, output)
-
-    def test_expected_failure_test(self):
-        expected_failure_support = [True]
-        class ExpectedFail(unittest.TestCase):
-            def test_me(self):
-                self.fail("fail")
-            try:
-                test_me = unittest.expectedFailure(test_me)
-            except AttributeError:
-                # Older python - just let the test fail
-                expected_failure_support[0] = False
-        self.result.startTestRun()
-        ExpectedFail("test_me").run(self.result)
-        self.result.stopTestRun()
-        output = self.get_output()
-        expected = """<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.ExpectedFail" name="test_me" time="0.000"/>
-</testsuite>
-"""
-        expected_old = """<testsuite errors="0" failures="1" name="" tests="1" time="0.000">
-<testcase classname="junitxml.tests.test_junitxml.ExpectedFail" name="test_me" time="0.000">
-<failure type="AssertionError">failure</failure>
-</testcase>
-</testsuite>
-"""
-        if expected_failure_support[0]:
-            self.assertEqual(expected, output)
-        else:
-            self.assertEqual(expected_old, output)
-
-
-class TestWellFormedXml(unittest.TestCase):
-    """XML created should always be well formed even with odd test cases"""
-
-    def _run_and_parse_test(self, case):
-        output = StringIO()
-        result = junitxml.JUnitXmlResult(output)
-        result.startTestRun()
-        case.run(result)
-        result.stopTestRun()
-        return xml.dom.minidom.parseString(output.getvalue())
-
-    def test_failure_with_amp(self):
-        """Check the failure element content is escaped"""
-        class FailWithAmp(unittest.TestCase):
-            def runTest(self):
-                self.fail("& should be escaped as &amp;")
-        doc = self._run_and_parse_test(FailWithAmp())
-        self.assertTrue(
-            doc.getElementsByTagName("failure")[0].firstChild.nodeValue
-                .endswith("AssertionError: & should be escaped as &amp;\n"))
-
-    def test_quotes_in_test_case_id(self):
-        """Check that quotes in an attribute are escaped"""
-        class QuoteId(unittest.TestCase):
-            def id(self):
-                return unittest.TestCase.id(self) + '("quotes")'
-            def runTest(self):
-                pass
-        doc = self._run_and_parse_test(QuoteId())
-        self.assertEqual('runTest("quotes")',
-            doc.getElementsByTagName("testcase")[0].getAttribute("name"))
-
-    def test_skip_reason(self):
-        """Check the skip element content is escaped"""
-        class SkipWithLt(unittest.TestCase):
-            def runTest(self):
-                self.fail("version < 2.7")
-            try:
-                runTest = unittest.skip("2.7 <= version")(runTest)
-            except AttributeError:
-                self.has_skip = False
-            else:
-                self.has_skip = True
-        doc = self._run_and_parse_test(SkipWithLt())
-        if self.has_skip:
-            self.assertEqual('2.7 <= version',
-                doc.getElementsByTagName("skip")[0].firstChild.nodeValue)
-        else:
-            self.assertTrue(
-                doc.getElementsByTagName("failure")[0].firstChild.nodeValue
-                    .endswith("AssertionError: version < 2.7\n"))
-
-    def test_error_with_control_characters(self):
-        """Check C0 control characters are stripped rather than output"""
-        class ErrorWithC0(unittest.TestCase):
-            def runTest(self):
-                raise ValueError("\x1F\x0E\x0C\x0B\x08\x01\x00lost control")
-        doc = self._run_and_parse_test(ErrorWithC0())
-        self.assertTrue(
-            doc.getElementsByTagName("error")[0].firstChild.nodeValue
-                .endswith("ValueError: lost control\n"))
-
-    def test_error_with_invalid_cdata(self):
-        """Check unicode outside the valid cdata range is stripped"""
-        if len("\uffff") == 1:
-            # Basic str type supports unicode
-            exception = ValueError("\ufffe\uffffEOF")
-        else:
-            class UTF8_Error(Exception):
-                def __unicode__(self):
-                    return str(self).decode("UTF-8")
-            exception = UTF8_Error("\xef\xbf\xbe\xef\xbf\xbfEOF")
-        class ErrorWithBadUnicode(unittest.TestCase):
-            def runTest(self):
-                raise exception
-        doc = self._run_and_parse_test(ErrorWithBadUnicode())
-        self.assertTrue(
-            doc.getElementsByTagName("error")[0].firstChild.nodeValue
-                .endswith("Error: EOF\n"))
-
-    def test_error_with_surrogates(self):
-        """Check unicode surrogates are handled properly, paired or otherwise
-
-        This is a pain due to suboptimal unicode support in Python and the
-        various changes in Python 3. On UCS-2 builds there is no easy way of
-        getting rid of unpaired surrogates while leaving valid pairs alone, so
-        this test doesn't require astral characters are kept there.
-        this test doesn't require that astral characters be kept there.
-        if len("\uffff") == 1:
-            exception = ValueError("paired: \U000201a2"
-                " unpaired: "+chr(0xD800)+"-"+chr(0xDFFF))
-            astral_char = "\U000201a2"
-        else:
-            class UTF8_Error(Exception):
-                def __unicode__(self):
-                    return str(self).decode("UTF-8")
-            exception = UTF8_Error("paired: \xf0\xa0\x86\xa2"
-                " unpaired: \xed\xa0\x80-\xed\xbf\xbf")
-            astral_char = "\U000201a2".decode("unicode-escape")
-        class ErrorWithSurrogates(unittest.TestCase):
-            def runTest(self):
-                raise exception
-        doc = self._run_and_parse_test(ErrorWithSurrogates())
-        traceback = doc.getElementsByTagName("error")[0].firstChild.nodeValue
-        if sys.maxunicode == 0xFFFF:
-            pass # would be nice to handle astral characters properly even so
-        else:
-            self.assertTrue(astral_char in traceback)
-        self.assertTrue(traceback.endswith(" unpaired: -\n"))

=== removed directory 'python-for-subunit2junitxml/subunit'
=== removed file 'python-for-subunit2junitxml/subunit/__init__.py'
--- python-for-subunit2junitxml/subunit/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,1250 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Subunit - a streaming test protocol
-
-Overview
-++++++++
-
-The ``subunit`` Python package provides a number of ``unittest`` extensions
-which can be used to cause tests to output Subunit, to parse Subunit streams
-into test activity, perform seamless test isolation within a regular test
-case and variously sort, filter and report on test runs.
-
-
-Key Classes
------------
-
-The ``subunit.TestProtocolClient`` class is a ``unittest.TestResult``
-extension which will translate a test run into a Subunit stream.
-
-The ``subunit.ProtocolTestCase`` class is an adapter between the Subunit wire
-protocol and the ``unittest.TestCase`` object protocol. It is used to translate
-a stream into a test run, which regular ``unittest.TestResult`` objects can
-process and report/inspect.
-
-Subunit has support for non-blocking usage too, for use with asyncore or
-Twisted. See the ``TestProtocolServer`` parser class for more details.
-
-Subunit includes extensions to the Python ``TestResult`` protocol. These are
-all done in a compatible manner: ``TestResult`` objects that do not implement
-the extension methods will not cause errors to be raised, instead the extension
-will either lose fidelity (for instance, folding expected failures to success
-in Python versions < 2.7 or 3.1), or discard the extended data (for extra
-details, tags, timestamping and progress markers).
-
-The test outcome methods ``addSuccess``, ``addError``, ``addExpectedFailure``,
-``addFailure``, ``addSkip`` take an optional keyword parameter ``details``
-which can be used instead of the usual python unittest parameter.
-When used the value of details should be a dict from ``string`` to
-``testtools.content.Content`` objects. This is a draft API being worked on with
-the Testing in Python mailing list, with the goal of permitting a common
-way to provide additional data beyond a traceback, such as captured data from
-disk, logging messages etc. The reference for this API is in testtools (0.9.0
-and newer).
-
-The ``tags(new_tags, gone_tags)`` method is called (if present) to add or
-remove tags in the test run that is currently executing. If called when no
-test is in progress (that is, if called outside of the ``startTest``,
-``stopTest`` pair), the tags apply to all subsequent tests. If called
-when a test is in progress, then the tags only apply to that test.
-
-The ``time(a_datetime)`` method is called (if present) when a ``time:``
-directive is encountered in a Subunit stream. This is used to tell a TestResult
-about the time that events in the stream occurred at, to allow reconstructing
-test timing from a stream.
-
-The ``progress(offset, whence)`` method controls progress data for a stream.
-The offset parameter is an int, and whence is one of subunit.PROGRESS_CUR,
-subunit.PROGRESS_SET, PROGRESS_PUSH, PROGRESS_POP. Push and pop operations
-ignore the offset parameter.
-
-
-Python test support
--------------------
-
-``subunit.run`` is a convenience wrapper to run a Python test suite via
-the command line, reporting via Subunit::
-
-  $ python -m subunit.run mylib.tests.test_suite
-
-The ``IsolatedTestSuite`` class is a TestSuite that forks before running its
-tests, allowing isolation between the test runner and some tests.
-
-Similarly, ``IsolatedTestCase`` is a base class which can be subclassed to get
-tests that will fork() before that individual test is run.
-
-``ExecTestCase`` is a convenience wrapper for running an external
-program to get a Subunit stream and then report that back to an arbitrary
-result object::
-
- class AggregateTests(subunit.ExecTestCase):
-
-     def test_script_one(self):
-         './bin/script_one'
-
-     def test_script_two(self):
-         './bin/script_two'
-
- # Normally your test loading would take care of this automatically;
- # it is only spelt out in detail here for clarity.
- suite = unittest.TestSuite([AggregateTests("test_script_one"),
-     AggregateTests("test_script_two")])
- # Create any TestResult class you like.
- result = unittest._TextTestResult(sys.stdout)
- # And run your suite as normal, Subunit will exec each external script as
- # needed and report to your result object.
- suite.run(result)
-
-Utility modules
----------------
-
-* subunit.chunked contains HTTP chunked encoding/decoding logic.
-* subunit.test_results contains TestResult helper classes.
-"""
-
-import os
-import re
-import subprocess
-import sys
-import unittest
-
-from testtools import content, content_type, ExtendedToOriginalDecorator
-from testtools.compat import _b, _u, BytesIO, StringIO
-try:
-    from testtools.testresult.real import _StringException
-    RemoteException = _StringException
-    # For testing: different pythons have different str() implementations.
-    if sys.version_info > (3, 0):
-        _remote_exception_str = "testtools.testresult.real._StringException"
-        _remote_exception_str_chunked = "34\r\n" + _remote_exception_str
-    else:
-        _remote_exception_str = "_StringException" 
-        _remote_exception_str_chunked = "1A\r\n" + _remote_exception_str
-except ImportError:
-    raise ImportError ("testtools.testresult.real does not contain "
-        "_StringException, check your version.")
-from testtools import testresult
-
-from subunit import chunked, details, iso8601, test_results
-
-
-PROGRESS_SET = 0
-PROGRESS_CUR = 1
-PROGRESS_PUSH = 2
-PROGRESS_POP = 3
-
-
-def test_suite():
-    import subunit.tests
-    return subunit.tests.test_suite()
-
-
-def join_dir(base_path, path):
-    """
-    Returns an absolute path to C{path}, calculated relative to the parent
-    of C{base_path}.
-
-    @param base_path: A path to a file or directory.
-    @param path: An absolute path, or a path relative to the containing
-    directory of C{base_path}.
-
-    @return: An absolute path to C{path}.
-    """
-    return os.path.join(os.path.dirname(os.path.abspath(base_path)), path)
-
-
-def tags_to_new_gone(tags):
-    """Split a list of tags into a new_set and a gone_set."""
-    new_tags = set()
-    gone_tags = set()
-    for tag in tags:
-        if tag[0] == '-':
-            gone_tags.add(tag[1:])
-        else:
-            new_tags.add(tag)
-    return new_tags, gone_tags
-
-
-class DiscardStream(object):
-    """A filelike object which discards what is written to it."""
-
-    def write(self, bytes):
-        pass
-
-
-class _ParserState(object):
-    """State for the subunit parser."""
-
-    def __init__(self, parser):
-        self.parser = parser
-        self._test_sym = (_b('test'), _b('testing'))
-        self._colon_sym = _b(':')
-        self._error_sym = (_b('error'),)
-        self._failure_sym = (_b('failure'),)
-        self._progress_sym = (_b('progress'),)
-        self._skip_sym = _b('skip')
-        self._success_sym = (_b('success'), _b('successful'))
-        self._tags_sym = (_b('tags'),)
-        self._time_sym = (_b('time'),)
-        self._xfail_sym = (_b('xfail'),)
-        self._uxsuccess_sym = (_b('uxsuccess'),)
-        self._start_simple = _u(" [")
-        self._start_multipart = _u(" [ multipart")
-
-    def addError(self, offset, line):
-        """An 'error:' directive has been read."""
-        self.parser.stdOutLineReceived(line)
-
-    def addExpectedFail(self, offset, line):
-        """An 'xfail:' directive has been read."""
-        self.parser.stdOutLineReceived(line)
-
-    def addFailure(self, offset, line):
-        """A 'failure:' directive has been read."""
-        self.parser.stdOutLineReceived(line)
-
-    def addSkip(self, offset, line):
-        """A 'skip:' directive has been read."""
-        self.parser.stdOutLineReceived(line)
-
-    def addSuccess(self, offset, line):
-        """A 'success:' directive has been read."""
-        self.parser.stdOutLineReceived(line)
-
-    def lineReceived(self, line):
-        """a line has been received."""
-        parts = line.split(None, 1)
-        if len(parts) == 2 and line.startswith(parts[0]):
-            cmd, rest = parts
-            offset = len(cmd) + 1
-            cmd = cmd.rstrip(self._colon_sym)
-            if cmd in self._test_sym:
-                self.startTest(offset, line)
-            elif cmd in self._error_sym:
-                self.addError(offset, line)
-            elif cmd in self._failure_sym:
-                self.addFailure(offset, line)
-            elif cmd in self._progress_sym:
-                self.parser._handleProgress(offset, line)
-            elif cmd in self._skip_sym:
-                self.addSkip(offset, line)
-            elif cmd in self._success_sym:
-                self.addSuccess(offset, line)
-            elif cmd in self._tags_sym:
-                self.parser._handleTags(offset, line)
-                self.parser.subunitLineReceived(line)
-            elif cmd in self._time_sym:
-                self.parser._handleTime(offset, line)
-                self.parser.subunitLineReceived(line)
-            elif cmd in self._xfail_sym:
-                self.addExpectedFail(offset, line)
-            elif cmd in self._uxsuccess_sym:
-                self.addUnexpectedSuccess(offset, line)
-            else:
-                self.parser.stdOutLineReceived(line)
-        else:
-            self.parser.stdOutLineReceived(line)
-
-    def lostConnection(self):
-        """Connection lost."""
-        self.parser._lostConnectionInTest(_u('unknown state of '))
-
-    def startTest(self, offset, line):
-        """A test start command received."""
-        self.parser.stdOutLineReceived(line)
-
-
-class _InTest(_ParserState):
-    """State for the subunit parser after reading a test: directive."""
-
-    def _outcome(self, offset, line, no_details, details_state):
-        """An outcome directive has been read.
-
-        :param no_details: Callable to call when no details are presented.
-        :param details_state: The state to switch to for details
-            processing of this outcome.
-        """
-        test_name = line[offset:-1].decode('utf8')
-        if self.parser.current_test_description == test_name:
-            self.parser._state = self.parser._outside_test
-            self.parser.current_test_description = None
-            no_details()
-            self.parser.client.stopTest(self.parser._current_test)
-            self.parser._current_test = None
-            self.parser.subunitLineReceived(line)
-        elif self.parser.current_test_description + self._start_simple == \
-            test_name:
-            self.parser._state = details_state
-            details_state.set_simple()
-            self.parser.subunitLineReceived(line)
-        elif self.parser.current_test_description + self._start_multipart == \
-            test_name:
-            self.parser._state = details_state
-            details_state.set_multipart()
-            self.parser.subunitLineReceived(line)
-        else:
-            self.parser.stdOutLineReceived(line)
-
-    def _error(self):
-        self.parser.client.addError(self.parser._current_test,
-            details={})
-
-    def addError(self, offset, line):
-        """An 'error:' directive has been read."""
-        self._outcome(offset, line, self._error,
-            self.parser._reading_error_details)
-
-    def _xfail(self):
-        self.parser.client.addExpectedFailure(self.parser._current_test,
-            details={})
-
-    def addExpectedFail(self, offset, line):
-        """An 'xfail:' directive has been read."""
-        self._outcome(offset, line, self._xfail,
-            self.parser._reading_xfail_details)
-
-    def _uxsuccess(self):
-        self.parser.client.addUnexpectedSuccess(self.parser._current_test)
-
-    def addUnexpectedSuccess(self, offset, line):
-        """A 'uxsuccess:' directive has been read."""
-        self._outcome(offset, line, self._uxsuccess,
-            self.parser._reading_uxsuccess_details)
-
-    def _failure(self):
-        self.parser.client.addFailure(self.parser._current_test, details={})
-
-    def addFailure(self, offset, line):
-        """A 'failure:' directive has been read."""
-        self._outcome(offset, line, self._failure,
-            self.parser._reading_failure_details)
-
-    def _skip(self):
-        self.parser.client.addSkip(self.parser._current_test, details={})
-
-    def addSkip(self, offset, line):
-        """A 'skip:' directive has been read."""
-        self._outcome(offset, line, self._skip,
-            self.parser._reading_skip_details)
-
-    def _succeed(self):
-        self.parser.client.addSuccess(self.parser._current_test, details={})
-
-    def addSuccess(self, offset, line):
-        """A 'success:' directive has been read."""
-        self._outcome(offset, line, self._succeed,
-            self.parser._reading_success_details)
-
-    def lostConnection(self):
-        """Connection lost."""
-        self.parser._lostConnectionInTest(_u(''))
-
-
-class _OutSideTest(_ParserState):
-    """State for the subunit parser outside of a test context."""
-
-    def lostConnection(self):
-        """Connection lost."""
-
-    def startTest(self, offset, line):
-        """A test start command received."""
-        self.parser._state = self.parser._in_test
-        test_name = line[offset:-1].decode('utf8')
-        self.parser._current_test = RemotedTestCase(test_name)
-        self.parser.current_test_description = test_name
-        self.parser.client.startTest(self.parser._current_test)
-        self.parser.subunitLineReceived(line)
-
-
-class _ReadingDetails(_ParserState):
-    """Common logic for reading state details."""
-
-    def endDetails(self):
-        """The end of a details section has been reached."""
-        self.parser._state = self.parser._outside_test
-        self.parser.current_test_description = None
-        self._report_outcome()
-        self.parser.client.stopTest(self.parser._current_test)
-
-    def lineReceived(self, line):
-        """a line has been received."""
-        self.details_parser.lineReceived(line)
-        self.parser.subunitLineReceived(line)
-
-    def lostConnection(self):
-        """Connection lost."""
-        self.parser._lostConnectionInTest(_u('%s report of ') %
-            self._outcome_label())
-
-    def _outcome_label(self):
-        """The label to describe this outcome."""
-        raise NotImplementedError(self._outcome_label)
-
-    def set_simple(self):
-        """Start a simple details parser."""
-        self.details_parser = details.SimpleDetailsParser(self)
-
-    def set_multipart(self):
-        """Start a multipart details parser."""
-        self.details_parser = details.MultipartDetailsParser(self)
-
-
-class _ReadingFailureDetails(_ReadingDetails):
-    """State for the subunit parser when reading failure details."""
-
-    def _report_outcome(self):
-        self.parser.client.addFailure(self.parser._current_test,
-            details=self.details_parser.get_details())
-
-    def _outcome_label(self):
-        return "failure"
-
-
-class _ReadingErrorDetails(_ReadingDetails):
-    """State for the subunit parser when reading error details."""
-
-    def _report_outcome(self):
-        self.parser.client.addError(self.parser._current_test,
-            details=self.details_parser.get_details())
-
-    def _outcome_label(self):
-        return "error"
-
-
-class _ReadingExpectedFailureDetails(_ReadingDetails):
-    """State for the subunit parser when reading xfail details."""
-
-    def _report_outcome(self):
-        self.parser.client.addExpectedFailure(self.parser._current_test,
-            details=self.details_parser.get_details())
-
-    def _outcome_label(self):
-        return "xfail"
-
-
-class _ReadingUnexpectedSuccessDetails(_ReadingDetails):
-    """State for the subunit parser when reading uxsuccess details."""
-
-    def _report_outcome(self):
-        self.parser.client.addUnexpectedSuccess(self.parser._current_test,
-            details=self.details_parser.get_details())
-
-    def _outcome_label(self):
-        return "uxsuccess"
-
-
-class _ReadingSkipDetails(_ReadingDetails):
-    """State for the subunit parser when reading skip details."""
-
-    def _report_outcome(self):
-        self.parser.client.addSkip(self.parser._current_test,
-            details=self.details_parser.get_details("skip"))
-
-    def _outcome_label(self):
-        return "skip"
-
-
-class _ReadingSuccessDetails(_ReadingDetails):
-    """State for the subunit parser when reading success details."""
-
-    def _report_outcome(self):
-        self.parser.client.addSuccess(self.parser._current_test,
-            details=self.details_parser.get_details("success"))
-
-    def _outcome_label(self):
-        return "success"
-
-
-class TestProtocolServer(object):
-    """A parser for subunit.
-
-    :ivar tags: The current tags associated with the protocol stream.
-    """
-
-    def __init__(self, client, stream=None, forward_stream=None):
-        """Create a TestProtocolServer instance.
-
-        :param client: An object meeting the unittest.TestResult protocol.
-        :param stream: The stream to which received lines that are not part of
-            the subunit protocol are written. This allows custom handling of
-            mixed protocols. By default, sys.stdout is used for convenience.
-            It should accept bytes to its write() method.
-        :param forward_stream: A stream to forward subunit lines to. This
-            allows a filter to forward the entire stream while still parsing
-            and acting on it. By default forward_stream is set to
-            DiscardStream() and no forwarding happens.
-        """
-        self.client = ExtendedToOriginalDecorator(client)
-        if stream is None:
-            stream = sys.stdout
-            if sys.version_info > (3, 0):
-                stream = stream.buffer
-        self._stream = stream
-        self._forward_stream = forward_stream or DiscardStream()
-        # state objects we can switch to
-        self._in_test = _InTest(self)
-        self._outside_test = _OutSideTest(self)
-        self._reading_error_details = _ReadingErrorDetails(self)
-        self._reading_failure_details = _ReadingFailureDetails(self)
-        self._reading_skip_details = _ReadingSkipDetails(self)
-        self._reading_success_details = _ReadingSuccessDetails(self)
-        self._reading_xfail_details = _ReadingExpectedFailureDetails(self)
-        self._reading_uxsuccess_details = _ReadingUnexpectedSuccessDetails(self)
-        # start with outside test.
-        self._state = self._outside_test
-        # Avoid casts on every call
-        self._plusminus = _b('+-')
-        self._push_sym = _b('push')
-        self._pop_sym = _b('pop')
-
-    def _handleProgress(self, offset, line):
-        """Process a progress directive."""
-        line = line[offset:].strip()
-        if line[0] in self._plusminus:
-            whence = PROGRESS_CUR
-            delta = int(line)
-        elif line == self._push_sym:
-            whence = PROGRESS_PUSH
-            delta = None
-        elif line == self._pop_sym:
-            whence = PROGRESS_POP
-            delta = None
-        else:
-            whence = PROGRESS_SET
-            delta = int(line)
-        self.client.progress(delta, whence)
-
-    def _handleTags(self, offset, line):
-        """Process a tags command."""
-        tags = line[offset:].decode('utf8').split()
-        new_tags, gone_tags = tags_to_new_gone(tags)
-        self.client.tags(new_tags, gone_tags)
-
-    def _handleTime(self, offset, line):
-        # Accept it, but do not do anything with it yet.
-        try:
-            event_time = iso8601.parse_date(line[offset:-1])
-        except TypeError:
-            raise TypeError(_u("Failed to parse %r, got %r")
-                % (line, sys.exc_info()[1]))
-        self.client.time(event_time)
-
-    def lineReceived(self, line):
-        """Call the appropriate local method for the received line."""
-        self._state.lineReceived(line)
-
-    def _lostConnectionInTest(self, state_string):
-        error_string = _u("lost connection during %stest '%s'") % (
-            state_string, self.current_test_description)
-        self.client.addError(self._current_test, RemoteError(error_string))
-        self.client.stopTest(self._current_test)
-
-    def lostConnection(self):
-        """The input connection has finished."""
-        self._state.lostConnection()
-
-    def readFrom(self, pipe):
-        """Blocking convenience API to parse an entire stream.
-
-        :param pipe: A file-like object supporting readlines().
-        :return: None.
-        """
-        for line in pipe.readlines():
-            self.lineReceived(line)
-        self.lostConnection()
-
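A short usage sketch of the readFrom() convenience API above, feeding a
previously captured subunit stream into a plain unittest result object;
results.subunit is a hypothetical file name:

    import unittest

    import subunit

    result = unittest.TestResult()
    parser = subunit.TestProtocolServer(result)
    # readFrom() consumes the stream line by line, then signals lostConnection().
    with open("results.subunit", "rb") as pipe:
        parser.readFrom(pipe)
    print(result.testsRun, len(result.errors), len(result.failures))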
-    def _startTest(self, offset, line):
-        """Internal call to change state machine. Override startTest()."""
-        self._state.startTest(offset, line)
-
-    def subunitLineReceived(self, line):
-        self._forward_stream.write(line)
-
-    def stdOutLineReceived(self, line):
-        self._stream.write(line)
-
-
-class TestProtocolClient(testresult.TestResult):
-    """A TestResult which generates a subunit stream for a test run.
-
-    # Get a TestSuite or TestCase to run
-    suite = make_suite()
-    # Create a stream (any object with a 'write' method). This should accept
-    # bytes not strings: subunit is a byte orientated protocol.
-    stream = file('tests.log', 'wb')
-    # Create a subunit result object which will output to the stream
-    result = subunit.TestProtocolClient(stream)
-    # Optionally, to get timing data for performance analysis, wrap the
-    # serialiser with a timing decorator
-    result = subunit.test_results.AutoTimingTestResultDecorator(result)
-    # Run the test suite reporting to the subunit result object
-    suite.run(result)
-    # Close the stream.
-    stream.close()
-    """
-
-    def __init__(self, stream):
-        testresult.TestResult.__init__(self)
-        self._stream = stream
-        _make_stream_binary(stream)
-        self._progress_fmt = _b("progress: ")
-        self._bytes_eol = _b("\n")
-        self._progress_plus = _b("+")
-        self._progress_push = _b("push")
-        self._progress_pop = _b("pop")
-        self._empty_bytes = _b("")
-        self._start_simple = _b(" [\n")
-        self._end_simple = _b("]\n")
-
-    def addError(self, test, error=None, details=None):
-        """Report an error in test test.
-
-        Only one of error and details should be provided: conceptually there
-        are two separate methods:
-            addError(self, test, error)
-            addError(self, test, details)
-
-        :param error: Standard unittest positional argument form - an
-            exc_info tuple.
-        :param details: New Testing-in-python drafted API; a dict from string
-            to subunit.Content objects.
-        """
-        self._addOutcome("error", test, error=error, details=details)
-
-    def addExpectedFailure(self, test, error=None, details=None):
-        """Report an expected failure in test test.
-
-        Only one of error and details should be provided: conceptually there
-        are two separate methods:
-            addExpectedFailure(self, test, error)
-            addExpectedFailure(self, test, details)
-
-        :param error: Standard unittest positional argument form - an
-            exc_info tuple.
-        :param details: New Testing-in-python drafted API; a dict from string
-            to subunit.Content objects.
-        """
-        self._addOutcome("xfail", test, error=error, details=details)
-
-    def addFailure(self, test, error=None, details=None):
-        """Report a failure in test test.
-
-        Only one of error and details should be provided: conceptually there
-        are two separate methods:
-            addFailure(self, test, error)
-            addFailure(self, test, details)
-
-        :param error: Standard unittest positional argument form - an
-            exc_info tuple.
-        :param details: New Testing-in-python drafted API; a dict from string
-            to subunit.Content objects.
-        """
-        self._addOutcome("failure", test, error=error, details=details)
-
-    def _addOutcome(self, outcome, test, error=None, details=None,
-        error_permitted=True):
-        """Report a failure in test test.
-
-        Only one of error and details should be provided: conceptually there
-        are two separate methods:
-            addOutcome(self, test, error)
-            addOutcome(self, test, details)
-
-        :param outcome: A string describing the outcome - used as the
-            event name in the subunit stream.
-        :param error: Standard unittest positional argument form - an
-            exc_info tuple.
-        :param details: New Testing-in-python drafted API; a dict from string
-            to subunit.Content objects.
-        :param error_permitted: If True then one and only one of error or
-            details must be supplied. If False then error must not be supplied
-            and details is still optional.  """
-        self._stream.write(_b("%s: %s" % (outcome, test.id())))
-        if error_permitted:
-            if error is None and details is None:
-                raise ValueError
-        else:
-            if error is not None:
-                raise ValueError
-        if error is not None:
-            self._stream.write(self._start_simple)
-            # XXX: this needs to be made much stricter, along the lines of
-            # Martin[gz]'s work in testtools. Perhaps subunit can use that?
-            for line in self._exc_info_to_unicode(error, test).splitlines():
-                self._stream.write(("%s\n" % line).encode('utf8'))
-        elif details is not None:
-            self._write_details(details)
-        else:
-            self._stream.write(_b("\n"))
-        if details is not None or error is not None:
-            self._stream.write(self._end_simple)
-
-    def addSkip(self, test, reason=None, details=None):
-        """Report a skipped test."""
-        if reason is None:
-            self._addOutcome("skip", test, error=None, details=details)
-        else:
-            self._stream.write(_b("skip: %s [\n" % test.id()))
-            self._stream.write(_b("%s\n" % reason))
-            self._stream.write(self._end_simple)
-
-    def addSuccess(self, test, details=None):
-        """Report a success in a test."""
-        self._addOutcome("successful", test, details=details, error_permitted=False)
-
-    def addUnexpectedSuccess(self, test, details=None):
-        """Report an unexpected success in test test.
-
-        Details can optionally be provided: conceptually there
-        are two separate methods:
-            addUnexpectedSuccess(self, test)
-            addUnexpectedSuccess(self, test, details)
-
-        :param details: New Testing-in-python drafted API; a dict from string
-            to subunit.Content objects.
-        """
-        self._addOutcome("uxsuccess", test, details=details,
-            error_permitted=False)
-
-    def startTest(self, test):
-        """Mark a test as starting its test run."""
-        super(TestProtocolClient, self).startTest(test)
-        self._stream.write(_b("test: %s\n" % test.id()))
-        self._stream.flush()
-
-    def stopTest(self, test):
-        super(TestProtocolClient, self).stopTest(test)
-        self._stream.flush()
-
-    def progress(self, offset, whence):
-        """Provide indication about the progress/length of the test run.
-
-        :param offset: Information about the number of tests remaining. If
-            whence is PROGRESS_CUR, then offset increases/decreases the
-            remaining test count. If whence is PROGRESS_SET, then offset
-            specifies exactly the remaining test count.
-        :param whence: One of PROGRESS_CUR, PROGRESS_SET, PROGRESS_PUSH,
-            PROGRESS_POP.
-        """
-        if whence == PROGRESS_CUR and offset > -1:
-            prefix = self._progress_plus
-            offset = _b(str(offset))
-        elif whence == PROGRESS_PUSH:
-            prefix = self._empty_bytes
-            offset = self._progress_push
-        elif whence == PROGRESS_POP:
-            prefix = self._empty_bytes
-            offset = self._progress_pop
-        else:
-            prefix = self._empty_bytes
-            offset = _b(str(offset))
-        self._stream.write(self._progress_fmt + prefix + offset +
-            self._bytes_eol)
-
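A short sketch of the progress() semantics documented above, using the
PROGRESS_* constants defined near the top of this module; tests.log is a
hypothetical output file:

    import subunit

    with open("tests.log", "wb") as stream:
        client = subunit.TestProtocolClient(stream)
        client.progress(10, subunit.PROGRESS_SET)    # exactly 10 tests remain
        client.progress(-1, subunit.PROGRESS_CUR)    # one fewer test remains
        client.progress(0, subunit.PROGRESS_PUSH)    # enter a nested progress scope
        client.progress(0, subunit.PROGRESS_POP)     # return to the outer scope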
-    def time(self, a_datetime):
-        """Inform the client of the time.
-
-        :param a_datetime: A datetime.datetime object.
-        """
-        time = a_datetime.astimezone(iso8601.Utc())
-        self._stream.write(_b("time: %04d-%02d-%02d %02d:%02d:%02d.%06dZ\n" % (
-            time.year, time.month, time.day, time.hour, time.minute,
-            time.second, time.microsecond)))
-
-    def _write_details(self, details):
-        """Output details to the stream.
-
-        :param details: An extended details dict for a test outcome.
-        """
-        self._stream.write(_b(" [ multipart\n"))
-        for name, content in sorted(details.items()):
-            self._stream.write(_b("Content-Type: %s/%s" %
-                (content.content_type.type, content.content_type.subtype)))
-            parameters = content.content_type.parameters
-            if parameters:
-                self._stream.write(_b(";"))
-                param_strs = []
-                for param, value in parameters.items():
-                    param_strs.append("%s=%s" % (param, value))
-                self._stream.write(_b(",".join(param_strs)))
-            self._stream.write(_b("\n%s\n" % name))
-            encoder = chunked.Encoder(self._stream)
-            list(map(encoder.write, content.iter_bytes()))
-            encoder.close()
-
-    def done(self):
-        """Obey the testtools result.done() interface."""
-
-
-def RemoteError(description=_u("")):
-    return (_StringException, _StringException(description), None)
-
-
-class RemotedTestCase(unittest.TestCase):
-    """A class to represent test cases run in child processes.
-
-    Instances of this class are used to provide the Python test API a TestCase
-    that can be printed to the screen, introspected for metadata and so on.
-    However, as they are a simply a memoisation of a test that was actually
-    However, as they are simply a memoisation of a test that was actually
-    actions.
-    """
-
-    def __eq__ (self, other):
-        try:
-            return self.__description == other.__description
-        except AttributeError:
-            return False
-
-    def __init__(self, description):
-        """Create a pseudo test case with description description."""
-        self.__description = description
-
-    def error(self, label):
-        raise NotImplementedError("%s on RemotedTestCases is not permitted." %
-            label)
-
-    def setUp(self):
-        self.error("setUp")
-
-    def tearDown(self):
-        self.error("tearDown")
-
-    def shortDescription(self):
-        return self.__description
-
-    def id(self):
-        return "%s" % (self.__description,)
-
-    def __str__(self):
-        return "%s (%s)" % (self.__description, self._strclass())
-
-    def __repr__(self):
-        return "<%s description='%s'>" % \
-               (self._strclass(), self.__description)
-
-    def run(self, result=None):
-        if result is None: result = self.defaultTestResult()
-        result.startTest(self)
-        result.addError(self, RemoteError(_u("Cannot run RemotedTestCases.\n")))
-        result.stopTest(self)
-
-    def _strclass(self):
-        cls = self.__class__
-        return "%s.%s" % (cls.__module__, cls.__name__)
-
-
-class ExecTestCase(unittest.TestCase):
-    """A test case which runs external scripts for test fixtures."""
-
-    def __init__(self, methodName='runTest'):
-        """Create an instance of the class that will use the named test
-           method when executed. Raises a ValueError if the instance does
-           not have a method with the specified name.
-        """
-        unittest.TestCase.__init__(self, methodName)
-        testMethod = getattr(self, methodName)
-        self.script = join_dir(sys.modules[self.__class__.__module__].__file__,
-                               testMethod.__doc__)
-
-    def countTestCases(self):
-        return 1
-
-    def run(self, result=None):
-        if result is None: result = self.defaultTestResult()
-        self._run(result)
-
-    def debug(self):
-        """Run the test without collecting errors in a TestResult"""
-        self._run(testresult.TestResult())
-
-    def _run(self, result):
-        protocol = TestProtocolServer(result)
-        process = subprocess.Popen(self.script, shell=True,
-            stdout=subprocess.PIPE)
-        _make_stream_binary(process.stdout)
-        output = process.communicate()[0]
-        protocol.readFrom(BytesIO(output))
-
-
-class IsolatedTestCase(unittest.TestCase):
-    """A TestCase which executes in a forked process.
-
-    Each test gets its own process, which has a performance overhead but will
-    provide excellent isolation from global state (such as django configs,
-    zope utilities and so on).
-    """
-
-    def run(self, result=None):
-        if result is None: result = self.defaultTestResult()
-        run_isolated(unittest.TestCase, self, result)
-
-
-class IsolatedTestSuite(unittest.TestSuite):
-    """A TestSuite which runs its tests in a forked process.
-
-    This is a decorator that will fork() before running the tests and report the
-    results from the child process using a Subunit stream.  This is useful for
-    handling tests that mutate global state, or are testing C extensions that
-    could crash the VM.
-    """
-
-    def run(self, result=None):
-        if result is None: result = testresult.TestResult()
-        run_isolated(unittest.TestSuite, self, result)
-
-
-def run_isolated(klass, self, result):
-    """Run a test suite or case in a subprocess, using the run method on klass.
-    """
-    c2pread, c2pwrite = os.pipe()
-    # fixme - error -> result
-    # now fork
-    pid = os.fork()
-    if pid == 0:
-        # Child
-        # Close parent's pipe ends
-        os.close(c2pread)
-        # Dup fds for child
-        os.dup2(c2pwrite, 1)
-        # Close pipe fds.
-        os.close(c2pwrite)
-
-        # at this point, sys.stdout is redirected, now we want
-        # to filter it to escape ]'s.
-        ### XXX: test and write that bit.
-        stream = os.fdopen(1, 'wb')
-        result = TestProtocolClient(stream)
-        klass.run(self, result)
-        stream.flush()
-        sys.stderr.flush()
-        # exit HARD, exit NOW.
-        os._exit(0)
-    else:
-        # Parent
-        # Close child pipe ends
-        os.close(c2pwrite)
-        # hookup a protocol engine
-        protocol = TestProtocolServer(result)
-        fileobj = os.fdopen(c2pread, 'rb')
-        protocol.readFrom(fileobj)
-        os.waitpid(pid, 0)
-        # TODO return code evaluation.
-    return result
-
-
-def TAP2SubUnit(tap, subunit):
-    """Filter a TAP pipe into a subunit pipe.
-
-    :param tap: A tap pipe/stream/file object.
-    :param subunit: A pipe/stream/file object to write subunit results to.
-    :return: The exit code to exit with.
-    """
-    BEFORE_PLAN = 0
-    AFTER_PLAN = 1
-    SKIP_STREAM = 2
-    state = BEFORE_PLAN
-    plan_start = 1
-    plan_stop = 0
-    def _skipped_test(subunit, plan_start):
-        # Some tests were skipped.
-        subunit.write('test test %d\n' % plan_start)
-        subunit.write('error test %d [\n' % plan_start)
-        subunit.write('test missing from TAP output\n')
-        subunit.write(']\n')
-        return plan_start + 1
-    # Test data for the next test to emit
-    test_name = None
-    log = []
-    result = None
-    def _emit_test():
-        "write out a test"
-        if test_name is None:
-            return
-        subunit.write("test %s\n" % test_name)
-        if not log:
-            subunit.write("%s %s\n" % (result, test_name))
-        else:
-            subunit.write("%s %s [\n" % (result, test_name))
-        if log:
-            for line in log:
-                subunit.write("%s\n" % line)
-            subunit.write("]\n")
-        del log[:]
-    for line in tap:
-        if state == BEFORE_PLAN:
-            match = re.match("(\d+)\.\.(\d+)\s*(?:\#\s+(.*))?\n", line)
-            if match:
-                state = AFTER_PLAN
-                _, plan_stop, comment = match.groups()
-                plan_stop = int(plan_stop)
-                if plan_start > plan_stop and plan_stop == 0:
-                    # skipped file
-                    state = SKIP_STREAM
-                    subunit.write("test file skip\n")
-                    subunit.write("skip file skip [\n")
-                    subunit.write("%s\n" % comment)
-                    subunit.write("]\n")
-                continue
-        # not a plan line, or have seen one before
-        match = re.match("(ok|not ok)(?:\s+(\d+)?)?(?:\s+([^#]*[^#\s]+)\s*)?(?:\s+#\s+(TODO|SKIP|skip|todo)(?:\s+(.*))?)?\n", line)
-        if match:
-            # new test, emit current one.
-            _emit_test()
-            status, number, description, directive, directive_comment = match.groups()
-            if status == 'ok':
-                result = 'success'
-            else:
-                result = "failure"
-            if description is None:
-                description = ''
-            else:
-                description = ' ' + description
-            if directive is not None:
-                if directive.upper() == 'TODO':
-                    result = 'xfail'
-                elif directive.upper() == 'SKIP':
-                    result = 'skip'
-                if directive_comment is not None:
-                    log.append(directive_comment)
-            if number is not None:
-                number = int(number)
-                while plan_start < number:
-                    plan_start = _skipped_test(subunit, plan_start)
-            test_name = "test %d%s" % (plan_start, description)
-            plan_start += 1
-            continue
-        match = re.match("Bail out\!(?:\s*(.*))?\n", line)
-        if match:
-            reason, = match.groups()
-            if reason is None:
-                extra = ''
-            else:
-                extra = ' %s' % reason
-            _emit_test()
-            test_name = "Bail out!%s" % extra
-            result = "error"
-            state = SKIP_STREAM
-            continue
-        match = re.match("\#.*\n", line)
-        if match:
-            log.append(line[:-1])
-            continue
-        subunit.write(line)
-    _emit_test()
-    while plan_start <= plan_stop:
-        # record missed tests
-        plan_start = _skipped_test(subunit, plan_start)
-    return 0
-
-
-def tag_stream(original, filtered, tags):
-    """Alter tags on a stream.
-
-    :param original: The input stream.
-    :param filtered: The output stream.
-    :param tags: The tags to apply. As in a normal stream - a list of 'TAG' or
-        '-TAG' commands.
-
-        A 'TAG' command will add the tag to the output stream,
-        and override any existing '-TAG' command in that stream.
-        Specifically:
-         * A global 'tags: TAG' will be added to the start of the stream.
-         * Any tags commands with -TAG will have the -TAG removed.
-
-        A '-TAG' command will remove the TAG command from the stream.
-        Specifically:
-         * A 'tags: -TAG' command will be added to the start of the stream.
-         * Any 'tags: TAG' command will have 'TAG' removed from it.
-        Additionally, any redundant tagging commands (adding a tag globally
-        present, or removing a tag globally removed) are stripped as a
-        by-product of the filtering.
-    :return: 0
-    """
-    new_tags, gone_tags = tags_to_new_gone(tags)
-    def write_tags(new_tags, gone_tags):
-        if new_tags or gone_tags:
-            filtered.write("tags: " + ' '.join(new_tags))
-            if gone_tags:
-                for tag in gone_tags:
-                    filtered.write("-" + tag)
-            filtered.write("\n")
-    write_tags(new_tags, gone_tags)
-    # TODO: use the protocol parser and thus don't mangle test comments.
-    for line in original:
-        if line.startswith("tags:"):
-            line_tags = line[5:].split()
-            line_new, line_gone = tags_to_new_gone(line_tags)
-            line_new = line_new - gone_tags
-            line_gone = line_gone - new_tags
-            write_tags(line_new, line_gone)
-        else:
-            filtered.write(line)
-    return 0
-
-
-class ProtocolTestCase(object):
-    """Subunit wire protocol to unittest.TestCase adapter.
-
-    ProtocolTestCase honours the core of ``unittest.TestCase`` protocol -
-    calling a ProtocolTestCase or invoking the run() method will make a 'test
-    run' happen. The 'test run' will simply be a replay of the test activity
-    that has been encoded into the stream. The ``unittest.TestCase`` ``debug``
-    and ``countTestCases`` methods are not supported because there isn't a
-    sensible mapping for those methods.
-
-    # Get a stream (any object with a readline() method), in this case the
-    # stream output by the example from ``subunit.TestProtocolClient``.
-    stream = open('tests.log', 'rb')
-    # Create a parser which will read from the stream and emit
-    # activity to a unittest.TestResult when run() is called.
-    suite = subunit.ProtocolTestCase(stream)
-    # Create a result object to accept the contents of that stream.
-    result = unittest._TextTestResult(sys.stdout)
-    # 'run' the tests - process the stream and feed its contents to result.
-    suite.run(result)
-    stream.close()
-
-    :seealso: TestProtocolServer (the subunit wire protocol parser).
-    """
-
-    def __init__(self, stream, passthrough=None, forward=False):
-        """Create a ProtocolTestCase reading from stream.
-
-        :param stream: A filelike object which a subunit stream can be read
-            from.
-        :param passthrough: A stream to pass non-subunit input on to. If not
-            supplied, the TestProtocolServer default is used.
-        :param forward: A stream to pass subunit input on to. If not supplied
-            subunit input is not forwarded.
-        """
-        self._stream = stream
-        _make_stream_binary(stream)
-        self._passthrough = passthrough
-        self._forward = forward
-
-    def __call__(self, result=None):
-        return self.run(result)
-
-    def run(self, result=None):
-        if result is None:
-            result = self.defaultTestResult()
-        protocol = TestProtocolServer(result, self._passthrough, self._forward)
-        line = self._stream.readline()
-        while line:
-            protocol.lineReceived(line)
-            line = self._stream.readline()
-        protocol.lostConnection()
-
-
-class TestResultStats(testresult.TestResult):
-    """A pyunit TestResult interface implementation for making statistics.
-
-    :ivar total_tests: The total tests seen.
-    :ivar passed_tests: The tests that passed.
-    :ivar failed_tests: The tests that failed.
-    :ivar seen_tags: The tags seen across all tests.
-    """
-
-    def __init__(self, stream):
-        """Create a TestResultStats which outputs to stream."""
-        testresult.TestResult.__init__(self)
-        self._stream = stream
-        self.failed_tests = 0
-        self.skipped_tests = 0
-        self.seen_tags = set()
-
-    @property
-    def total_tests(self):
-        return self.testsRun
-
-    def addError(self, test, err, details=None):
-        self.failed_tests += 1
-
-    def addFailure(self, test, err, details=None):
-        self.failed_tests += 1
-
-    def addSkip(self, test, reason, details=None):
-        self.skipped_tests += 1
-
-    def formatStats(self):
-        self._stream.write("Total tests:   %5d\n" % self.total_tests)
-        self._stream.write("Passed tests:  %5d\n" % self.passed_tests)
-        self._stream.write("Failed tests:  %5d\n" % self.failed_tests)
-        self._stream.write("Skipped tests: %5d\n" % self.skipped_tests)
-        tags = sorted(self.seen_tags)
-        self._stream.write("Seen tags: %s\n" % (", ".join(tags)))
-
-    @property
-    def passed_tests(self):
-        return self.total_tests - self.failed_tests - self.skipped_tests
-
-    def tags(self, new_tags, gone_tags):
-        """Accumulate the seen tags."""
-        self.seen_tags.update(new_tags)
-
-    def wasSuccessful(self):
-        """Tells whether or not this result was a success"""
-        return self.failed_tests == 0
-
-
-def get_default_formatter():
-    """Obtain the default formatter to write to.
-
-    :return: A file-like object.
-    """
-    formatter = os.getenv("SUBUNIT_FORMATTER")
-    if formatter:
-        return os.popen(formatter, "w")
-    else:
-        stream = sys.stdout
-        if sys.version_info > (3, 0):
-            stream = stream.buffer
-        return stream
-
-
-if sys.version_info > (3, 0):
-    from io import UnsupportedOperation as _NoFilenoError
-else:
-    _NoFilenoError = AttributeError
-
-def read_test_list(path):
-    """Read a list of test ids from a file on disk.
-
-    :param path: Path to the file
-    :return: Sequence of test ids
-    """
-    f = open(path, 'rb')
-    try:
-        return [l.rstrip("\n") for l in f.readlines()]
-    finally:
-        f.close()
-
-
-def _make_stream_binary(stream):
-    """Ensure that a stream will be binary safe. See _make_binary_on_windows."""
-    try:
-        fileno = stream.fileno()
-    except _NoFilenoError:
-        return
-    _make_binary_on_windows(fileno)
-
-def _make_binary_on_windows(fileno):
-    """Win32 mangles \r\n to \n and that breaks streams. See bug lp:505078."""
-    if sys.platform == "win32":
-        import msvcrt
-        msvcrt.setmode(fileno, os.O_BINARY)
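
For reference, a minimal sketch (not part of the original tree) of how the
ProtocolTestCase and TestResultStats classes removed above fit together,
assuming an importable 'subunit' package (for example upstream python-subunit)
and a previously recorded stream; the path 'tests.log' is purely illustrative:

    import sys
    import subunit

    # Open a previously recorded subunit stream in binary mode.
    stream = open('tests.log', 'rb')
    try:
        # ProtocolTestCase replays the recorded events into whatever
        # TestResult is passed to run(); TestResultStats just counts them.
        case = subunit.ProtocolTestCase(stream)
        result = subunit.TestResultStats(sys.stdout)
        case.run(result)
    finally:
        stream.close()
    result.formatStats()   # prints total/passed/failed/skipped and seen tags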

=== removed file 'python-for-subunit2junitxml/subunit/chunked.py'
--- python-for-subunit2junitxml/subunit/chunked.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/chunked.py	1970-01-01 00:00:00 +0000
@@ -1,185 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#  Copyright (C) 2011  Martin Pool <mbp@sourcefrog.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Encoder/decoder for http style chunked encoding."""
-
-from testtools.compat import _b
-
-empty = _b('')
-
-class Decoder(object):
-    """Decode chunked content to a byte stream."""
-
-    def __init__(self, output, strict=True):
-        """Create a decoder decoding to output.
-
-        :param output: A file-like object. Bytes written to the Decoder are
-            decoded to strip off the chunking and written to the output.
-            Up to a full write's worth of data or a single control line may be
-            buffered (whichever is larger). The close method should be called
-            when no more data is available, to detect short streams; the
-            write method will return non-None when the end of a stream is
-            detected. The output object must accept bytes objects.
-
-        :param strict: If True (the default), the decoder will not knowingly
-            accept input that is not conformant to the HTTP specification.
-            (This does not imply that it will catch every nonconformance.)
-            If False, it will accept incorrect input that is still
-            unambiguous.
-        """
-        self.output = output
-        self.buffered_bytes = []
-        self.state = self._read_length
-        self.body_length = 0
-        self.strict = strict
-        self._match_chars = _b("0123456789abcdefABCDEF\r\n")
-        self._slash_n = _b('\n')
-        self._slash_r = _b('\r')
-        self._slash_rn = _b('\r\n')
-        self._slash_nr = _b('\n\r')
-
-    def close(self):
-        """Close the decoder.
-
-        :raises ValueError: If the stream is incomplete ValueError is raised.
-        """
-        if self.state != self._finished:
-            raise ValueError("incomplete stream")
-
-    def _finished(self):
-        """Finished reading, return any remaining bytes."""
-        if self.buffered_bytes:
-            buffered_bytes = self.buffered_bytes
-            self.buffered_bytes = []
-            return empty.join(buffered_bytes)
-        else:
-            raise ValueError("stream is finished")
-
-    def _read_body(self):
-        """Pass body bytes to the output."""
-        while self.body_length and self.buffered_bytes:
-            if self.body_length >= len(self.buffered_bytes[0]):
-                self.output.write(self.buffered_bytes[0])
-                self.body_length -= len(self.buffered_bytes[0])
-                del self.buffered_bytes[0]
-                # No more data available.
-                if not self.body_length:
-                    self.state = self._read_length
-            else:
-                self.output.write(self.buffered_bytes[0][:self.body_length])
-                self.buffered_bytes[0] = \
-                    self.buffered_bytes[0][self.body_length:]
-                self.body_length = 0
-                self.state = self._read_length
-                return self.state()
-
-    def _read_length(self):
-        """Try to decode a length from the bytes."""
-        count_chars = []
-        for bytes in self.buffered_bytes:
-            for pos in range(len(bytes)):
-                byte = bytes[pos:pos+1]
-                if byte not in self._match_chars:
-                    break
-                count_chars.append(byte)
-                if byte == self._slash_n:
-                    break
-        if not count_chars:
-            return
-        if count_chars[-1] != self._slash_n:
-            return
-        count_str = empty.join(count_chars)
-        if self.strict:
-            if count_str[-2:] != self._slash_rn:
-                raise ValueError("chunk header invalid: %r" % count_str)
-            if self._slash_r in count_str[:-2]:
-                raise ValueError("too many CRs in chunk header %r" % count_str)
-        self.body_length = int(count_str.rstrip(self._slash_nr), 16)
-        excess_bytes = len(count_str)
-        while excess_bytes:
-            if excess_bytes >= len(self.buffered_bytes[0]):
-                excess_bytes -= len(self.buffered_bytes[0])
-                del self.buffered_bytes[0]
-            else:
-                self.buffered_bytes[0] = self.buffered_bytes[0][excess_bytes:]
-                excess_bytes = 0
-        if not self.body_length:
-            self.state = self._finished
-            if not self.buffered_bytes:
-                # May not call into self._finished with no buffered data.
-                return empty
-        else:
-            self.state = self._read_body
-        return self.state()
-
-    def write(self, bytes):
-        """Decode bytes to the output stream.
-
-        :raises ValueError: If the stream has already seen the end of file
-            marker.
-        :returns: None, or the excess bytes beyond the end of file marker.
-        """
-        if bytes:
-            self.buffered_bytes.append(bytes)
-        return self.state()
-
-
-class Encoder(object):
-    """Encode content to a stream using HTTP Chunked coding."""
-
-    def __init__(self, output):
-        """Create an encoder encoding to output.
-
-        :param output: A file-like object. Bytes written to the Encoder
-            will be encoded using HTTP chunking. Small writes may be buffered
-            and the ``close`` method must be called to finish the stream.
-        """
-        self.output = output
-        self.buffered_bytes = []
-        self.buffer_size = 0
-
-    def flush(self, extra_len=0):
-        """Flush the encoder to the output stream.
-
-        :param extra_len: Increase the size of the chunk by this many bytes
-            to allow for a subsequent write.
-        """
-        if not self.buffer_size and not extra_len:
-            return
-        buffered_bytes = self.buffered_bytes
-        buffer_size = self.buffer_size
-        self.buffered_bytes = []
-        self.buffer_size = 0
-        self.output.write(_b("%X\r\n" % (buffer_size + extra_len)))
-        if buffer_size:
-            self.output.write(empty.join(buffered_bytes))
-        return True
-
-    def write(self, bytes):
-        """Encode bytes to the output stream."""
-        bytes_len = len(bytes)
-        if self.buffer_size + bytes_len >= 65536:
-            self.flush(bytes_len)
-            self.output.write(bytes)
-        else:
-            self.buffered_bytes.append(bytes)
-            self.buffer_size += bytes_len
-
-    def close(self):
-        """Finish the stream. This does not close the output stream."""
-        self.flush()
-        self.output.write(_b("0\r\n"))

=== removed file 'python-for-subunit2junitxml/subunit/details.py'
--- python-for-subunit2junitxml/subunit/details.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/details.py	1970-01-01 00:00:00 +0000
@@ -1,119 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Handlers for outcome details."""
-
-from testtools import content, content_type
-from testtools.compat import _b, BytesIO
-
-from subunit import chunked
-
-end_marker = _b("]\n")
-quoted_marker = _b(" ]")
-empty = _b('')
-
-
-class DetailsParser(object):
-    """Base class/API reference for details parsing."""
-
-
-class SimpleDetailsParser(DetailsParser):
-    """Parser for single-part [] delimited details."""
-
-    def __init__(self, state):
-        self._message = _b("")
-        self._state = state
-
-    def lineReceived(self, line):
-        if line == end_marker:
-            self._state.endDetails()
-            return
-        if line[0:2] == quoted_marker:
-            # quoted ] start
-            self._message += line[1:]
-        else:
-            self._message += line
-
-    def get_details(self, style=None):
-        result = {}
-        if not style:
-            # We know that subunit/testtools serialise [] formatted
-            # tracebacks as utf8, but perhaps we need a ReplacingContent
-            # or something like that.
-            result['traceback'] = content.Content(
-                content_type.ContentType("text", "x-traceback",
-                {"charset": "utf8"}),
-                lambda:[self._message])
-        else:
-            if style == 'skip':
-                name = 'reason'
-            else:
-                name = 'message'
-            result[name] = content.Content(
-                content_type.ContentType("text", "plain"),
-                lambda:[self._message])
-        return result
-
-    def get_message(self):
-        return self._message
-
-
-class MultipartDetailsParser(DetailsParser):
-    """Parser for multi-part [] surrounded MIME typed chunked details."""
-
-    def __init__(self, state):
-        self._state = state
-        self._details = {}
-        self._parse_state = self._look_for_content
-
-    def _look_for_content(self, line):
-        if line == end_marker:
-            self._state.endDetails()
-            return
-        # TODO error handling
-        field, value = line[:-1].decode('utf8').split(' ', 1)
-        try:
-            main, sub = value.split('/')
-        except ValueError:
-            raise ValueError("Invalid MIME type %r" % value)
-        self._content_type = content_type.ContentType(main, sub)
-        self._parse_state = self._get_name
-
-    def _get_name(self, line):
-        self._name = line[:-1].decode('utf8')
-        self._body = BytesIO()
-        self._chunk_parser = chunked.Decoder(self._body)
-        self._parse_state = self._feed_chunks
-
-    def _feed_chunks(self, line):
-        residue = self._chunk_parser.write(line)
-        if residue is not None:
-            # Line based use always ends on no residue.
-            assert residue == empty, 'residue: %r' % (residue,)
-            body = self._body
-            self._details[self._name] = content.Content(
-                self._content_type, lambda:[body.getvalue()])
-            self._chunk_parser.close()
-            self._parse_state = self._look_for_content
-
-    def get_details(self, for_skip=False):
-        return self._details
-
-    def get_message(self):
-        return None
-
-    def lineReceived(self, line):
-        self._parse_state(line)
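
For reference, a minimal sketch (not part of the original tree) of feeding a
bracket-delimited details block into the SimpleDetailsParser removed above;
_FakeState is a hypothetical stand-in for the protocol state object whose
endDetails() the parser normally calls:

    from subunit import details

    class _FakeState(object):
        def endDetails(self):
            pass   # the real parser state finishes the test outcome here

    parser = details.SimpleDetailsParser(_FakeState())
    for line in [b"Traceback (most recent call last):\n",
                 b" ]a quoted line starting with ]\n",   # " ]" escapes a leading "]"
                 b"]\n"]:                                # bare "]" ends the block
        parser.lineReceived(line)

    # With no style given, the text is wrapped as a text/x-traceback Content.
    traceback = parser.get_details()['traceback']
    print(b"".join(traceback.iter_bytes()))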

=== removed file 'python-for-subunit2junitxml/subunit/iso8601.py'
--- python-for-subunit2junitxml/subunit/iso8601.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/iso8601.py	1970-01-01 00:00:00 +0000
@@ -1,133 +0,0 @@
-# Copyright (c) 2007 Michael Twomey
-# 
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-# 
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-# 
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""ISO 8601 date time string parsing
-
-Basic usage:
->>> import iso8601
->>> iso8601.parse_date("2007-01-25T12:00:00Z")
-datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.iso8601.Utc ...>)
->>>
-
-"""
-
-from datetime import datetime, timedelta, tzinfo
-import re
-import sys
-
-__all__ = ["parse_date", "ParseError"]
-
-# Adapted from http://delete.me.uk/2005/03/iso8601.html
-ISO8601_REGEX_PATTERN = (r"(?P<year>[0-9]{4})(-(?P<month>[0-9]{1,2})(-(?P<day>[0-9]{1,2})"
-    r"((?P<separator>.)(?P<hour>[0-9]{2}):(?P<minute>[0-9]{2})(:(?P<second>[0-9]{2})(\.(?P<fraction>[0-9]+))?)?"
-    r"(?P<timezone>Z|(([-+])([0-9]{2}):([0-9]{2})))?)?)?)?"
-)
-TIMEZONE_REGEX_PATTERN = "(?P<prefix>[+-])(?P<hours>[0-9]{2}).(?P<minutes>[0-9]{2})"
-ISO8601_REGEX = re.compile(ISO8601_REGEX_PATTERN.encode('utf8'))
-TIMEZONE_REGEX = re.compile(TIMEZONE_REGEX_PATTERN.encode('utf8'))
-
-zulu = "Z".encode('latin-1')
-minus = "-".encode('latin-1')
-
-if sys.version_info < (3, 0):
-    bytes = str
-
-
-class ParseError(Exception):
-    """Raised when there is a problem parsing a date string"""
-
-# Yoinked from python docs
-ZERO = timedelta(0)
-class Utc(tzinfo):
-    """UTC
-    
-    """
-    def utcoffset(self, dt):
-        return ZERO
-
-    def tzname(self, dt):
-        return "UTC"
-
-    def dst(self, dt):
-        return ZERO
-UTC = Utc()
-
-class FixedOffset(tzinfo):
-    """Fixed offset in hours and minutes from UTC
-    
-    """
-    def __init__(self, offset_hours, offset_minutes, name):
-        self.__offset = timedelta(hours=offset_hours, minutes=offset_minutes)
-        self.__name = name
-
-    def utcoffset(self, dt):
-        return self.__offset
-
-    def tzname(self, dt):
-        return self.__name
-
-    def dst(self, dt):
-        return ZERO
-    
-    def __repr__(self):
-        return "<FixedOffset %r>" % self.__name
-
-def parse_timezone(tzstring, default_timezone=UTC):
-    """Parses ISO 8601 time zone specs into tzinfo offsets
-    
-    """
-    if tzstring == zulu:
-        return default_timezone
-    # This isn't strictly correct, but it's common to encounter dates without
-    # timezones so I'll assume the default (which defaults to UTC).
-    # Addresses issue 4.
-    if tzstring is None:
-        return default_timezone
-    m = TIMEZONE_REGEX.match(tzstring)
-    prefix, hours, minutes = m.groups()
-    hours, minutes = int(hours), int(minutes)
-    if prefix == minus:
-        hours = -hours
-        minutes = -minutes
-    return FixedOffset(hours, minutes, tzstring)
-
-def parse_date(datestring, default_timezone=UTC):
-    """Parses ISO 8601 dates into datetime objects
-    
-    The timezone is parsed from the date string. However it is quite common to
-    have dates without a timezone (not strictly correct). In this case the
-    default timezone specified in default_timezone is used. This is UTC by
-    default.
-    """
-    if not isinstance(datestring, bytes):
-        raise ParseError("Expecting bytes %r" % datestring)
-    m = ISO8601_REGEX.match(datestring)
-    if not m:
-        raise ParseError("Unable to parse date string %r" % datestring)
-    groups = m.groupdict()
-    tz = parse_timezone(groups["timezone"], default_timezone=default_timezone)
-    if groups["fraction"] is None:
-        groups["fraction"] = 0
-    else:
-        groups["fraction"] = int(float("0.%s" % groups["fraction"]) * 1e6)
-    return datetime(int(groups["year"]), int(groups["month"]), int(groups["day"]),
-        int(groups["hour"]), int(groups["minute"]), int(groups["second"]),
-        int(groups["fraction"]), tz)

=== removed file 'python-for-subunit2junitxml/subunit/progress_model.py'
--- python-for-subunit2junitxml/subunit/progress_model.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/progress_model.py	1970-01-01 00:00:00 +0000
@@ -1,106 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2009  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Support for dealing with progress state."""
-
-class ProgressModel(object):
-    """A model of progress indicators as subunit defines it.
-    
-    Instances of this class represent a single logical operation that is
-    progressing. The operation may have many steps, and some of those steps may
-    supply their own progress information. ProgressModel uses a nested concept
-    where the overall state can be pushed, creating new starting state, and
-    later popped to return to the prior state. Many user interfaces will want
-    to display an overall summary though, and accordingly the pos() and width()
-    methods return overall summary information rather than information on the
-    current subtask.
-
-    The default state is 0/0 - indicating that the overall progress is unknown.
-    Anytime the denominator of pos/width is 0, rendering of a ProgressModel
-    should take this into consideration.
-
-    :ivar _tasks: This private attribute stores the subtasks. Each is a list:
-        pos, width, overall_numerator, overall_denominator. The overall fields
-        store the calculated overall numerator and denominator for the state
-        that was pushed.
-    """
-
-    def __init__(self):
-        """Create a ProgressModel.
-        
-        The new model has no progress data at all - it will claim a summary
-        width of zero and position of 0.
-        """
-        self._tasks = []
-        self.push()
-
-    def adjust_width(self, offset):
-        """Adjust the with of the current subtask."""
-        self._tasks[-1][1] += offset
-
-    def advance(self):
-        """Advance the current subtask."""
-        self._tasks[-1][0] += 1
-
-    def pop(self):
-        """Pop a subtask off the ProgressModel.
-
-        See push for a description of how push and pop work.
-        """
-        self._tasks.pop()
-
-    def pos(self):
-        """Return how far through the operation has progressed."""
-        if not self._tasks:
-            return 0
-        task = self._tasks[-1]
-        if len(self._tasks) > 1:
-            # scale up the overall pos by the current task or preserve it if
-            # no current width is known.
-            offset = task[2] * (task[1] or 1)
-        else:
-            offset = 0
-        return offset + task[0]
-
-    def push(self):
-        """Push a new subtask.
-
-        After pushing a new subtask, the overall progress hasn't changed. Calls
-        to adjust_width, advance and set_width will only move the progress
-        within the range that a single call to 'advance' would have covered
-        before the push - the subtask represents progressing one step in the
-        enclosing task.
-
-        Call pop() to restore the progress model to the state before push was
-        called.
-        """
-        self._tasks.append([0, 0, self.pos(), self.width()])
-
-    def set_width(self, width):
-        """Set the width of the current subtask."""
-        self._tasks[-1][1] = width
-
-    def width(self):
-        """Return the total width of the operation."""
-        if not self._tasks:
-            return 0
-        task = self._tasks[-1]
-        if len(self._tasks) > 1:
-            # scale up the overall width by the current task or preserve it if
-            # no current width is known.
-            return task[3] * (task[1] or 1)
-        else:
-            return task[1]
-
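
For reference, a minimal sketch (not part of the original tree) of the nested
push/pop behaviour of the ProgressModel removed above:

    from subunit.progress_model import ProgressModel

    progress = ProgressModel()   # starts at 0/0: overall progress unknown
    progress.set_width(3)        # the top-level operation has three steps
    progress.advance()
    print(progress.pos(), progress.width())   # 1 3

    # A subtask refines the next top-level step; the summary is rescaled.
    progress.push()
    progress.set_width(2)
    progress.advance()
    print(progress.pos(), progress.width())   # 3 6 (equivalent to 1.5 of 3 steps)

    progress.pop()               # back to the coarse view
    print(progress.pos(), progress.width())   # 1 3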

=== removed file 'python-for-subunit2junitxml/subunit/run.py'
--- python-for-subunit2junitxml/subunit/run.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/run.py	1970-01-01 00:00:00 +0000
@@ -1,73 +0,0 @@
-#!/usr/bin/python
-#
-# Simple subunit testrunner for python
-# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007
-#   
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Run a unittest testcase reporting results as Subunit.
-
-  $ python -m subunit.run mylib.tests.test_suite
-"""
-
-import sys
-
-from subunit import TestProtocolClient, get_default_formatter
-from testtools.run import (
-    BUFFEROUTPUT,
-    CATCHBREAK,
-    FAILFAST,
-    TestProgram,
-    USAGE_AS_MAIN,
-    )
-
-
-class SubunitTestRunner(object):
-    def __init__(self, stream=sys.stdout):
-        self.stream = stream
-
-    def run(self, test):
-        "Run the given test case or test suite."
-        result = TestProtocolClient(self.stream)
-        test(result)
-        return result
-
-
-class SubunitTestProgram(TestProgram):
-
-    USAGE = USAGE_AS_MAIN
-
-    def usageExit(self, msg=None):
-        if msg:
-            print msg
-        usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '',
-                 'buffer': ''}
-        if self.failfast != False:
-            usage['failfast'] = FAILFAST
-        if self.catchbreak != False:
-            usage['catchbreak'] = CATCHBREAK
-        if self.buffer != False:
-            usage['buffer'] = BUFFEROUTPUT
-        usage_text = self.USAGE % usage
-        usage_lines = usage_text.split('\n')
-        usage_lines.insert(2, "Run a test suite with a subunit reporter.")
-        usage_lines.insert(3, "")
-        print('\n'.join(usage_lines))
-        sys.exit(2)
-
-
-if __name__ == '__main__':
-    stream = get_default_formatter()
-    runner = SubunitTestRunner(stream)
-    SubunitTestProgram(module=None, argv=sys.argv, testRunner=runner,
-        stdout=sys.stdout)
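
For reference, a minimal sketch (not part of the original tree) of driving the
SubunitTestRunner removed above programmatically rather than through
'python -m subunit.run'; MyTests is a hypothetical test class and the subunit
package is assumed to be importable:

    import unittest

    from subunit import get_default_formatter
    from subunit.run import SubunitTestRunner

    class MyTests(unittest.TestCase):
        def test_ok(self):
            self.assertEqual(1 + 1, 2)

    # get_default_formatter() honours $SUBUNIT_FORMATTER and otherwise returns
    # a binary-safe stdout; run() streams the subunit protocol to it.
    stream = get_default_formatter()
    runner = SubunitTestRunner(stream)
    suite = unittest.TestLoader().loadTestsFromTestCase(MyTests)
    runner.run(suite)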

=== removed file 'python-for-subunit2junitxml/subunit/test_results.py'
--- python-for-subunit2junitxml/subunit/test_results.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/test_results.py	1970-01-01 00:00:00 +0000
@@ -1,492 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2009  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""TestResult helper classes used to by subunit."""
-
-import datetime
-
-import testtools
-
-from subunit import iso8601
-
-
-# NOT a TestResult, because we are implementing the interface, not inheriting
-# it.
-class TestResultDecorator(object):
-    """General pass-through decorator.
-
-    This provides a base that other TestResults can inherit from to
-    gain basic forwarding functionality. It also takes care of
-    handling the case where the target doesn't support newer methods
-    or features by degrading them.
-    """
-
-    def __init__(self, decorated):
-        """Create a TestResultDecorator forwarding to decorated."""
-        # Make every decorator degrade gracefully.
-        self.decorated = testtools.ExtendedToOriginalDecorator(decorated)
-
-    def startTest(self, test):
-        return self.decorated.startTest(test)
-
-    def startTestRun(self):
-        return self.decorated.startTestRun()
-
-    def stopTest(self, test):
-        return self.decorated.stopTest(test)
-
-    def stopTestRun(self):
-        return self.decorated.stopTestRun()
-
-    def addError(self, test, err=None, details=None):
-        return self.decorated.addError(test, err, details=details)
-
-    def addFailure(self, test, err=None, details=None):
-        return self.decorated.addFailure(test, err, details=details)
-
-    def addSuccess(self, test, details=None):
-        return self.decorated.addSuccess(test, details=details)
-
-    def addSkip(self, test, reason=None, details=None):
-        return self.decorated.addSkip(test, reason, details=details)
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        return self.decorated.addExpectedFailure(test, err, details=details)
-
-    def addUnexpectedSuccess(self, test, details=None):
-        return self.decorated.addUnexpectedSuccess(test, details=details)
-
-    def progress(self, offset, whence):
-        return self.decorated.progress(offset, whence)
-
-    def wasSuccessful(self):
-        return self.decorated.wasSuccessful()
-
-    @property
-    def shouldStop(self):
-        return self.decorated.shouldStop
-
-    def stop(self):
-        return self.decorated.stop()
-
-    @property
-    def testsRun(self):
-        return self.decorated.testsRun
-
-    def tags(self, new_tags, gone_tags):
-        return self.decorated.tags(new_tags, gone_tags)
-
-    def time(self, a_datetime):
-        return self.decorated.time(a_datetime)
-
-
-class HookedTestResultDecorator(TestResultDecorator):
-    """A TestResult which calls a hook on every event."""
-
-    def __init__(self, decorated):
-        self.super = super(HookedTestResultDecorator, self)
-        self.super.__init__(decorated)
-
-    def startTest(self, test):
-        self._before_event()
-        return self.super.startTest(test)
-
-    def startTestRun(self):
-        self._before_event()
-        return self.super.startTestRun()
-
-    def stopTest(self, test):
-        self._before_event()
-        return self.super.stopTest(test)
-
-    def stopTestRun(self):
-        self._before_event()
-        return self.super.stopTestRun()
-
-    def addError(self, test, err=None, details=None):
-        self._before_event()
-        return self.super.addError(test, err, details=details)
-
-    def addFailure(self, test, err=None, details=None):
-        self._before_event()
-        return self.super.addFailure(test, err, details=details)
-
-    def addSuccess(self, test, details=None):
-        self._before_event()
-        return self.super.addSuccess(test, details=details)
-
-    def addSkip(self, test, reason=None, details=None):
-        self._before_event()
-        return self.super.addSkip(test, reason, details=details)
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        self._before_event()
-        return self.super.addExpectedFailure(test, err, details=details)
-
-    def addUnexpectedSuccess(self, test, details=None):
-        self._before_event()
-        return self.super.addUnexpectedSuccess(test, details=details)
-
-    def progress(self, offset, whence):
-        self._before_event()
-        return self.super.progress(offset, whence)
-
-    def wasSuccessful(self):
-        self._before_event()
-        return self.super.wasSuccessful()
-
-    @property
-    def shouldStop(self):
-        self._before_event()
-        return self.super.shouldStop
-
-    def stop(self):
-        self._before_event()
-        return self.super.stop()
-
-    def time(self, a_datetime):
-        self._before_event()
-        return self.super.time(a_datetime)
-
-
-class AutoTimingTestResultDecorator(HookedTestResultDecorator):
-    """Decorate a TestResult to add time events to a test run.
-
-    By default this will cause a time event before every test event,
-    but if explicit time data is being provided by the test run, then
-    this decorator will turn itself off to prevent causing confusion.
-    """
-
-    def __init__(self, decorated):
-        self._time = None
-        super(AutoTimingTestResultDecorator, self).__init__(decorated)
-
-    def _before_event(self):
-        time = self._time
-        if time is not None:
-            return
-        time = datetime.datetime.utcnow().replace(tzinfo=iso8601.Utc())
-        self.decorated.time(time)
-
-    def progress(self, offset, whence):
-        return self.decorated.progress(offset, whence)
-
-    @property
-    def shouldStop(self):
-        return self.decorated.shouldStop
-
-    def time(self, a_datetime):
-        """Provide a timestamp for the current test activity.
-
-        :param a_datetime: If None, automatically add timestamps before every
-            event (this is the default behaviour if time() is not called at
-            all).  If not None, pass the provided time onto the decorated
-            result object and disable automatic timestamps.
-        """
-        self._time = a_datetime
-        return self.decorated.time(a_datetime)
-
-
-class TagCollapsingDecorator(TestResultDecorator):
-    """Collapses many 'tags' calls into one where possible."""
-
-    def __init__(self, result):
-        super(TagCollapsingDecorator, self).__init__(result)
-        # The (new, gone) tags for the current test.
-        self._current_test_tags = None
-
-    def startTest(self, test):
-        """Start a test.
-
-        Not directly passed to the client, but used for handling of tags
-        correctly.
-        """
-        self.decorated.startTest(test)
-        self._current_test_tags = set(), set()
-
-    def stopTest(self, test):
-        """Stop a test.
-
-        Not directly passed to the client, but used for handling of tags
-        correctly.
-        """
-        # Tags to output for this test.
-        if self._current_test_tags[0] or self._current_test_tags[1]:
-            self.decorated.tags(*self._current_test_tags)
-        self.decorated.stopTest(test)
-        self._current_test_tags = None
-
-    def tags(self, new_tags, gone_tags):
-        """Handle tag instructions.
-
-        Adds and removes tags as appropriate. If a test is currently running,
-        the tags only apply to that test and do not affect subsequent tests.
-
-        :param new_tags: Tags to add.
-        :param gone_tags: Tags to remove.
-        """
-        if self._current_test_tags is not None:
-            # gather the tags until the test stops.
-            self._current_test_tags[0].update(new_tags)
-            self._current_test_tags[0].difference_update(gone_tags)
-            self._current_test_tags[1].update(gone_tags)
-            self._current_test_tags[1].difference_update(new_tags)
-        else:
-            return self.decorated.tags(new_tags, gone_tags)
-
-
-class TimeCollapsingDecorator(HookedTestResultDecorator):
-    """Only pass on the first and last of a consecutive sequence of times."""
-
-    def __init__(self, decorated):
-        super(TimeCollapsingDecorator, self).__init__(decorated)
-        self._last_received_time = None
-        self._last_sent_time = None
-
-    def _before_event(self):
-        if self._last_received_time is None:
-            return
-        if self._last_received_time != self._last_sent_time:
-            self.decorated.time(self._last_received_time)
-            self._last_sent_time = self._last_received_time
-        self._last_received_time = None
-
-    def time(self, a_time):
-        # Don't upcall, because we don't want to call _before_event, it's only
-        # for non-time events.
-        if self._last_received_time is None:
-            self.decorated.time(a_time)
-            self._last_sent_time = a_time
-        self._last_received_time = a_time
-
-
-def all_true(bools):
-    """Return True if all of 'bools' are True. False otherwise."""
-    for b in bools:
-        if not b:
-            return False
-    return True
-
-
-class TestResultFilter(TestResultDecorator):
-    """A pyunit TestResult interface implementation which filters tests.
-
-    Tests that pass the filter are handed on to another TestResult instance
-    for further processing/reporting. To obtain the filtered results,
-    the other instance must be interrogated.
-
-    :ivar result: The result that tests are passed to after filtering.
-    :ivar filter_predicate: The callback run to decide whether to pass
-        a result.
-    """
-
-    def __init__(self, result, filter_error=False, filter_failure=False,
-        filter_success=True, filter_skip=False,
-        filter_predicate=None, fixup_expected_failures=None):
-        """Create a FilterResult object filtering to result.
-
-        :param filter_error: Filter out errors.
-        :param filter_failure: Filter out failures.
-        :param filter_success: Filter out successful tests.
-        :param filter_skip: Filter out skipped tests.
-        :param filter_predicate: A callable taking (test, outcome, err,
-            details) and returning True if the result should be passed
-            through.  err and details may be none if no error or extra
-            metadata is available. outcome is the name of the outcome such
-            as 'success' or 'failure'.
-        :param fixup_expected_failures: Set of test ids to consider known
-            failing.
-        """
-        super(TestResultFilter, self).__init__(result)
-        self.decorated = TimeCollapsingDecorator(
-            TagCollapsingDecorator(self.decorated))
-        predicates = []
-        if filter_error:
-            predicates.append(lambda t, outcome, e, d: outcome != 'error')
-        if filter_failure:
-            predicates.append(lambda t, outcome, e, d: outcome != 'failure')
-        if filter_success:
-            predicates.append(lambda t, outcome, e, d: outcome != 'success')
-        if filter_skip:
-            predicates.append(lambda t, outcome, e, d: outcome != 'skip')
-        if filter_predicate is not None:
-            predicates.append(filter_predicate)
-        self.filter_predicate = (
-            lambda test, outcome, err, details:
-                all_true(p(test, outcome, err, details) for p in predicates))
-        # The current test (for filtering tags)
-        self._current_test = None
-        # Has the current test been filtered (for outputting test tags)
-        self._current_test_filtered = None
-        # Calls to this result that we don't know whether to forward on yet.
-        self._buffered_calls = []
-        if fixup_expected_failures is None:
-            self._fixup_expected_failures = frozenset()
-        else:
-            self._fixup_expected_failures = fixup_expected_failures
-
-    def addError(self, test, err=None, details=None):
-        if (self.filter_predicate(test, 'error', err, details)):
-            if self._failure_expected(test):
-                self._buffered_calls.append(
-                    ('addExpectedFailure', [test, err], {'details': details}))
-            else:
-                self._buffered_calls.append(
-                    ('addError', [test, err], {'details': details}))
-        else:
-            self._filtered()
-
-    def addFailure(self, test, err=None, details=None):
-        if (self.filter_predicate(test, 'failure', err, details)):
-            if self._failure_expected(test):
-                self._buffered_calls.append(
-                    ('addExpectedFailure', [test, err], {'details': details}))
-            else:
-                self._buffered_calls.append(
-                    ('addFailure', [test, err], {'details': details}))
-        else:
-            self._filtered()
-
-    def addSkip(self, test, reason=None, details=None):
-        if (self.filter_predicate(test, 'skip', reason, details)):
-            self._buffered_calls.append(
-                ('addSkip', [test, reason], {'details': details}))
-        else:
-            self._filtered()
-
-    def addSuccess(self, test, details=None):
-        if (self.filter_predicate(test, 'success', None, details)):
-            if self._failure_expected(test):
-                self._buffered_calls.append(
-                    ('addUnexpectedSuccess', [test], {'details': details}))
-            else:
-                self._buffered_calls.append(
-                    ('addSuccess', [test], {'details': details}))
-        else:
-            self._filtered()
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        if self.filter_predicate(test, 'expectedfailure', err, details):
-            self._buffered_calls.append(
-                ('addExpectedFailure', [test, err], {'details': details}))
-        else:
-            self._filtered()
-
-    def addUnexpectedSuccess(self, test, details=None):
-        self._buffered_calls.append(
-            ('addUnexpectedSuccess', [test], {'details': details}))
-
-    def _filtered(self):
-        self._current_test_filtered = True
-
-    def _failure_expected(self, test):
-        return (test.id() in self._fixup_expected_failures)
-
-    def startTest(self, test):
-        """Start a test.
-
-        Not directly passed to the client, but used for handling of tags
-        correctly.
-        """
-        self._current_test = test
-        self._current_test_filtered = False
-        self._buffered_calls.append(('startTest', [test], {}))
-
-    def stopTest(self, test):
-        """Stop a test.
-
-        Not directly passed to the client, but used for handling of tags
-        correctly.
-        """
-        if not self._current_test_filtered:
-            # Tags to output for this test.
-            for method, args, kwargs in self._buffered_calls:
-                getattr(self.decorated, method)(*args, **kwargs)
-            self.decorated.stopTest(test)
-        self._current_test = None
-        self._current_test_filtered = None
-        self._buffered_calls = []
-
-    def time(self, a_time):
-        if self._current_test is not None:
-            self._buffered_calls.append(('time', [a_time], {}))
-        else:
-            return self.decorated.time(a_time)
-
-    def id_to_orig_id(self, id):
-        if id.startswith("subunit.RemotedTestCase."):
-            return id[len("subunit.RemotedTestCase."):]
-        return id
-
-
-class TestIdPrintingResult(testtools.TestResult):
-
-    def __init__(self, stream, show_times=False):
-        """Create a FilterResult object outputting to stream."""
-        super(TestIdPrintingResult, self).__init__()
-        self._stream = stream
-        self.failed_tests = 0
-        self.__time = None
-        self.show_times = show_times
-        self._test = None
-        self._test_duration = 0
-
-    def addError(self, test, err):
-        self.failed_tests += 1
-        self._test = test
-
-    def addFailure(self, test, err):
-        self.failed_tests += 1
-        self._test = test
-
-    def addSuccess(self, test):
-        self._test = test
-
-    def addSkip(self, test, reason=None, details=None):
-        self._test = test
-
-    def addUnexpectedSuccess(self, test, details=None):
-        self.failed_tests += 1
-        self._test = test
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        self._test = test
-
-    def reportTest(self, test, duration):
-        if self.show_times:
-            seconds = duration.seconds
-            seconds += duration.days * 3600 * 24
-            seconds += duration.microseconds / 1000000.0
-            self._stream.write(test.id() + ' %0.3f\n' % seconds)
-        else:
-            self._stream.write(test.id() + '\n')
-
-    def startTest(self, test):
-        self._start_time = self._time()
-
-    def stopTest(self, test):
-        test_duration = self._time() - self._start_time
-        self.reportTest(self._test, test_duration)
-
-    def time(self, time):
-        self.__time = time
-
-    def _time(self):
-        return self.__time
-
-    def wasSuccessful(self):
-        "Tells whether or not this result was a success"
-        return self.failed_tests == 0
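
For reference, a minimal sketch (not part of the original tree) of the
TestResultFilter removed above: only outcomes that survive its predicates are
forwarded to the wrapped result. Demo is a hypothetical test class, and the
subunit and testtools packages are assumed to be installed:

    import unittest

    from subunit.test_results import TestResultFilter

    class Demo(unittest.TestCase):
        def test_pass(self):
            pass

        def test_fail(self):
            self.fail("boom")

    underlying = unittest.TestResult()
    # Successes are filtered out by default; drop skips as well here.
    filtered = TestResultFilter(underlying, filter_skip=True)
    unittest.TestLoader().loadTestsFromTestCase(Demo).run(filtered)

    # Only the failing test reached the wrapped result.
    print(underlying.testsRun, len(underlying.failures))   # 1 1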

=== removed directory 'python-for-subunit2junitxml/subunit/tests'
=== removed file 'python-for-subunit2junitxml/subunit/tests/TestUtil.py'
--- python-for-subunit2junitxml/subunit/tests/TestUtil.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/TestUtil.py	1970-01-01 00:00:00 +0000
@@ -1,80 +0,0 @@
-# Copyright (c) 2004 Canonical Limited
-#       Author: Robert Collins <robert.collins@canonical.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-#
-
-import sys
-import logging
-import unittest
-
-
-class LogCollector(logging.Handler):
-    def __init__(self):
-        logging.Handler.__init__(self)
-        self.records=[]
-    def emit(self, record):
-        self.records.append(record.getMessage())
-
-
-def makeCollectingLogger():
-    """I make a logger instance that collects its logs for programmatic analysis
-    -> (logger, collector)"""
-    logger=logging.Logger("collector")
-    handler=LogCollector()
-    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
-    logger.addHandler(handler)
-    return logger, handler
-
-
-def visitTests(suite, visitor):
-    """A foreign method for visiting the tests in a test suite."""
-    for test in suite._tests:
-        #Abusing types to avoid monkey patching unittest.TestCase.
-        # Maybe that would be better?
-        try:
-            test.visit(visitor)
-        except AttributeError:
-            if isinstance(test, unittest.TestCase):
-                visitor.visitCase(test)
-            elif isinstance(test, unittest.TestSuite):
-                visitor.visitSuite(test)
-                visitTests(test, visitor)
-            else:
-                print ("unvisitable non-unittest.TestCase element %r (%r)" % (test, test.__class__))
-
-
-class TestSuite(unittest.TestSuite):
-    """I am an extended TestSuite with a visitor interface.
-    This is primarily to allow filtering of tests - and suites or
-    more in the future. An iterator of just tests wouldn't scale..."""
-
-    def visit(self, visitor):
-        """visit the composite. Visiting is depth-first.
-        current callbacks are visitSuite and visitCase."""
-        visitor.visitSuite(self)
-        visitTests(self, visitor)
-
-
-class TestLoader(unittest.TestLoader):
-    """Custome TestLoader to set the right TestSuite class."""
-    suiteClass = TestSuite
-
-class TestVisitor(object):
-    """A visitor for Tests"""
-    def visitSuite(self, aTestSuite):
-        pass
-    def visitCase(self, aTestCase):
-        pass

=== removed file 'python-for-subunit2junitxml/subunit/tests/__init__.py'
--- python-for-subunit2junitxml/subunit/tests/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,41 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-from subunit.tests import (
-    TestUtil,
-    test_chunked,
-    test_details,
-    test_progress_model,
-    test_subunit_filter,
-    test_subunit_stats,
-    test_subunit_tags,
-    test_tap2subunit,
-    test_test_protocol,
-    test_test_results,
-    )
-
-def test_suite():
-    result = TestUtil.TestSuite()
-    result.addTest(test_chunked.test_suite())
-    result.addTest(test_details.test_suite())
-    result.addTest(test_progress_model.test_suite())
-    result.addTest(test_test_results.test_suite())
-    result.addTest(test_test_protocol.test_suite())
-    result.addTest(test_tap2subunit.test_suite())
-    result.addTest(test_subunit_filter.test_suite())
-    result.addTest(test_subunit_tags.test_suite())
-    result.addTest(test_subunit_stats.test_suite())
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/sample-script.py'
--- python-for-subunit2junitxml/subunit/tests/sample-script.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/sample-script.py	1970-01-01 00:00:00 +0000
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-import sys
-if sys.platform == "win32":
-    import msvcrt, os
-    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
-if len(sys.argv) == 2:
-    # subunit.tests.test_test_protocol.TestExecTestCase.test_sample_method_args 
-    # uses this code path to be sure that the arguments were passed to
-    # sample-script.py
-    print "test fail"
-    print "error fail"
-    sys.exit(0)
-print "test old mcdonald"
-print "success old mcdonald"
-print "test bing crosby"
-print "failure bing crosby ["
-print "foo.c:53:ERROR invalid state"
-print "]"
-print "test an error"
-print "error an error"
-sys.exit(0)

=== removed file 'python-for-subunit2junitxml/subunit/tests/sample-two-script.py'
--- python-for-subunit2junitxml/subunit/tests/sample-two-script.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/sample-two-script.py	1970-01-01 00:00:00 +0000
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-import sys
-print "test old mcdonald"
-print "success old mcdonald"
-print "test bing crosby"
-print "success bing crosby"
-sys.exit(0)

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_chunked.py'
--- python-for-subunit2junitxml/subunit/tests/test_chunked.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_chunked.py	1970-01-01 00:00:00 +0000
@@ -1,152 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#  Copyright (C) 2011  Martin Pool <mbp@sourcefrog.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-import unittest
-
-from testtools.compat import _b, BytesIO
-
-import subunit.chunked
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result
-
-
-class TestDecode(unittest.TestCase):
-
-    def setUp(self):
-        unittest.TestCase.setUp(self)
-        self.output = BytesIO()
-        self.decoder = subunit.chunked.Decoder(self.output)
-
-    def test_close_read_length_short_errors(self):
-        self.assertRaises(ValueError, self.decoder.close)
-
-    def test_close_body_short_errors(self):
-        self.assertEqual(None, self.decoder.write(_b('2\r\na')))
-        self.assertRaises(ValueError, self.decoder.close)
-
-    def test_close_body_buffered_data_errors(self):
-        self.assertEqual(None, self.decoder.write(_b('2\r')))
-        self.assertRaises(ValueError, self.decoder.close)
-
-    def test_close_after_finished_stream_safe(self):
-        self.assertEqual(None, self.decoder.write(_b('2\r\nab')))
-        self.assertEqual(_b(''), self.decoder.write(_b('0\r\n')))
-        self.decoder.close()
-
-    def test_decode_nothing(self):
-        self.assertEqual(_b(''), self.decoder.write(_b('0\r\n')))
-        self.assertEqual(_b(''), self.output.getvalue())
-
-    def test_decode_serialised_form(self):
-        self.assertEqual(None, self.decoder.write(_b("F\r\n")))
-        self.assertEqual(None, self.decoder.write(_b("serialised\n")))
-        self.assertEqual(_b(''), self.decoder.write(_b("form0\r\n")))
-
-    def test_decode_short(self):
-        self.assertEqual(_b(''), self.decoder.write(_b('3\r\nabc0\r\n')))
-        self.assertEqual(_b('abc'), self.output.getvalue())
-
-    def test_decode_combines_short(self):
-        self.assertEqual(_b(''), self.decoder.write(_b('6\r\nabcdef0\r\n')))
-        self.assertEqual(_b('abcdef'), self.output.getvalue())
-
-    def test_decode_excess_bytes_from_write(self):
-        self.assertEqual(_b('1234'), self.decoder.write(_b('3\r\nabc0\r\n1234')))
-        self.assertEqual(_b('abc'), self.output.getvalue())
-
-    def test_decode_write_after_finished_errors(self):
-        self.assertEqual(_b('1234'), self.decoder.write(_b('3\r\nabc0\r\n1234')))
-        self.assertRaises(ValueError, self.decoder.write, _b(''))
-
-    def test_decode_hex(self):
-        self.assertEqual(_b(''), self.decoder.write(_b('A\r\n12345678900\r\n')))
-        self.assertEqual(_b('1234567890'), self.output.getvalue())
-
-    def test_decode_long_ranges(self):
-        self.assertEqual(None, self.decoder.write(_b('10000\r\n')))
-        self.assertEqual(None, self.decoder.write(_b('1' * 65536)))
-        self.assertEqual(None, self.decoder.write(_b('10000\r\n')))
-        self.assertEqual(None, self.decoder.write(_b('2' * 65536)))
-        self.assertEqual(_b(''), self.decoder.write(_b('0\r\n')))
-        self.assertEqual(_b('1' * 65536 + '2' * 65536), self.output.getvalue())
-
-    def test_decode_newline_nonstrict(self):
-        """Tolerate chunk markers with no CR character."""
-        # From <http://pad.lv/505078>
-        self.decoder = subunit.chunked.Decoder(self.output, strict=False)
-        self.assertEqual(None, self.decoder.write(_b('a\n')))
-        self.assertEqual(None, self.decoder.write(_b('abcdeabcde')))
-        self.assertEqual(_b(''), self.decoder.write(_b('0\n')))
-        self.assertEqual(_b('abcdeabcde'), self.output.getvalue())
-
-    def test_decode_strict_newline_only(self):
-        """Reject chunk markers with no CR character in strict mode."""
-        # From <http://pad.lv/505078>
-        self.assertRaises(ValueError,
-            self.decoder.write, _b('a\n'))
-
-    def test_decode_strict_multiple_crs(self):
-        self.assertRaises(ValueError,
-            self.decoder.write, _b('a\r\r\n'))
-
-    def test_decode_short_header(self):
-        self.assertRaises(ValueError,
-            self.decoder.write, _b('\n'))
-
-
-class TestEncode(unittest.TestCase):
-
-    def setUp(self):
-        unittest.TestCase.setUp(self)
-        self.output = BytesIO()
-        self.encoder = subunit.chunked.Encoder(self.output)
-
-    def test_encode_nothing(self):
-        self.encoder.close()
-        self.assertEqual(_b('0\r\n'), self.output.getvalue())
-
-    def test_encode_empty(self):
-        self.encoder.write(_b(''))
-        self.encoder.close()
-        self.assertEqual(_b('0\r\n'), self.output.getvalue())
-
-    def test_encode_short(self):
-        self.encoder.write(_b('abc'))
-        self.encoder.close()
-        self.assertEqual(_b('3\r\nabc0\r\n'), self.output.getvalue())
-
-    def test_encode_combines_short(self):
-        self.encoder.write(_b('abc'))
-        self.encoder.write(_b('def'))
-        self.encoder.close()
-        self.assertEqual(_b('6\r\nabcdef0\r\n'), self.output.getvalue())
-
-    def test_encode_over_9_is_in_hex(self):
-        self.encoder.write(_b('1234567890'))
-        self.encoder.close()
-        self.assertEqual(_b('A\r\n12345678900\r\n'), self.output.getvalue())
-
-    def test_encode_long_ranges_not_combined(self):
-        self.encoder.write(_b('1' * 65536))
-        self.encoder.write(_b('2' * 65536))
-        self.encoder.close()
-        self.assertEqual(_b('10000\r\n' + '1' * 65536 + '10000\r\n' +
-            '2' * 65536 + '0\r\n'), self.output.getvalue())

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_details.py'
--- python-for-subunit2junitxml/subunit/tests/test_details.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_details.py	1970-01-01 00:00:00 +0000
@@ -1,112 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-import unittest
-
-from testtools.compat import _b, StringIO
-
-import subunit.tests
-from subunit import content, content_type, details
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result
-
-
-class TestSimpleDetails(unittest.TestCase):
-
-    def test_lineReceived(self):
-        parser = details.SimpleDetailsParser(None)
-        parser.lineReceived(_b("foo\n"))
-        parser.lineReceived(_b("bar\n"))
-        self.assertEqual(_b("foo\nbar\n"), parser._message)
-
-    def test_lineReceived_escaped_bracket(self):
-        parser = details.SimpleDetailsParser(None)
-        parser.lineReceived(_b("foo\n"))
-        parser.lineReceived(_b(" ]are\n"))
-        parser.lineReceived(_b("bar\n"))
-        self.assertEqual(_b("foo\n]are\nbar\n"), parser._message)
-
-    def test_get_message(self):
-        parser = details.SimpleDetailsParser(None)
-        self.assertEqual(_b(""), parser.get_message())
-
-    def test_get_details(self):
-        parser = details.SimpleDetailsParser(None)
-        traceback = ""
-        expected = {}
-        expected['traceback'] = content.Content(
-            content_type.ContentType("text", "x-traceback",
-                {'charset': 'utf8'}),
-            lambda:[_b("")])
-        found = parser.get_details()
-        self.assertEqual(expected.keys(), found.keys())
-        self.assertEqual(expected['traceback'].content_type,
-            found['traceback'].content_type)
-        self.assertEqual(_b('').join(expected['traceback'].iter_bytes()),
-            _b('').join(found['traceback'].iter_bytes()))
-
-    def test_get_details_skip(self):
-        parser = details.SimpleDetailsParser(None)
-        traceback = ""
-        expected = {}
-        expected['reason'] = content.Content(
-            content_type.ContentType("text", "plain"),
-            lambda:[_b("")])
-        found = parser.get_details("skip")
-        self.assertEqual(expected, found)
-
-    def test_get_details_success(self):
-        parser = details.SimpleDetailsParser(None)
-        traceback = ""
-        expected = {}
-        expected['message'] = content.Content(
-            content_type.ContentType("text", "plain"),
-            lambda:[_b("")])
-        found = parser.get_details("success")
-        self.assertEqual(expected, found)
-
-
-class TestMultipartDetails(unittest.TestCase):
-
-    def test_get_message_is_None(self):
-        parser = details.MultipartDetailsParser(None)
-        self.assertEqual(None, parser.get_message())
-
-    def test_get_details(self):
-        parser = details.MultipartDetailsParser(None)
-        self.assertEqual({}, parser.get_details())
-
-    def test_parts(self):
-        parser = details.MultipartDetailsParser(None)
-        parser.lineReceived(_b("Content-Type: text/plain\n"))
-        parser.lineReceived(_b("something\n"))
-        parser.lineReceived(_b("F\r\n"))
-        parser.lineReceived(_b("serialised\n"))
-        parser.lineReceived(_b("form0\r\n"))
-        expected = {}
-        expected['something'] = content.Content(
-            content_type.ContentType("text", "plain"),
-            lambda:[_b("serialised\nform")])
-        found = parser.get_details()
-        self.assertEqual(expected.keys(), found.keys())
-        self.assertEqual(expected['something'].content_type,
-            found['something'].content_type)
-        self.assertEqual(_b('').join(expected['something'].iter_bytes()),
-            _b('').join(found['something'].iter_bytes()))

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_progress_model.py'
--- python-for-subunit2junitxml/subunit/tests/test_progress_model.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_progress_model.py	1970-01-01 00:00:00 +0000
@@ -1,118 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2009  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-import unittest
-
-import subunit
-from subunit.progress_model import ProgressModel
-
-
-class TestProgressModel(unittest.TestCase):
-
-    def assertProgressSummary(self, pos, total, progress):
-        """Assert that a progress model has reached a particular point."""
-        self.assertEqual(pos, progress.pos())
-        self.assertEqual(total, progress.width())
-
-    def test_new_progress_0_0(self):
-        progress = ProgressModel()
-        self.assertProgressSummary(0, 0, progress)
-
-    def test_advance_0_0(self):
-        progress = ProgressModel()
-        progress.advance()
-        self.assertProgressSummary(1, 0, progress)
-
-    def test_advance_1_0(self):
-        progress = ProgressModel()
-        progress.advance()
-        self.assertProgressSummary(1, 0, progress)
-
-    def test_set_width_absolute(self):
-        progress = ProgressModel()
-        progress.set_width(10)
-        self.assertProgressSummary(0, 10, progress)
-
-    def test_set_width_absolute_preserves_pos(self):
-        progress = ProgressModel()
-        progress.advance()
-        progress.set_width(2)
-        self.assertProgressSummary(1, 2, progress)
-
-    def test_adjust_width(self):
-        progress = ProgressModel()
-        progress.adjust_width(10)
-        self.assertProgressSummary(0, 10, progress)
-        progress.adjust_width(-10)
-        self.assertProgressSummary(0, 0, progress)
-
-    def test_adjust_width_preserves_pos(self):
-        progress = ProgressModel()
-        progress.advance()
-        progress.adjust_width(10)
-        self.assertProgressSummary(1, 10, progress)
-        progress.adjust_width(-10)
-        self.assertProgressSummary(1, 0, progress)
-
-    def test_push_preserves_progress(self):
-        progress = ProgressModel()
-        progress.adjust_width(3)
-        progress.advance()
-        progress.push()
-        self.assertProgressSummary(1, 3, progress)
-
-    def test_advance_advances_substack(self):
-        progress = ProgressModel()
-        progress.adjust_width(3)
-        progress.advance()
-        progress.push()
-        progress.adjust_width(1)
-        progress.advance()
-        self.assertProgressSummary(2, 3, progress)
-
-    def test_adjust_width_adjusts_substack(self):
-        progress = ProgressModel()
-        progress.adjust_width(3)
-        progress.advance()
-        progress.push()
-        progress.adjust_width(2)
-        progress.advance()
-        self.assertProgressSummary(3, 6, progress)
-
-    def test_set_width_adjusts_substack(self):
-        progress = ProgressModel()
-        progress.adjust_width(3)
-        progress.advance()
-        progress.push()
-        progress.set_width(2)
-        progress.advance()
-        self.assertProgressSummary(3, 6, progress)
-
-    def test_pop_restores_progress(self):
-        progress = ProgressModel()
-        progress.adjust_width(3)
-        progress.advance()
-        progress.push()
-        progress.adjust_width(1)
-        progress.advance()
-        progress.pop()
-        self.assertProgressSummary(1, 3, progress)
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_subunit_filter.py'
--- python-for-subunit2junitxml/subunit/tests/test_subunit_filter.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_subunit_filter.py	1970-01-01 00:00:00 +0000
@@ -1,208 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Tests for subunit.TestResultFilter."""
-
-from datetime import datetime
-from subunit import iso8601
-import unittest
-
-from testtools import TestCase
-from testtools.compat import _b, BytesIO, StringIO
-from testtools.testresult.doubles import ExtendedTestResult
-
-import subunit
-from subunit.test_results import TestResultFilter
-
-
-class TestTestResultFilter(TestCase):
-    """Test for TestResultFilter, a TestResult object which filters tests."""
-
-    # While TestResultFilter works on python objects, using a subunit stream
-    # is an easy pithy way of getting a series of test objects to call into
-    # the TestResult, and as TestResultFilter is intended for use with subunit
-    # also has the benefit of detecting any interface skew issues.
-    example_subunit_stream = _b("""\
-tags: global
-test passed
-success passed
-test failed
-tags: local
-failure failed
-test error
-error error [
-error details
-]
-test skipped
-skip skipped
-test todo
-xfail todo
-""")
-
-    def run_tests(self, result_filter, input_stream=None):
-        """Run tests through the given filter.
-
-        :param result_filter: A filtering TestResult object.
-        :param input_stream: Bytes of subunit stream data. If not provided,
-            uses TestTestResultFilter.example_subunit_stream.
-        """
-        if input_stream is None:
-            input_stream = self.example_subunit_stream
-        test = subunit.ProtocolTestCase(BytesIO(input_stream))
-        test.run(result_filter)
-
-    def test_default(self):
-        """The default is to exclude success and include everything else."""
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result)
-        self.run_tests(result_filter)
-        # skips are seen as success by default python TestResult.
-        self.assertEqual(['error'],
-            [error[0].id() for error in filtered_result.errors])
-        self.assertEqual(['failed'],
-            [failure[0].id() for failure in
-            filtered_result.failures])
-        self.assertEqual(4, filtered_result.testsRun)
-
-    def test_exclude_errors(self):
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result, filter_error=True)
-        self.run_tests(result_filter)
-        # skips are seen as errors by default python TestResult.
-        self.assertEqual([], filtered_result.errors)
-        self.assertEqual(['failed'],
-            [failure[0].id() for failure in
-            filtered_result.failures])
-        self.assertEqual(3, filtered_result.testsRun)
-
-    def test_fixup_expected_failures(self):
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result,
-            fixup_expected_failures=set(["failed"]))
-        self.run_tests(result_filter)
-        self.assertEqual(['failed', 'todo'],
-            [failure[0].id() for failure in filtered_result.expectedFailures])
-        self.assertEqual([], filtered_result.failures)
-        self.assertEqual(4, filtered_result.testsRun)
-
-    def test_fixup_expected_errors(self):
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result,
-            fixup_expected_failures=set(["error"]))
-        self.run_tests(result_filter)
-        self.assertEqual(['error', 'todo'],
-            [failure[0].id() for failure in filtered_result.expectedFailures])
-        self.assertEqual([], filtered_result.errors)
-        self.assertEqual(4, filtered_result.testsRun)
-
-    def test_fixup_unexpected_success(self):
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result, filter_success=False,
-            fixup_expected_failures=set(["passed"]))
-        self.run_tests(result_filter)
-        self.assertEqual(['passed'],
-            [passed.id() for passed in filtered_result.unexpectedSuccesses])
-        self.assertEqual(5, filtered_result.testsRun)
-
-    def test_exclude_failure(self):
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result, filter_failure=True)
-        self.run_tests(result_filter)
-        self.assertEqual(['error'],
-            [error[0].id() for error in filtered_result.errors])
-        self.assertEqual([],
-            [failure[0].id() for failure in
-            filtered_result.failures])
-        self.assertEqual(3, filtered_result.testsRun)
-
-    def test_exclude_skips(self):
-        filtered_result = subunit.TestResultStats(None)
-        result_filter = TestResultFilter(filtered_result, filter_skip=True)
-        self.run_tests(result_filter)
-        self.assertEqual(0, filtered_result.skipped_tests)
-        self.assertEqual(2, filtered_result.failed_tests)
-        self.assertEqual(3, filtered_result.testsRun)
-
-    def test_include_success(self):
-        """Successes can be included if requested."""
-        filtered_result = unittest.TestResult()
-        result_filter = TestResultFilter(filtered_result,
-            filter_success=False)
-        self.run_tests(result_filter)
-        self.assertEqual(['error'],
-            [error[0].id() for error in filtered_result.errors])
-        self.assertEqual(['failed'],
-            [failure[0].id() for failure in
-            filtered_result.failures])
-        self.assertEqual(5, filtered_result.testsRun)
-
-    def test_filter_predicate(self):
-        """You can filter by predicate callbacks"""
-        filtered_result = unittest.TestResult()
-        def filter_cb(test, outcome, err, details):
-            return outcome == 'success'
-        result_filter = TestResultFilter(filtered_result,
-            filter_predicate=filter_cb,
-            filter_success=False)
-        self.run_tests(result_filter)
-        # Only success should pass
-        self.assertEqual(1, filtered_result.testsRun)
-
-    def test_time_ordering_preserved(self):
-        # Passing a subunit stream through TestResultFilter preserves the
-        # relative ordering of 'time' directives and any other subunit
-        # directives that are still included.
-        date_a = datetime(year=2000, month=1, day=1, tzinfo=iso8601.UTC)
-        date_b = datetime(year=2000, month=1, day=2, tzinfo=iso8601.UTC)
-        date_c = datetime(year=2000, month=1, day=3, tzinfo=iso8601.UTC)
-        subunit_stream = _b('\n'.join([
-            "time: %s",
-            "test: foo",
-            "time: %s",
-            "error: foo",
-            "time: %s",
-            ""]) % (date_a, date_b, date_c))
-        result = ExtendedTestResult()
-        result_filter = TestResultFilter(result)
-        self.run_tests(result_filter, subunit_stream)
-        foo = subunit.RemotedTestCase('foo')
-        self.assertEquals(
-            [('time', date_a),
-             ('startTest', foo),
-             ('time', date_b),
-             ('addError', foo, {}),
-             ('stopTest', foo),
-             ('time', date_c)], result._events)
-
-    def test_skip_preserved(self):
-        subunit_stream = _b('\n'.join([
-            "test: foo",
-            "skip: foo",
-            ""]))
-        result = ExtendedTestResult()
-        result_filter = TestResultFilter(result)
-        self.run_tests(result_filter, subunit_stream)
-        foo = subunit.RemotedTestCase('foo')
-        self.assertEquals(
-            [('startTest', foo),
-             ('addSkip', foo, {}),
-             ('stopTest', foo), ], result._events)
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_subunit_stats.py'
--- python-for-subunit2junitxml/subunit/tests/test_subunit_stats.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_subunit_stats.py	1970-01-01 00:00:00 +0000
@@ -1,84 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Tests for subunit.TestResultStats."""
-
-import unittest
-
-from testtools.compat import _b, BytesIO, StringIO
-
-import subunit
-
-
-class TestTestResultStats(unittest.TestCase):
-    """Test for TestResultStats, a TestResult object that generates stats."""
-
-    def setUp(self):
-        self.output = StringIO()
-        self.result = subunit.TestResultStats(self.output)
-        self.input_stream = BytesIO()
-        self.test = subunit.ProtocolTestCase(self.input_stream)
-
-    def test_stats_empty(self):
-        self.test.run(self.result)
-        self.assertEqual(0, self.result.total_tests)
-        self.assertEqual(0, self.result.passed_tests)
-        self.assertEqual(0, self.result.failed_tests)
-        self.assertEqual(set(), self.result.seen_tags)
-
-    def setUpUsedStream(self):
-        self.input_stream.write(_b("""tags: global
-test passed
-success passed
-test failed
-tags: local
-failure failed
-test error
-error error
-test skipped
-skip skipped
-test todo
-xfail todo
-"""))
-        self.input_stream.seek(0)
-        self.test.run(self.result)
-    
-    def test_stats_smoke_everything(self):
-        # Statistics are calculated usefully.
-        self.setUpUsedStream()
-        self.assertEqual(5, self.result.total_tests)
-        self.assertEqual(2, self.result.passed_tests)
-        self.assertEqual(2, self.result.failed_tests)
-        self.assertEqual(1, self.result.skipped_tests)
-        self.assertEqual(set(["global", "local"]), self.result.seen_tags)
-
-    def test_stat_formatting(self):
-        expected = ("""
-Total tests:       5
-Passed tests:      2
-Failed tests:      2
-Skipped tests:     1
-Seen tags: global, local
-""")[1:]
-        self.setUpUsedStream()
-        self.result.formatStats()
-        self.assertEqual(expected, self.output.getvalue())
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_subunit_tags.py'
--- python-for-subunit2junitxml/subunit/tests/test_subunit_tags.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_subunit_tags.py	1970-01-01 00:00:00 +0000
@@ -1,69 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Tests for subunit.tag_stream."""
-
-import unittest
-
-from testtools.compat import StringIO
-
-import subunit
-import subunit.test_results
-
-
-class TestSubUnitTags(unittest.TestCase):
-
-    def setUp(self):
-        self.original = StringIO()
-        self.filtered = StringIO()
-
-    def test_add_tag(self):
-        self.original.write("tags: foo\n")
-        self.original.write("test: test\n")
-        self.original.write("tags: bar -quux\n")
-        self.original.write("success: test\n")
-        self.original.seek(0)
-        result = subunit.tag_stream(self.original, self.filtered, ["quux"])
-        self.assertEqual([
-            "tags: quux",
-            "tags: foo",
-            "test: test",
-            "tags: bar",
-            "success: test",
-            ],
-            self.filtered.getvalue().splitlines())
-
-    def test_remove_tag(self):
-        self.original.write("tags: foo\n")
-        self.original.write("test: test\n")
-        self.original.write("tags: bar -quux\n")
-        self.original.write("success: test\n")
-        self.original.seek(0)
-        result = subunit.tag_stream(self.original, self.filtered, ["-bar"])
-        self.assertEqual([
-            "tags: -bar",
-            "tags: foo",
-            "test: test",
-            "tags: -quux",
-            "success: test",
-            ],
-            self.filtered.getvalue().splitlines())
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_tap2subunit.py'
--- python-for-subunit2junitxml/subunit/tests/test_tap2subunit.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_tap2subunit.py	1970-01-01 00:00:00 +0000
@@ -1,445 +0,0 @@
-#
-#  subunit: extensions to python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-"""Tests for TAP2SubUnit."""
-
-import unittest
-
-from testtools.compat import StringIO
-
-import subunit
-
-
-class TestTAP2SubUnit(unittest.TestCase):
-    """Tests for TAP2SubUnit.
-
-    These tests test TAP string data in, and subunit string data out.
-    This is ok because the subunit protocol is intended to be stable,
-    but it might be easier/pithier to write tests against TAP string in,
-    parsed subunit objects out (by hooking the subunit stream to a subunit
-    protocol server).
-    """
-
-    def setUp(self):
-        self.tap = StringIO()
-        self.subunit = StringIO()
-
-    def test_skip_entire_file(self):
-        # A file
-        # 1..- # Skipped: comment
-        # results in a single skipped test.
-        self.tap.write("1..0 # Skipped: entire file skipped\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test file skip",
-            "skip file skip [",
-            "Skipped: entire file skipped",
-            "]",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_test_pass(self):
-        # A file
-        # ok
-        # results in a passed test with name 'test 1' (a synthetic name as tap
-        # does not require named fixtures - it is the first test in the tap
-        # stream).
-        self.tap.write("ok\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "success test 1",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_test_number_pass(self):
-        # A file
-        # ok 1
-        # results in a passed test with name 'test 1'
-        self.tap.write("ok 1\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "success test 1",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_test_number_description_pass(self):
-        # A file
-        # ok 1 - There is a description
-        # results in a passed test with name 'test 1 - There is a description'
-        self.tap.write("ok 1 - There is a description\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1 - There is a description",
-            "success test 1 - There is a description",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_test_description_pass(self):
-        # A file
-        # ok There is a description
-        # results in a passed test with name 'test 1 There is a description'
-        self.tap.write("ok There is a description\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1 There is a description",
-            "success test 1 There is a description",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_SKIP_skip(self):
-        # A file
-        # ok # SKIP
-        # results in a skip test with name 'test 1'
-        self.tap.write("ok # SKIP\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "skip test 1",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_skip_number_comment_lowercase(self):
-        self.tap.write("ok 1 # skip no samba environment available, skipping compilation\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "skip test 1 [", 
-            "no samba environment available, skipping compilation",
-            "]"
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_number_description_SKIP_skip_comment(self):
-        # A file
-        # ok 1 foo  # SKIP Not done yet
-        # results in a skip test with name 'test 1 foo' and a log of
-        # Not done yet
-        self.tap.write("ok 1 foo  # SKIP Not done yet\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1 foo",
-            "skip test 1 foo [",
-            "Not done yet",
-            "]",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_SKIP_skip_comment(self):
-        # A file
-        # ok # SKIP Not done yet
-        # results in a skip test with name 'test 1' and a log of Not done yet
-        self.tap.write("ok # SKIP Not done yet\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "skip test 1 [",
-            "Not done yet",
-            "]",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_TODO_xfail(self):
-        # A file
-        # ok # TODO
-        # results in an xfail test with name 'test 1'
-        self.tap.write("ok # TODO\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "xfail test 1",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_ok_TODO_xfail_comment(self):
-        # A file
-        # ok # TODO Not done yet
-        # results in an xfail test with name 'test 1' and a log of Not done yet
-        self.tap.write("ok # TODO Not done yet\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1",
-            "xfail test 1 [",
-            "Not done yet",
-            "]",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_bail_out_errors(self):
-        # A file with line in it
-        # Bail out! COMMENT
-        # is treated as an error
-        self.tap.write("ok 1 foo\n")
-        self.tap.write("Bail out! Lifejacket engaged\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            "test test 1 foo",
-            "success test 1 foo",
-            "test Bail out! Lifejacket engaged",
-            "error Bail out! Lifejacket engaged",
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_missing_test_at_end_with_plan_adds_error(self):
-        # A file
-        # 1..3
-        # ok first test
-        # not ok third test
-        # results in three tests, with the third being created
-        self.tap.write('1..3\n')
-        self.tap.write('ok first test\n')
-        self.tap.write('not ok second test\n')
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 first test',
-            'success test 1 first test',
-            'test test 2 second test',
-            'failure test 2 second test',
-            'test test 3',
-            'error test 3 [',
-            'test missing from TAP output',
-            ']',
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_missing_test_with_plan_adds_error(self):
-        # A file
-        # 1..3
-        # ok first test
-        # not ok 3 third test
-        # results in three tests, with the second being created
-        self.tap.write('1..3\n')
-        self.tap.write('ok first test\n')
-        self.tap.write('not ok 3 third test\n')
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 first test',
-            'success test 1 first test',
-            'test test 2',
-            'error test 2 [',
-            'test missing from TAP output',
-            ']',
-            'test test 3 third test',
-            'failure test 3 third test',
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_missing_test_no_plan_adds_error(self):
-        # A file
-        # ok first test
-        # not ok 3 third test
-        # results in three tests, with the second being created
-        self.tap.write('ok first test\n')
-        self.tap.write('not ok 3 third test\n')
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 first test',
-            'success test 1 first test',
-            'test test 2',
-            'error test 2 [',
-            'test missing from TAP output',
-            ']',
-            'test test 3 third test',
-            'failure test 3 third test',
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_four_tests_in_a_row_trailing_plan(self):
-        # A file
-        # ok 1 - first test in a script with no plan at all
-        # not ok 2 - second
-        # ok 3 - third
-        # not ok 4 - fourth
-        # 1..4
-        # results in four tests numbered and named
-        self.tap.write('ok 1 - first test in a script with trailing plan\n')
-        self.tap.write('not ok 2 - second\n')
-        self.tap.write('ok 3 - third\n')
-        self.tap.write('not ok 4 - fourth\n')
-        self.tap.write('1..4\n')
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 - first test in a script with trailing plan',
-            'success test 1 - first test in a script with trailing plan',
-            'test test 2 - second',
-            'failure test 2 - second',
-            'test test 3 - third',
-            'success test 3 - third',
-            'test test 4 - fourth',
-            'failure test 4 - fourth'
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_four_tests_in_a_row_with_plan(self):
-        # A file
-        # 1..4
-        # ok 1 - first test in a script with no plan at all
-        # not ok 2 - second
-        # ok 3 - third
-        # not ok 4 - fourth
-        # results in four tests numbered and named
-        self.tap.write('1..4\n')
-        self.tap.write('ok 1 - first test in a script with a plan\n')
-        self.tap.write('not ok 2 - second\n')
-        self.tap.write('ok 3 - third\n')
-        self.tap.write('not ok 4 - fourth\n')
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 - first test in a script with a plan',
-            'success test 1 - first test in a script with a plan',
-            'test test 2 - second',
-            'failure test 2 - second',
-            'test test 3 - third',
-            'success test 3 - third',
-            'test test 4 - fourth',
-            'failure test 4 - fourth'
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_four_tests_in_a_row_no_plan(self):
-        # A file
-        # ok 1 - first test in a script with no plan at all
-        # not ok 2 - second
-        # ok 3 - third
-        # not ok 4 - fourth
-        # results in four tests numbered and named
-        self.tap.write('ok 1 - first test in a script with no plan at all\n')
-        self.tap.write('not ok 2 - second\n')
-        self.tap.write('ok 3 - third\n')
-        self.tap.write('not ok 4 - fourth\n')
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 - first test in a script with no plan at all',
-            'success test 1 - first test in a script with no plan at all',
-            'test test 2 - second',
-            'failure test 2 - second',
-            'test test 3 - third',
-            'success test 3 - third',
-            'test test 4 - fourth',
-            'failure test 4 - fourth'
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_todo_and_skip(self):
-        # A file
-        # not ok 1 - a fail but # TODO but is TODO
-        # not ok 2 - another fail # SKIP instead
-        # results in two tests, numbered and commented.
-        self.tap.write("not ok 1 - a fail but # TODO but is TODO\n")
-        self.tap.write("not ok 2 - another fail # SKIP instead\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1 - a fail but',
-            'xfail test 1 - a fail but [',
-            'but is TODO',
-            ']',
-            'test test 2 - another fail',
-            'skip test 2 - another fail [',
-            'instead',
-            ']',
-            ],
-            self.subunit.getvalue().splitlines())
-
-    def test_leading_comments_add_to_next_test_log(self):
-        # A file
-        # # comment
-        # ok 
-        # ok
-        # results in a single test with the comment included
-        # in the first test and not the second.
-        self.tap.write("# comment\n")
-        self.tap.write("ok\n")
-        self.tap.write("ok\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1',
-            'success test 1 [',
-            '# comment',
-            ']',
-            'test test 2',
-            'success test 2',
-            ],
-            self.subunit.getvalue().splitlines())
-    
-    def test_trailing_comments_are_included_in_last_test_log(self):
-        # A file
-        # ok foo
-        # ok foo
-        # # comment
-        # results in a two tests, with the second having the comment
-        # attached to its log.
-        self.tap.write("ok\n")
-        self.tap.write("ok\n")
-        self.tap.write("# comment\n")
-        self.tap.seek(0)
-        result = subunit.TAP2SubUnit(self.tap, self.subunit)
-        self.assertEqual(0, result)
-        self.assertEqual([
-            'test test 1',
-            'success test 1',
-            'test test 2',
-            'success test 2 [',
-            '# comment',
-            ']',
-            ],
-            self.subunit.getvalue().splitlines())
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_test_protocol.py'
--- python-for-subunit2junitxml/subunit/tests/test_test_protocol.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_test_protocol.py	1970-01-01 00:00:00 +0000
@@ -1,1299 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-import datetime
-import unittest
-import os
-
-from testtools import skipIf, TestCase
-from testtools.compat import _b, _u, BytesIO, StringIO
-from testtools.content import Content, TracebackContent
-from testtools.content_type import ContentType
-from testtools.tests.helpers import (
-    Python26TestResult,
-    Python27TestResult,
-    ExtendedTestResult,
-    )
-
-import subunit
-from subunit import _remote_exception_str, _remote_exception_str_chunked
-import subunit.iso8601 as iso8601
-
-
-class TestTestImports(unittest.TestCase):
-
-    def test_imports(self):
-        from subunit import DiscardStream
-        from subunit import TestProtocolServer
-        from subunit import RemotedTestCase
-        from subunit import RemoteError
-        from subunit import ExecTestCase
-        from subunit import IsolatedTestCase
-        from subunit import TestProtocolClient
-        from subunit import ProtocolTestCase
-
-
-class TestDiscardStream(unittest.TestCase):
-
-    def test_write(self):
-        subunit.DiscardStream().write("content")
-
-
-class TestProtocolServerForward(unittest.TestCase):
-
-    def test_story(self):
-        client = unittest.TestResult()
-        out = BytesIO()
-        protocol = subunit.TestProtocolServer(client, forward_stream=out)
-        pipe = BytesIO(_b("test old mcdonald\n"
-                        "success old mcdonald\n"))
-        protocol.readFrom(pipe)
-        self.assertEqual(client.testsRun, 1)
-        self.assertEqual(pipe.getvalue(), out.getvalue())
-
-    def test_not_command(self):
-        client = unittest.TestResult()
-        out = BytesIO()
-        protocol = subunit.TestProtocolServer(client,
-            stream=subunit.DiscardStream(), forward_stream=out)
-        pipe = BytesIO(_b("success old mcdonald\n"))
-        protocol.readFrom(pipe)
-        self.assertEqual(client.testsRun, 0)
-        self.assertEqual(_b(""), out.getvalue())
-
-
-class TestTestProtocolServerPipe(unittest.TestCase):
-
-    def test_story(self):
-        client = unittest.TestResult()
-        protocol = subunit.TestProtocolServer(client)
-        pipe = BytesIO(_b("test old mcdonald\n"
-                        "success old mcdonald\n"
-                        "test bing crosby\n"
-                        "failure bing crosby [\n"
-                        "foo.c:53:ERROR invalid state\n"
-                        "]\n"
-                        "test an error\n"
-                        "error an error\n"))
-        protocol.readFrom(pipe)
-        bing = subunit.RemotedTestCase("bing crosby")
-        an_error = subunit.RemotedTestCase("an error")
-        self.assertEqual(client.errors,
-                         [(an_error, _remote_exception_str + '\n')])
-        self.assertEqual(
-            client.failures,
-            [(bing, _remote_exception_str + ": Text attachment: traceback\n"
-                "------------\nfoo.c:53:ERROR invalid state\n"
-                "------------\n\n")])
-        self.assertEqual(client.testsRun, 3)
-
-    def test_non_test_characters_forwarded_immediately(self):
-        pass
-
-
-class TestTestProtocolServerStartTest(unittest.TestCase):
-
-    def setUp(self):
-        self.client = Python26TestResult()
-        self.stream = BytesIO()
-        self.protocol = subunit.TestProtocolServer(self.client, self.stream)
-
-    def test_start_test(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.assertEqual(self.client._events,
-            [('startTest', subunit.RemotedTestCase("old mcdonald"))])
-
-    def test_start_testing(self):
-        self.protocol.lineReceived(_b("testing old mcdonald\n"))
-        self.assertEqual(self.client._events,
-            [('startTest', subunit.RemotedTestCase("old mcdonald"))])
-
-    def test_start_test_colon(self):
-        self.protocol.lineReceived(_b("test: old mcdonald\n"))
-        self.assertEqual(self.client._events,
-            [('startTest', subunit.RemotedTestCase("old mcdonald"))])
-
-    def test_indented_test_colon_ignored(self):
-        ignored_line = _b(" test: old mcdonald\n")
-        self.protocol.lineReceived(ignored_line)
-        self.assertEqual([], self.client._events)
-        self.assertEqual(self.stream.getvalue(), ignored_line)
-
-    def test_start_testing_colon(self):
-        self.protocol.lineReceived(_b("testing: old mcdonald\n"))
-        self.assertEqual(self.client._events,
-            [('startTest', subunit.RemotedTestCase("old mcdonald"))])
-
-
-class TestTestProtocolServerPassThrough(unittest.TestCase):
-
-    def setUp(self):
-        self.stdout = BytesIO()
-        self.test = subunit.RemotedTestCase("old mcdonald")
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client, self.stdout)
-
-    def keywords_before_test(self):
-        self.protocol.lineReceived(_b("failure a\n"))
-        self.protocol.lineReceived(_b("failure: a\n"))
-        self.protocol.lineReceived(_b("error a\n"))
-        self.protocol.lineReceived(_b("error: a\n"))
-        self.protocol.lineReceived(_b("success a\n"))
-        self.protocol.lineReceived(_b("success: a\n"))
-        self.protocol.lineReceived(_b("successful a\n"))
-        self.protocol.lineReceived(_b("successful: a\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.assertEqual(self.stdout.getvalue(), _b("failure a\n"
-                                                 "failure: a\n"
-                                                 "error a\n"
-                                                 "error: a\n"
-                                                 "success a\n"
-                                                 "success: a\n"
-                                                 "successful a\n"
-                                                 "successful: a\n"
-                                                 "]\n"))
-
-    def test_keywords_before_test(self):
-        self.keywords_before_test()
-        self.assertEqual(self.client._events, [])
-
-    def test_keywords_after_error(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("error old mcdonald\n"))
-        self.keywords_before_test()
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, {}),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_keywords_after_failure(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("failure old mcdonald\n"))
-        self.keywords_before_test()
-        self.assertEqual(self.client._events, [
-            ('startTest', self.test),
-            ('addFailure', self.test, {}),
-            ('stopTest', self.test),
-            ])
-
-    def test_keywords_after_success(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("success old mcdonald\n"))
-        self.keywords_before_test()
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addSuccess', self.test),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_keywords_after_test(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("failure a\n"))
-        self.protocol.lineReceived(_b("failure: a\n"))
-        self.protocol.lineReceived(_b("error a\n"))
-        self.protocol.lineReceived(_b("error: a\n"))
-        self.protocol.lineReceived(_b("success a\n"))
-        self.protocol.lineReceived(_b("success: a\n"))
-        self.protocol.lineReceived(_b("successful a\n"))
-        self.protocol.lineReceived(_b("successful: a\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.protocol.lineReceived(_b("failure old mcdonald\n"))
-        self.assertEqual(self.stdout.getvalue(), _b("test old mcdonald\n"
-                                                 "failure a\n"
-                                                 "failure: a\n"
-                                                 "error a\n"
-                                                 "error: a\n"
-                                                 "success a\n"
-                                                 "success: a\n"
-                                                 "successful a\n"
-                                                 "successful: a\n"
-                                                 "]\n"))
-        self.assertEqual(self.client._events, [
-            ('startTest', self.test),
-            ('addFailure', self.test, {}),
-            ('stopTest', self.test),
-            ])
-
-    def test_keywords_during_failure(self):
-        # A smoke test to make sure that the details parsers take control
-        # appropriately.
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("failure: old mcdonald [\n"))
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("failure a\n"))
-        self.protocol.lineReceived(_b("failure: a\n"))
-        self.protocol.lineReceived(_b("error a\n"))
-        self.protocol.lineReceived(_b("error: a\n"))
-        self.protocol.lineReceived(_b("success a\n"))
-        self.protocol.lineReceived(_b("success: a\n"))
-        self.protocol.lineReceived(_b("successful a\n"))
-        self.protocol.lineReceived(_b("successful: a\n"))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.assertEqual(self.stdout.getvalue(), _b(""))
-        details = {}
-        details['traceback'] = Content(ContentType("text", "x-traceback",
-            {'charset': 'utf8'}),
-            lambda:[_b(
-            "test old mcdonald\n"
-            "failure a\n"
-            "failure: a\n"
-            "error a\n"
-            "error: a\n"
-            "success a\n"
-            "success: a\n"
-            "successful a\n"
-            "successful: a\n"
-            "]\n")])
-        self.assertEqual(self.client._events, [
-            ('startTest', self.test),
-            ('addFailure', self.test, details),
-            ('stopTest', self.test),
-            ])
-
-    def test_stdout_passthrough(self):
-        """Lines received which cannot be interpreted as any protocol action
-        should be passed through to sys.stdout.
-        """
-        bytes = _b("randombytes\n")
-        self.protocol.lineReceived(bytes)
-        self.assertEqual(self.stdout.getvalue(), bytes)
-
-
-class TestTestProtocolServerLostConnection(unittest.TestCase):
-
-    def setUp(self):
-        self.client = Python26TestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.test = subunit.RemotedTestCase("old mcdonald")
-
-    def test_lost_connection_no_input(self):
-        self.protocol.lostConnection()
-        self.assertEqual([], self.client._events)
-
-    def test_lost_connection_after_start(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lostConnection()
-        failure = subunit.RemoteError(
-            _u("lost connection during test 'old mcdonald'"))
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, failure),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_lost_connected_after_error(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("error old mcdonald\n"))
-        self.protocol.lostConnection()
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, subunit.RemoteError(_u(""))),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def do_connection_lost(self, outcome, opening):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("%s old mcdonald %s" % (outcome, opening)))
-        self.protocol.lostConnection()
-        failure = subunit.RemoteError(
-            _u("lost connection during %s report of test 'old mcdonald'") %
-            outcome)
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, failure),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_lost_connection_during_error(self):
-        self.do_connection_lost("error", "[\n")
-
-    def test_lost_connection_during_error_details(self):
-        self.do_connection_lost("error", "[ multipart\n")
-
-    def test_lost_connected_after_failure(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("failure old mcdonald\n"))
-        self.protocol.lostConnection()
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addFailure', self.test, subunit.RemoteError(_u(""))),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_lost_connection_during_failure(self):
-        self.do_connection_lost("failure", "[\n")
-
-    def test_lost_connection_during_failure_details(self):
-        self.do_connection_lost("failure", "[ multipart\n")
-
-    def test_lost_connection_after_success(self):
-        self.protocol.lineReceived(_b("test old mcdonald\n"))
-        self.protocol.lineReceived(_b("success old mcdonald\n"))
-        self.protocol.lostConnection()
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addSuccess', self.test),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_lost_connection_during_success(self):
-        self.do_connection_lost("success", "[\n")
-
-    def test_lost_connection_during_success_details(self):
-        self.do_connection_lost("success", "[ multipart\n")
-
-    def test_lost_connection_during_skip(self):
-        self.do_connection_lost("skip", "[\n")
-
-    def test_lost_connection_during_skip_details(self):
-        self.do_connection_lost("skip", "[ multipart\n")
-
-    def test_lost_connection_during_xfail(self):
-        self.do_connection_lost("xfail", "[\n")
-
-    def test_lost_connection_during_xfail_details(self):
-        self.do_connection_lost("xfail", "[ multipart\n")
-
-    def test_lost_connection_during_uxsuccess(self):
-        self.do_connection_lost("uxsuccess", "[\n")
-
-    def test_lost_connection_during_uxsuccess_details(self):
-        self.do_connection_lost("uxsuccess", "[ multipart\n")
-
-
-class TestInTestMultipart(unittest.TestCase):
-
-    def setUp(self):
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = subunit.RemotedTestCase(_u("mcdonalds farm"))
-
-    def test__outcome_sets_details_parser(self):
-        self.protocol._reading_success_details.details_parser = None
-        self.protocol._state._outcome(0, _b("mcdonalds farm [ multipart\n"),
-            None, self.protocol._reading_success_details)
-        parser = self.protocol._reading_success_details.details_parser
-        self.assertNotEqual(None, parser)
-        self.assertTrue(isinstance(parser,
-            subunit.details.MultipartDetailsParser))
-
-
-class TestTestProtocolServerAddError(unittest.TestCase):
-
-    def setUp(self):
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = subunit.RemotedTestCase("mcdonalds farm")
-
-    def simple_error_keyword(self, keyword):
-        self.protocol.lineReceived(_b("%s mcdonalds farm\n" % keyword))
-        details = {}
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, details),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_simple_error(self):
-        self.simple_error_keyword("error")
-
-    def test_simple_error_colon(self):
-        self.simple_error_keyword("error:")
-
-    def test_error_empty_message(self):
-        self.protocol.lineReceived(_b("error mcdonalds farm [\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        details = {}
-        details['traceback'] = Content(ContentType("text", "x-traceback",
-            {'charset': 'utf8'}), lambda:[_b("")])
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, details),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def error_quoted_bracket(self, keyword):
-        self.protocol.lineReceived(_b("%s mcdonalds farm [\n" % keyword))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        details = {}
-        details['traceback'] = Content(ContentType("text", "x-traceback",
-            {'charset': 'utf8'}), lambda:[_b("]\n")])
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addError', self.test, details),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_error_quoted_bracket(self):
-        self.error_quoted_bracket("error")
-
-    def test_error_colon_quoted_bracket(self):
-        self.error_quoted_bracket("error:")
-
-
-class TestTestProtocolServerAddFailure(unittest.TestCase):
-
-    def setUp(self):
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = subunit.RemotedTestCase("mcdonalds farm")
-
-    def assertFailure(self, details):
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addFailure', self.test, details),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def simple_failure_keyword(self, keyword):
-        self.protocol.lineReceived(_b("%s mcdonalds farm\n" % keyword))
-        details = {}
-        self.assertFailure(details)
-
-    def test_simple_failure(self):
-        self.simple_failure_keyword("failure")
-
-    def test_simple_failure_colon(self):
-        self.simple_failure_keyword("failure:")
-
-    def test_failure_empty_message(self):
-        self.protocol.lineReceived(_b("failure mcdonalds farm [\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        details = {}
-        details['traceback'] = Content(ContentType("text", "x-traceback",
-            {'charset': 'utf8'}), lambda:[_b("")])
-        self.assertFailure(details)
-
-    def failure_quoted_bracket(self, keyword):
-        self.protocol.lineReceived(_b("%s mcdonalds farm [\n" % keyword))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        details = {}
-        details['traceback'] = Content(ContentType("text", "x-traceback",
-            {'charset': 'utf8'}), lambda:[_b("]\n")])
-        self.assertFailure(details)
-
-    def test_failure_quoted_bracket(self):
-        self.failure_quoted_bracket("failure")
-
-    def test_failure_colon_quoted_bracket(self):
-        self.failure_quoted_bracket("failure:")
-
-
-class TestTestProtocolServerAddxFail(unittest.TestCase):
-    """Tests for the xfail keyword.
-
-    In Python this can thunk through to Success due to stdlib limitations (see
-    README).
-    """
-
-    def capture_expected_failure(self, test, err):
-        self._events.append((test, err))
-
-    def setup_python26(self):
-        """Setup a test object ready to be xfailed and thunk to success."""
-        self.client = Python26TestResult()
-        self.setup_protocol()
-
-    def setup_python27(self):
-        """Setup a test object ready to be xfailed."""
-        self.client = Python27TestResult()
-        self.setup_protocol()
-
-    def setup_python_ex(self):
-        """Setup a test object ready to be xfailed with details."""
-        self.client = ExtendedTestResult()
-        self.setup_protocol()
-
-    def setup_protocol(self):
-        """Setup the protocol based on self.client."""
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = self.client._events[-1][-1]
-
-    def simple_xfail_keyword(self, keyword, as_success):
-        self.protocol.lineReceived(_b("%s mcdonalds farm\n" % keyword))
-        self.check_success_or_xfail(as_success)
-
-    def check_success_or_xfail(self, as_success, error_message=None):
-        if as_success:
-            self.assertEqual([
-                ('startTest', self.test),
-                ('addSuccess', self.test),
-                ('stopTest', self.test),
-                ], self.client._events)
-        else:
-            details = {}
-            if error_message is not None:
-                details['traceback'] = Content(
-                    ContentType("text", "x-traceback", {'charset': 'utf8'}),
-                    lambda:[_b(error_message)])
-            if isinstance(self.client, ExtendedTestResult):
-                value = details
-            else:
-                if error_message is not None:
-                    value = subunit.RemoteError(_u("Text attachment: traceback\n"
-                        "------------\n") + _u(error_message) +
-                        _u("------------\n"))
-                else:
-                    value = subunit.RemoteError()
-            self.assertEqual([
-                ('startTest', self.test),
-                ('addExpectedFailure', self.test, value),
-                ('stopTest', self.test),
-                ], self.client._events)
-
-    def test_simple_xfail(self):
-        self.setup_python26()
-        self.simple_xfail_keyword("xfail", True)
-        self.setup_python27()
-        self.simple_xfail_keyword("xfail", False)
-        self.setup_python_ex()
-        self.simple_xfail_keyword("xfail", False)
-
-    def test_simple_xfail_colon(self):
-        self.setup_python26()
-        self.simple_xfail_keyword("xfail:", True)
-        self.setup_python27()
-        self.simple_xfail_keyword("xfail:", False)
-        self.setup_python_ex()
-        self.simple_xfail_keyword("xfail:", False)
-
-    def test_xfail_empty_message(self):
-        self.setup_python26()
-        self.empty_message(True)
-        self.setup_python27()
-        self.empty_message(False)
-        self.setup_python_ex()
-        self.empty_message(False, error_message="")
-
-    def empty_message(self, as_success, error_message="\n"):
-        self.protocol.lineReceived(_b("xfail mcdonalds farm [\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.check_success_or_xfail(as_success, error_message)
-
-    def xfail_quoted_bracket(self, keyword, as_success):
-        # This tests that it is accepted, but cannot test that it is used
-        # today, because there is no way to expose it in Python so far.
-        self.protocol.lineReceived(_b("%s mcdonalds farm [\n" % keyword))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.check_success_or_xfail(as_success, "]\n")
-
-    def test_xfail_quoted_bracket(self):
-        self.setup_python26()
-        self.xfail_quoted_bracket("xfail", True)
-        self.setup_python27()
-        self.xfail_quoted_bracket("xfail", False)
-        self.setup_python_ex()
-        self.xfail_quoted_bracket("xfail", False)
-
-    def test_xfail_colon_quoted_bracket(self):
-        self.setup_python26()
-        self.xfail_quoted_bracket("xfail:", True)
-        self.setup_python27()
-        self.xfail_quoted_bracket("xfail:", False)
-        self.setup_python_ex()
-        self.xfail_quoted_bracket("xfail:", False)
-
-
-class TestTestProtocolServerAddunexpectedSuccess(TestCase):
-    """Tests for the uxsuccess keyword."""
-
-    def capture_expected_failure(self, test, err):
-        self._events.append((test, err))
-
-    def setup_python26(self):
-        """Setup a test object ready to be marked uxsuccess and thunk to failure."""
-        self.client = Python26TestResult()
-        self.setup_protocol()
-
-    def setup_python27(self):
-        """Setup a test object ready to be marked uxsuccess."""
-        self.client = Python27TestResult()
-        self.setup_protocol()
-
-    def setup_python_ex(self):
-        """Setup a test object ready to be marked uxsuccess with details."""
-        self.client = ExtendedTestResult()
-        self.setup_protocol()
-
-    def setup_protocol(self):
-        """Setup the protocol based on self.client."""
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = self.client._events[-1][-1]
-
-    def simple_uxsuccess_keyword(self, keyword, as_fail):
-        self.protocol.lineReceived(_b("%s mcdonalds farm\n" % keyword))
-        self.check_fail_or_uxsuccess(as_fail)
-
-    def check_fail_or_uxsuccess(self, as_fail, error_message=None):
-        details = {}
-        if error_message is not None:
-            details['traceback'] = Content(
-                ContentType("text", "x-traceback", {'charset': 'utf8'}),
-                lambda:[_b(error_message)])
-        if isinstance(self.client, ExtendedTestResult):
-            value = details
-        else:
-            value = None
-        if as_fail:
-            self.client._events[1] = self.client._events[1][:2]
-            # The value is generated within the ExtendedToOriginalDecorator:
-            # TODO: use the testtools matcher to check on this.
-            self.assertEqual([
-                ('startTest', self.test),
-                ('addFailure', self.test),
-                ('stopTest', self.test),
-                ], self.client._events)
-        elif value:
-            self.assertEqual([
-                ('startTest', self.test),
-                ('addUnexpectedSuccess', self.test, value),
-                ('stopTest', self.test),
-                ], self.client._events)
-        else:
-            self.assertEqual([
-                ('startTest', self.test),
-                ('addUnexpectedSuccess', self.test),
-                ('stopTest', self.test),
-                ], self.client._events)
-
-    def test_simple_uxsuccess(self):
-        self.setup_python26()
-        self.simple_uxsuccess_keyword("uxsuccess", True)
-        self.setup_python27()
-        self.simple_uxsuccess_keyword("uxsuccess", False)
-        self.setup_python_ex()
-        self.simple_uxsuccess_keyword("uxsuccess", False)
-
-    def test_simple_uxsuccess_colon(self):
-        self.setup_python26()
-        self.simple_uxsuccess_keyword("uxsuccess:", True)
-        self.setup_python27()
-        self.simple_uxsuccess_keyword("uxsuccess:", False)
-        self.setup_python_ex()
-        self.simple_uxsuccess_keyword("uxsuccess:", False)
-
-    def test_uxsuccess_empty_message(self):
-        self.setup_python26()
-        self.empty_message(True)
-        self.setup_python27()
-        self.empty_message(False)
-        self.setup_python_ex()
-        self.empty_message(False, error_message="")
-
-    def empty_message(self, as_fail, error_message="\n"):
-        self.protocol.lineReceived(_b("uxsuccess mcdonalds farm [\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.check_fail_or_uxsuccess(as_fail, error_message)
-
-    def uxsuccess_quoted_bracket(self, keyword, as_fail):
-        self.protocol.lineReceived(_b("%s mcdonalds farm [\n" % keyword))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.check_fail_or_uxsuccess(as_fail, "]\n")
-
-    def test_uxsuccess_quoted_bracket(self):
-        self.setup_python26()
-        self.uxsuccess_quoted_bracket("uxsuccess", True)
-        self.setup_python27()
-        self.uxsuccess_quoted_bracket("uxsuccess", False)
-        self.setup_python_ex()
-        self.uxsuccess_quoted_bracket("uxsuccess", False)
-
-    def test_uxsuccess_colon_quoted_bracket(self):
-        self.setup_python26()
-        self.uxsuccess_quoted_bracket("uxsuccess:", True)
-        self.setup_python27()
-        self.uxsuccess_quoted_bracket("uxsuccess:", False)
-        self.setup_python_ex()
-        self.uxsuccess_quoted_bracket("uxsuccess:", False)
-
-
-class TestTestProtocolServerAddSkip(unittest.TestCase):
-    """Tests for the skip keyword.
-
-    In Python this meets the testtools extended TestResult contract.
-    (See https://launchpad.net/testtools).
-    """
-
-    def setUp(self):
-        """Setup a test object ready to be skipped."""
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = self.client._events[-1][-1]
-
-    def assertSkip(self, reason):
-        details = {}
-        if reason is not None:
-            details['reason'] = Content(
-                ContentType("text", "plain"), lambda:[reason])
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addSkip', self.test, details),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def simple_skip_keyword(self, keyword):
-        self.protocol.lineReceived(_b("%s mcdonalds farm\n" % keyword))
-        self.assertSkip(None)
-
-    def test_simple_skip(self):
-        self.simple_skip_keyword("skip")
-
-    def test_simple_skip_colon(self):
-        self.simple_skip_keyword("skip:")
-
-    def test_skip_empty_message(self):
-        self.protocol.lineReceived(_b("skip mcdonalds farm [\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.assertSkip(_b(""))
-
-    def skip_quoted_bracket(self, keyword):
-        # This tests that it is accepted, but cannot test that it is used
-        # today, because there is no way to expose it in Python so far.
-        self.protocol.lineReceived(_b("%s mcdonalds farm [\n" % keyword))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        self.assertSkip(_b("]\n"))
-
-    def test_skip_quoted_bracket(self):
-        self.skip_quoted_bracket("skip")
-
-    def test_skip_colon_quoted_bracket(self):
-        self.skip_quoted_bracket("skip:")
-
-
-class TestTestProtocolServerAddSuccess(unittest.TestCase):
-
-    def setUp(self):
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        self.test = subunit.RemotedTestCase("mcdonalds farm")
-
-    def simple_success_keyword(self, keyword):
-        self.protocol.lineReceived(_b("%s mcdonalds farm\n" % keyword))
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addSuccess', self.test),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_simple_success(self):
-        self.simple_success_keyword("successful")
-
-    def test_simple_success_colon(self):
-        self.simple_success_keyword("successful:")
-
-    def assertSuccess(self, details):
-        self.assertEqual([
-            ('startTest', self.test),
-            ('addSuccess', self.test, details),
-            ('stopTest', self.test),
-            ], self.client._events)
-
-    def test_success_empty_message(self):
-        self.protocol.lineReceived(_b("success mcdonalds farm [\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        details = {}
-        details['message'] = Content(ContentType("text", "plain"),
-            lambda:[_b("")])
-        self.assertSuccess(details)
-
-    def success_quoted_bracket(self, keyword):
-        # This tests that it is accepted, but cannot test that it is used
-        # today, because there is no way to expose it in Python so far.
-        self.protocol.lineReceived(_b("%s mcdonalds farm [\n" % keyword))
-        self.protocol.lineReceived(_b(" ]\n"))
-        self.protocol.lineReceived(_b("]\n"))
-        details = {}
-        details['message'] = Content(ContentType("text", "plain"),
-            lambda:[_b("]\n")])
-        self.assertSuccess(details)
-
-    def test_success_quoted_bracket(self):
-        self.success_quoted_bracket("success")
-
-    def test_success_colon_quoted_bracket(self):
-        self.success_quoted_bracket("success:")
-
-
-class TestTestProtocolServerProgress(unittest.TestCase):
-    """Test receipt of progress: directives."""
-
-    def test_progress_accepted_stdlib(self):
-        self.result = Python26TestResult()
-        self.stream = BytesIO()
-        self.protocol = subunit.TestProtocolServer(self.result,
-            stream=self.stream)
-        self.protocol.lineReceived(_b("progress: 23"))
-        self.protocol.lineReceived(_b("progress: -2"))
-        self.protocol.lineReceived(_b("progress: +4"))
-        self.assertEqual(_b(""), self.stream.getvalue())
-
-    def test_progress_accepted_extended(self):
-        # With a progress capable TestResult, progress events are emitted.
-        self.result = ExtendedTestResult()
-        self.stream = BytesIO()
-        self.protocol = subunit.TestProtocolServer(self.result,
-            stream=self.stream)
-        self.protocol.lineReceived(_b("progress: 23"))
-        self.protocol.lineReceived(_b("progress: push"))
-        self.protocol.lineReceived(_b("progress: -2"))
-        self.protocol.lineReceived(_b("progress: pop"))
-        self.protocol.lineReceived(_b("progress: +4"))
-        self.assertEqual(_b(""), self.stream.getvalue())
-        self.assertEqual([
-            ('progress', 23, subunit.PROGRESS_SET),
-            ('progress', None, subunit.PROGRESS_PUSH),
-            ('progress', -2, subunit.PROGRESS_CUR),
-            ('progress', None, subunit.PROGRESS_POP),
-            ('progress', 4, subunit.PROGRESS_CUR),
-            ], self.result._events)
-
-
-class TestTestProtocolServerStreamTags(unittest.TestCase):
-    """Test managing tags on the protocol level."""
-
-    def setUp(self):
-        self.client = ExtendedTestResult()
-        self.protocol = subunit.TestProtocolServer(self.client)
-
-    def test_initial_tags(self):
-        self.protocol.lineReceived(_b("tags: foo bar:baz  quux\n"))
-        self.assertEqual([
-            ('tags', set(["foo", "bar:baz", "quux"]), set()),
-            ], self.client._events)
-
-    def test_minus_removes_tags(self):
-        self.protocol.lineReceived(_b("tags: -bar quux\n"))
-        self.assertEqual([
-            ('tags', set(["quux"]), set(["bar"])),
-            ], self.client._events)
-
-    def test_tags_do_not_get_set_on_test(self):
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        test = self.client._events[0][-1]
-        self.assertEqual(None, getattr(test, 'tags', None))
-
-    def test_tags_do_not_get_set_on_global_tags(self):
-        self.protocol.lineReceived(_b("tags: foo bar\n"))
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        test = self.client._events[-1][-1]
-        self.assertEqual(None, getattr(test, 'tags', None))
-
-    def test_tags_get_set_on_test_tags(self):
-        self.protocol.lineReceived(_b("test mcdonalds farm\n"))
-        test = self.client._events[-1][-1]
-        self.protocol.lineReceived(_b("tags: foo bar\n"))
-        self.protocol.lineReceived(_b("success mcdonalds farm\n"))
-        self.assertEqual(None, getattr(test, 'tags', None))
-
-
-class TestTestProtocolServerStreamTime(unittest.TestCase):
-    """Test managing time information at the protocol level."""
-
-    def test_time_accepted_stdlib(self):
-        self.result = Python26TestResult()
-        self.stream = BytesIO()
-        self.protocol = subunit.TestProtocolServer(self.result,
-            stream=self.stream)
-        self.protocol.lineReceived(_b("time: 2001-12-12 12:59:59Z\n"))
-        self.assertEqual(_b(""), self.stream.getvalue())
-
-    def test_time_accepted_extended(self):
-        self.result = ExtendedTestResult()
-        self.stream = BytesIO()
-        self.protocol = subunit.TestProtocolServer(self.result,
-            stream=self.stream)
-        self.protocol.lineReceived(_b("time: 2001-12-12 12:59:59Z\n"))
-        self.assertEqual(_b(""), self.stream.getvalue())
-        self.assertEqual([
-            ('time', datetime.datetime(2001, 12, 12, 12, 59, 59, 0,
-            iso8601.Utc()))
-            ], self.result._events)
-
-
-class TestRemotedTestCase(unittest.TestCase):
-
-    def test_simple(self):
-        test = subunit.RemotedTestCase("A test description")
-        self.assertRaises(NotImplementedError, test.setUp)
-        self.assertRaises(NotImplementedError, test.tearDown)
-        self.assertEqual("A test description",
-                         test.shortDescription())
-        self.assertEqual("A test description",
-                         test.id())
-        self.assertEqual("A test description (subunit.RemotedTestCase)", "%s" % test)
-        self.assertEqual("<subunit.RemotedTestCase description="
-                         "'A test description'>", "%r" % test)
-        result = unittest.TestResult()
-        test.run(result)
-        self.assertEqual([(test, _remote_exception_str + ": "
-                                 "Cannot run RemotedTestCases.\n\n")],
-                         result.errors)
-        self.assertEqual(1, result.testsRun)
-        another_test = subunit.RemotedTestCase("A test description")
-        self.assertEqual(test, another_test)
-        different_test = subunit.RemotedTestCase("ofo")
-        self.assertNotEqual(test, different_test)
-        self.assertNotEqual(another_test, different_test)
-
-
-class TestRemoteError(unittest.TestCase):
-
-    def test_eq(self):
-        error = subunit.RemoteError(_u("Something went wrong"))
-        another_error = subunit.RemoteError(_u("Something went wrong"))
-        different_error = subunit.RemoteError(_u("boo!"))
-        self.assertEqual(error, another_error)
-        self.assertNotEqual(error, different_error)
-        self.assertNotEqual(different_error, another_error)
-
-    def test_empty_constructor(self):
-        self.assertEqual(subunit.RemoteError(), subunit.RemoteError(_u("")))
-
-
-class TestExecTestCase(unittest.TestCase):
-
-    class SampleExecTestCase(subunit.ExecTestCase):
-
-        def test_sample_method(self):
-            """sample-script.py"""
-            # the sample script runs three tests, one each
-            # that fails, errors and succeeds
-
-        def test_sample_method_args(self):
-            """sample-script.py foo"""
-            # sample that will run just one test.
-
-    def test_construct(self):
-        test = self.SampleExecTestCase("test_sample_method")
-        self.assertEqual(test.script,
-                         subunit.join_dir(__file__, 'sample-script.py'))
-
-    def test_args(self):
-        result = unittest.TestResult()
-        test = self.SampleExecTestCase("test_sample_method_args")
-        test.run(result)
-        self.assertEqual(1, result.testsRun)
-
-    def test_run(self):
-        result = ExtendedTestResult()
-        test = self.SampleExecTestCase("test_sample_method")
-        test.run(result)
-        mcdonald = subunit.RemotedTestCase("old mcdonald")
-        bing = subunit.RemotedTestCase("bing crosby")
-        bing_details = {}
-        bing_details['traceback'] = Content(ContentType("text", "x-traceback",
-            {'charset': 'utf8'}), lambda:[_b("foo.c:53:ERROR invalid state\n")])
-        an_error = subunit.RemotedTestCase("an error")
-        error_details = {}
-        self.assertEqual([
-            ('startTest', mcdonald),
-            ('addSuccess', mcdonald),
-            ('stopTest', mcdonald),
-            ('startTest', bing),
-            ('addFailure', bing, bing_details),
-            ('stopTest', bing),
-            ('startTest', an_error),
-            ('addError', an_error, error_details),
-            ('stopTest', an_error),
-            ], result._events)
-
-    def test_debug(self):
-        test = self.SampleExecTestCase("test_sample_method")
-        test.debug()
-
-    def test_count_test_cases(self):
-        """TODO run the child process and count responses to determine the count."""
-
-    def test_join_dir(self):
-        sibling = subunit.join_dir(__file__, 'foo')
-        filedir = os.path.abspath(os.path.dirname(__file__))
-        expected = os.path.join(filedir, 'foo')
-        self.assertEqual(sibling, expected)
-
-
-class DoExecTestCase(subunit.ExecTestCase):
-
-    def test_working_script(self):
-        """sample-two-script.py"""
-
-
-class TestIsolatedTestCase(TestCase):
-
-    class SampleIsolatedTestCase(subunit.IsolatedTestCase):
-
-        SETUP = False
-        TEARDOWN = False
-        TEST = False
-
-        def setUp(self):
-            TestIsolatedTestCase.SampleIsolatedTestCase.SETUP = True
-
-        def tearDown(self):
-            TestIsolatedTestCase.SampleIsolatedTestCase.TEARDOWN = True
-
-        def test_sets_global_state(self):
-            TestIsolatedTestCase.SampleIsolatedTestCase.TEST = True
-
-
-    def test_construct(self):
-        self.SampleIsolatedTestCase("test_sets_global_state")
-
-    @skipIf(os.name != "posix", "Need a posix system for forking tests")
-    def test_run(self):
-        result = unittest.TestResult()
-        test = self.SampleIsolatedTestCase("test_sets_global_state")
-        test.run(result)
-        self.assertEqual(result.testsRun, 1)
-        self.assertEqual(self.SampleIsolatedTestCase.SETUP, False)
-        self.assertEqual(self.SampleIsolatedTestCase.TEARDOWN, False)
-        self.assertEqual(self.SampleIsolatedTestCase.TEST, False)
-
-    def test_debug(self):
-        pass
-        #test = self.SampleExecTestCase("test_sample_method")
-        #test.debug()
-
-
-class TestIsolatedTestSuite(TestCase):
-
-    class SampleTestToIsolate(unittest.TestCase):
-
-        SETUP = False
-        TEARDOWN = False
-        TEST = False
-
-        def setUp(self):
-            TestIsolatedTestSuite.SampleTestToIsolate.SETUP = True
-
-        def tearDown(self):
-            TestIsolatedTestSuite.SampleTestToIsolate.TEARDOWN = True
-
-        def test_sets_global_state(self):
-            TestIsolatedTestSuite.SampleTestToIsolate.TEST = True
-
-
-    def test_construct(self):
-        subunit.IsolatedTestSuite()
-
-    @skipIf(os.name != "posix", "Need a posix system for forking tests")
-    def test_run(self):
-        result = unittest.TestResult()
-        suite = subunit.IsolatedTestSuite()
-        sub_suite = unittest.TestSuite()
-        sub_suite.addTest(self.SampleTestToIsolate("test_sets_global_state"))
-        sub_suite.addTest(self.SampleTestToIsolate("test_sets_global_state"))
-        suite.addTest(sub_suite)
-        suite.addTest(self.SampleTestToIsolate("test_sets_global_state"))
-        suite.run(result)
-        self.assertEqual(result.testsRun, 3)
-        self.assertEqual(self.SampleTestToIsolate.SETUP, False)
-        self.assertEqual(self.SampleTestToIsolate.TEARDOWN, False)
-        self.assertEqual(self.SampleTestToIsolate.TEST, False)
-
-
-class TestTestProtocolClient(unittest.TestCase):
-
-    def setUp(self):
-        self.io = BytesIO()
-        self.protocol = subunit.TestProtocolClient(self.io)
-        self.test = TestTestProtocolClient("test_start_test")
-        self.sample_details = {'something':Content(
-            ContentType('text', 'plain'), lambda:[_b('serialised\nform')])}
-        self.sample_tb_details = dict(self.sample_details)
-        self.sample_tb_details['traceback'] = TracebackContent(
-            subunit.RemoteError(_u("boo qux")), self.test)
-
-    def test_start_test(self):
-        """Test startTest on a TestProtocolClient."""
-        self.protocol.startTest(self.test)
-        self.assertEqual(self.io.getvalue(), _b("test: %s\n" % self.test.id()))
-
-    def test_stop_test(self):
-        # stopTest doesn't output anything.
-        self.protocol.stopTest(self.test)
-        self.assertEqual(self.io.getvalue(), _b(""))
-
-    def test_add_success(self):
-        """Test addSuccess on a TestProtocolClient."""
-        self.protocol.addSuccess(self.test)
-        self.assertEqual(
-            self.io.getvalue(), _b("successful: %s\n" % self.test.id()))
-
-    def test_add_success_details(self):
-        """Test addSuccess on a TestProtocolClient with details."""
-        self.protocol.addSuccess(self.test, details=self.sample_details)
-        self.assertEqual(
-            self.io.getvalue(), _b("successful: %s [ multipart\n"
-                "Content-Type: text/plain\n"
-                "something\n"
-                "F\r\nserialised\nform0\r\n]\n" % self.test.id()))
-
-    def test_add_failure(self):
-        """Test addFailure on a TestProtocolClient."""
-        self.protocol.addFailure(
-            self.test, subunit.RemoteError(_u("boo qux")))
-        self.assertEqual(
-            self.io.getvalue(),
-            _b(('failure: %s [\n' + _remote_exception_str + ': boo qux\n]\n')
-            % self.test.id()))
-
-    def test_add_failure_details(self):
-        """Test addFailure on a TestProtocolClient with details."""
-        self.protocol.addFailure(
-            self.test, details=self.sample_tb_details)
-        self.assertEqual(
-            self.io.getvalue(),
-            _b(("failure: %s [ multipart\n"
-            "Content-Type: text/plain\n"
-            "something\n"
-            "F\r\nserialised\nform0\r\n"
-            "Content-Type: text/x-traceback;charset=utf8,language=python\n"
-            "traceback\n" + _remote_exception_str_chunked + ": boo qux\n0\r\n"
-            "]\n") % self.test.id()))
-
-    def test_add_error(self):
-        """Test addError on a TestProtocolClient."""
-        self.protocol.addError(
-            self.test, subunit.RemoteError(_u("phwoar crikey")))
-        self.assertEqual(
-            self.io.getvalue(),
-            _b(('error: %s [\n' +
-            _remote_exception_str + ": phwoar crikey\n"
-            "]\n") % self.test.id()))
-
-    def test_add_error_details(self):
-        """Test addError on a TestProtocolClient with details."""
-        self.protocol.addError(
-            self.test, details=self.sample_tb_details)
-        self.assertEqual(
-            self.io.getvalue(),
-            _b(("error: %s [ multipart\n"
-            "Content-Type: text/plain\n"
-            "something\n"
-            "F\r\nserialised\nform0\r\n"
-            "Content-Type: text/x-traceback;charset=utf8,language=python\n"
-            "traceback\n" + _remote_exception_str_chunked + ": boo qux\n0\r\n"
-            "]\n") % self.test.id()))
-
-    def test_add_expected_failure(self):
-        """Test addExpectedFailure on a TestProtocolClient."""
-        self.protocol.addExpectedFailure(
-            self.test, subunit.RemoteError(_u("phwoar crikey")))
-        self.assertEqual(
-            self.io.getvalue(),
-            _b(('xfail: %s [\n' +
-            _remote_exception_str + ": phwoar crikey\n"
-            "]\n") % self.test.id()))
-
-    def test_add_expected_failure_details(self):
-        """Test addExpectedFailure on a TestProtocolClient with details."""
-        self.protocol.addExpectedFailure(
-            self.test, details=self.sample_tb_details)
-        self.assertEqual(
-            self.io.getvalue(),
-            _b(("xfail: %s [ multipart\n"
-            "Content-Type: text/plain\n"
-            "something\n"
-            "F\r\nserialised\nform0\r\n"
-            "Content-Type: text/x-traceback;charset=utf8,language=python\n"
-            "traceback\n" + _remote_exception_str_chunked + ": boo qux\n0\r\n"
-            "]\n") % self.test.id()))
-
-
-    def test_add_skip(self):
-        """Test addSkip on a TestProtocolClient."""
-        self.protocol.addSkip(
-            self.test, "Has it really?")
-        self.assertEqual(
-            self.io.getvalue(),
-            _b('skip: %s [\nHas it really?\n]\n' % self.test.id()))
-
-    def test_add_skip_details(self):
-        """Test addSkip on a TestProtocolClient with details."""
-        details = {'reason':Content(
-            ContentType('text', 'plain'), lambda:[_b('Has it really?')])}
-        self.protocol.addSkip(self.test, details=details)
-        self.assertEqual(
-            self.io.getvalue(),
-            _b("skip: %s [ multipart\n"
-            "Content-Type: text/plain\n"
-            "reason\n"
-            "E\r\nHas it really?0\r\n"
-            "]\n" % self.test.id()))
-
-    def test_progress_set(self):
-        self.protocol.progress(23, subunit.PROGRESS_SET)
-        self.assertEqual(self.io.getvalue(), _b('progress: 23\n'))
-
-    def test_progress_neg_cur(self):
-        self.protocol.progress(-23, subunit.PROGRESS_CUR)
-        self.assertEqual(self.io.getvalue(), _b('progress: -23\n'))
-
-    def test_progress_pos_cur(self):
-        self.protocol.progress(23, subunit.PROGRESS_CUR)
-        self.assertEqual(self.io.getvalue(), _b('progress: +23\n'))
-
-    def test_progress_pop(self):
-        self.protocol.progress(1234, subunit.PROGRESS_POP)
-        self.assertEqual(self.io.getvalue(), _b('progress: pop\n'))
-
-    def test_progress_push(self):
-        self.protocol.progress(1234, subunit.PROGRESS_PUSH)
-        self.assertEqual(self.io.getvalue(), _b('progress: push\n'))
-
-    def test_time(self):
-        # Calling time() outputs a time signal immediately.
-        self.protocol.time(
-            datetime.datetime(2009,10,11,12,13,14,15, iso8601.Utc()))
-        self.assertEqual(
-            _b("time: 2009-10-11 12:13:14.000015Z\n"),
-            self.io.getvalue())
-
-    def test_add_unexpected_success(self):
-        """Test addUnexpectedSuccess on a TestProtocolClient."""
-        self.protocol.addUnexpectedSuccess(self.test)
-        self.assertEqual(
-            self.io.getvalue(), _b("uxsuccess: %s\n" % self.test.id()))
-
-    def test_add_unexpected_success_details(self):
-        """Test addUnexpectedSuccess on a TestProtocolClient with details."""
-        self.protocol.addUnexpectedSuccess(self.test, details=self.sample_details)
-        self.assertEqual(
-            self.io.getvalue(), _b("uxsuccess: %s [ multipart\n"
-                "Content-Type: text/plain\n"
-                "something\n"
-                "F\r\nserialised\nform0\r\n]\n" % self.test.id()))
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result

=== removed file 'python-for-subunit2junitxml/subunit/tests/test_test_results.py'
--- python-for-subunit2junitxml/subunit/tests/test_test_results.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/subunit/tests/test_test_results.py	1970-01-01 00:00:00 +0000
@@ -1,300 +0,0 @@
-#
-#  subunit: extensions to Python unittest to get test results from subprocesses.
-#  Copyright (C) 2009  Robert Collins <robertc@robertcollins.net>
-#
-#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-#  license at the users choice. A copy of both licenses are available in the
-#  project source as Apache-2.0 and BSD. You may not use this file except in
-#  compliance with one of these two licences.
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-#  license you chose for the specific language governing permissions and
-#  limitations under that license.
-#
-
-import datetime
-import unittest
-
-from testtools import TestCase
-from testtools.testresult.doubles import ExtendedTestResult
-
-import subunit
-import subunit.iso8601 as iso8601
-import subunit.test_results
-
-
-class LoggingDecorator(subunit.test_results.HookedTestResultDecorator):
-
-    def __init__(self, decorated):
-        self._calls = 0
-        super(LoggingDecorator, self).__init__(decorated)
-
-    def _before_event(self):
-        self._calls += 1
-
-
-class AssertBeforeTestResult(LoggingDecorator):
-    """A TestResult for checking preconditions."""
-
-    def __init__(self, decorated, test):
-        self.test = test
-        super(AssertBeforeTestResult, self).__init__(decorated)
-
-    def _before_event(self):
-        self.test.assertEqual(1, self.earlier._calls)
-        super(AssertBeforeTestResult, self)._before_event()
-
-
-class TimeCapturingResult(unittest.TestResult):
-
-    def __init__(self):
-        super(TimeCapturingResult, self).__init__()
-        self._calls = []
-
-    def time(self, a_datetime):
-        self._calls.append(a_datetime)
-
-
-class TestHookedTestResultDecorator(unittest.TestCase):
-
-    def setUp(self):
-        # An end to the chain
-        terminal = unittest.TestResult()
-        # Asserts that the call was made to self.result before asserter was
-        # called.
-        asserter = AssertBeforeTestResult(terminal, self)
-        # The result object we call, which must increase its call count.
-        self.result = LoggingDecorator(asserter)
-        asserter.earlier = self.result
-        self.decorated = asserter
-
-    def tearDown(self):
-        # The hook in self.result must have been called
-        self.assertEqual(1, self.result._calls)
-        # The hook in asserter must have been called too, otherwise the
-        # assertion about ordering won't have completed.
-        self.assertEqual(1, self.decorated._calls)
-
-    def test_startTest(self):
-        self.result.startTest(self)
-
-    def test_startTestRun(self):
-        self.result.startTestRun()
-
-    def test_stopTest(self):
-        self.result.stopTest(self)
-
-    def test_stopTestRun(self):
-        self.result.stopTestRun()
-
-    def test_addError(self):
-        self.result.addError(self, subunit.RemoteError())
-
-    def test_addError_details(self):
-        self.result.addError(self, details={})
-
-    def test_addFailure(self):
-        self.result.addFailure(self, subunit.RemoteError())
-
-    def test_addFailure_details(self):
-        self.result.addFailure(self, details={})
-
-    def test_addSuccess(self):
-        self.result.addSuccess(self)
-
-    def test_addSuccess_details(self):
-        self.result.addSuccess(self, details={})
-
-    def test_addSkip(self):
-        self.result.addSkip(self, "foo")
-
-    def test_addSkip_details(self):
-        self.result.addSkip(self, details={})
-
-    def test_addExpectedFailure(self):
-        self.result.addExpectedFailure(self, subunit.RemoteError())
-
-    def test_addExpectedFailure_details(self):
-        self.result.addExpectedFailure(self, details={})
-
-    def test_addUnexpectedSuccess(self):
-        self.result.addUnexpectedSuccess(self)
-
-    def test_addUnexpectedSuccess_details(self):
-        self.result.addUnexpectedSuccess(self, details={})
-
-    def test_progress(self):
-        self.result.progress(1, subunit.PROGRESS_SET)
-
-    def test_wasSuccessful(self):
-        self.result.wasSuccessful()
-
-    def test_shouldStop(self):
-        self.result.shouldStop
-
-    def test_stop(self):
-        self.result.stop()
-
-    def test_time(self):
-        self.result.time(None)
-
-
-class TestAutoTimingTestResultDecorator(unittest.TestCase):
-
-    def setUp(self):
-        # An end to the chain which captures time events.
-        terminal = TimeCapturingResult()
-        # The result object under test.
-        self.result = subunit.test_results.AutoTimingTestResultDecorator(
-            terminal)
-        self.decorated = terminal
-
-    def test_without_time_calls_time_is_called_and_not_None(self):
-        self.result.startTest(self)
-        self.assertEqual(1, len(self.decorated._calls))
-        self.assertNotEqual(None, self.decorated._calls[0])
-
-    def test_no_time_from_progress(self):
-        self.result.progress(1, subunit.PROGRESS_CUR)
-        self.assertEqual(0, len(self.decorated._calls))
-
-    def test_no_time_from_shouldStop(self):
-        self.decorated.stop()
-        self.result.shouldStop
-        self.assertEqual(0, len(self.decorated._calls))
-
-    def test_calling_time_inhibits_automatic_time(self):
-        # Calling time() outputs a time signal immediately and prevents
-        # automatically adding one when other methods are called.
-        time = datetime.datetime(2009,10,11,12,13,14,15, iso8601.Utc())
-        self.result.time(time)
-        self.result.startTest(self)
-        self.result.stopTest(self)
-        self.assertEqual(1, len(self.decorated._calls))
-        self.assertEqual(time, self.decorated._calls[0])
-
-    def test_calling_time_None_enables_automatic_time(self):
-        time = datetime.datetime(2009,10,11,12,13,14,15, iso8601.Utc())
-        self.result.time(time)
-        self.assertEqual(1, len(self.decorated._calls))
-        self.assertEqual(time, self.decorated._calls[0])
-        # Calling None passes the None through, in case other results care.
-        self.result.time(None)
-        self.assertEqual(2, len(self.decorated._calls))
-        self.assertEqual(None, self.decorated._calls[1])
-        # Calling other methods doesn't generate an automatic time event.
-        self.result.startTest(self)
-        self.assertEqual(3, len(self.decorated._calls))
-        self.assertNotEqual(None, self.decorated._calls[2])
-
-
-class TestTagCollapsingDecorator(TestCase):
-
-    def test_tags_forwarded_outside_of_tests(self):
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TagCollapsingDecorator(result)
-        tag_collapser.tags(set(['a', 'b']), set())
-        self.assertEquals(
-            [('tags', set(['a', 'b']), set([]))], result._events)
-
-    def test_tags_collapsed_inside_of_tests(self):
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TagCollapsingDecorator(result)
-        test = subunit.RemotedTestCase('foo')
-        tag_collapser.startTest(test)
-        tag_collapser.tags(set(['a']), set())
-        tag_collapser.tags(set(['b']), set(['a']))
-        tag_collapser.tags(set(['c']), set())
-        tag_collapser.stopTest(test)
-        self.assertEquals(
-            [('startTest', test),
-             ('tags', set(['b', 'c']), set(['a'])),
-             ('stopTest', test)],
-            result._events)
-
-    def test_tags_collapsed_inside_of_tests_different_ordering(self):
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TagCollapsingDecorator(result)
-        test = subunit.RemotedTestCase('foo')
-        tag_collapser.startTest(test)
-        tag_collapser.tags(set(), set(['a']))
-        tag_collapser.tags(set(['a', 'b']), set())
-        tag_collapser.tags(set(['c']), set())
-        tag_collapser.stopTest(test)
-        self.assertEquals(
-            [('startTest', test),
-             ('tags', set(['a', 'b', 'c']), set()),
-             ('stopTest', test)],
-            result._events)
-
-
-class TestTimeCollapsingDecorator(TestCase):
-
-    def make_time(self):
-        # Heh heh.
-        return datetime.datetime(
-            2000, 1, self.getUniqueInteger(), tzinfo=iso8601.UTC)
-
-    def test_initial_time_forwarded(self):
-        # We always forward the first time event we see.
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TimeCollapsingDecorator(result)
-        a_time = self.make_time()
-        tag_collapser.time(a_time)
-        self.assertEquals([('time', a_time)], result._events)
-
-    def test_time_collapsed_to_first_and_last(self):
-        # If there are many consecutive time events, only the first and last
-        # are sent through.
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TimeCollapsingDecorator(result)
-        times = [self.make_time() for i in range(5)]
-        for a_time in times:
-            tag_collapser.time(a_time)
-        tag_collapser.startTest(subunit.RemotedTestCase('foo'))
-        self.assertEquals(
-            [('time', times[0]), ('time', times[-1])], result._events[:-1])
-
-    def test_only_one_time_sent(self):
-        # If we receive a single time event followed by a non-time event, we
-        # send exactly one time event.
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TimeCollapsingDecorator(result)
-        a_time = self.make_time()
-        tag_collapser.time(a_time)
-        tag_collapser.startTest(subunit.RemotedTestCase('foo'))
-        self.assertEquals([('time', a_time)], result._events[:-1])
-
-    def test_duplicate_times_not_sent(self):
-        # Many time events with the exact same time are collapsed into one
-        # time event.
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TimeCollapsingDecorator(result)
-        a_time = self.make_time()
-        for i in range(5):
-            tag_collapser.time(a_time)
-        tag_collapser.startTest(subunit.RemotedTestCase('foo'))
-        self.assertEquals([('time', a_time)], result._events[:-1])
-
-    def test_no_times_inserted(self):
-        result = ExtendedTestResult()
-        tag_collapser = subunit.test_results.TimeCollapsingDecorator(result)
-        a_time = self.make_time()
-        tag_collapser.time(a_time)
-        foo = subunit.RemotedTestCase('foo')
-        tag_collapser.startTest(foo)
-        tag_collapser.addSuccess(foo)
-        tag_collapser.stopTest(foo)
-        self.assertEquals(
-            [('time', a_time),
-             ('startTest', foo),
-             ('addSuccess', foo),
-             ('stopTest', foo)], result._events)
-
-
-def test_suite():
-    loader = subunit.tests.TestUtil.TestLoader()
-    result = loader.loadTestsFromName(__name__)
-    return result
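
As a usage sketch (not part of this changeset), the collapsing behaviour asserted by the tests above could be reproduced against installed copies of subunit and testtools, assuming they still provide the same names:

    import subunit.test_results
    from subunit import RemotedTestCase
    from testtools.testresult.doubles import ExtendedTestResult

    result = ExtendedTestResult()
    collapser = subunit.test_results.TagCollapsingDecorator(result)
    test = RemotedTestCase('foo')
    collapser.startTest(test)
    collapser.tags(set(['a']), set())       # 'a' added...
    collapser.tags(set(['b']), set(['a']))  # ...then removed again, 'b' added
    collapser.stopTest(test)
    # Both tags() calls collapse into one event on the wrapped result:
    # [('startTest', test), ('tags', set(['b']), set(['a'])), ('stopTest', test)]
    print(result._events)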

=== removed directory 'python-for-subunit2junitxml/testtools'
=== removed file 'python-for-subunit2junitxml/testtools/__init__.py'
--- python-for-subunit2junitxml/testtools/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,80 +0,0 @@
-# Copyright (c) 2008-2011 testtools developers. See LICENSE for details.
-
-"""Extensions to the standard Python unittest library."""
-
-__all__ = [
-    'clone_test_with_new_id',
-    'ConcurrentTestSuite',
-    'ErrorHolder',
-    'ExpectedException',
-    'ExtendedToOriginalDecorator',
-    'iterate_tests',
-    'MultipleExceptions',
-    'MultiTestResult',
-    'PlaceHolder',
-    'run_test_with',
-    'TestCase',
-    'TestResult',
-    'TextTestResult',
-    'RunTest',
-    'skip',
-    'skipIf',
-    'skipUnless',
-    'ThreadsafeForwardingResult',
-    'try_import',
-    'try_imports',
-    ]
-
-from testtools.helpers import (
-    try_import,
-    try_imports,
-    )
-from testtools.matchers import (
-    Matcher,
-    )
-# Shut up, pyflakes. We are importing for documentation, not for namespacing.
-Matcher
-
-from testtools.runtest import (
-    MultipleExceptions,
-    RunTest,
-    )
-from testtools.testcase import (
-    ErrorHolder,
-    ExpectedException,
-    PlaceHolder,
-    TestCase,
-    clone_test_with_new_id,
-    run_test_with,
-    skip,
-    skipIf,
-    skipUnless,
-    )
-from testtools.testresult import (
-    ExtendedToOriginalDecorator,
-    MultiTestResult,
-    TestResult,
-    TextTestResult,
-    ThreadsafeForwardingResult,
-    )
-from testtools.testsuite import (
-    ConcurrentTestSuite,
-    iterate_tests,
-    )
-from testtools.distutilscmd import (
-    TestCommand,
-)
-
-# same format as sys.version_info: "A tuple containing the five components of
-# the version number: major, minor, micro, releaselevel, and serial. All
-# values except releaselevel are integers; the release level is 'alpha',
-# 'beta', 'candidate', or 'final'. The version_info value corresponding to the
-# Python version 2.0 is (2, 0, 0, 'final', 0)."  Additionally we use a
-# releaselevel of 'dev' for unreleased under-development code.
-#
-# If the releaselevel is 'alpha' then the major/minor/micro components are not
-# established at this point, and setup.py will use a version of next-$(revno).
-# If the releaselevel is 'final', then the tarball will be major.minor.micro.
-# Otherwise it is major.minor.micro~$(revno).
-
-__version__ = (0, 9, 10, 'final', 0)
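
The comment above describes how setup.py turns this tuple into a release string; a minimal sketch of that rule (the revno value is hypothetical, the real one comes from the branch):

    def version_string(version_info, revno=0):
        major, minor, micro, releaselevel, serial = version_info
        if releaselevel == 'final':
            return '%d.%d.%d' % (major, minor, micro)        # tarball release
        if releaselevel == 'alpha':
            return 'next-%d' % revno                         # not yet established
        return '%d.%d.%d~%d' % (major, minor, micro, revno)  # development snapshot

    print(version_string((0, 9, 10, 'final', 0)))  # -> 0.9.10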

=== removed file 'python-for-subunit2junitxml/testtools/_spinner.py'
--- python-for-subunit2junitxml/testtools/_spinner.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/_spinner.py	1970-01-01 00:00:00 +0000
@@ -1,316 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Evil reactor-spinning logic for running Twisted tests.
-
-This code is highly experimental, liable to change and not to be trusted.  If
-you couldn't write this yourself, you should not be using it.
-"""
-
-__all__ = [
-    'DeferredNotFired',
-    'extract_result',
-    'NoResultError',
-    'not_reentrant',
-    'ReentryError',
-    'Spinner',
-    'StaleJunkError',
-    'TimeoutError',
-    'trap_unhandled_errors',
-    ]
-
-import signal
-
-from testtools.monkey import MonkeyPatcher
-
-from twisted.internet import defer
-from twisted.internet.base import DelayedCall
-from twisted.internet.interfaces import IReactorThreads
-from twisted.python.failure import Failure
-from twisted.python.util import mergeFunctionMetadata
-
-
-class ReentryError(Exception):
-    """Raised when we try to re-enter a function that forbids it."""
-
-    def __init__(self, function):
-        Exception.__init__(self,
-            "%r in not re-entrant but was called within a call to itself."
-            % (function,))
-
-
-def not_reentrant(function, _calls={}):
-    """Decorates a function as not being re-entrant.
-
-    The decorated function will raise an error if called from within itself.
-    """
-    def decorated(*args, **kwargs):
-        if _calls.get(function, False):
-            raise ReentryError(function)
-        _calls[function] = True
-        try:
-            return function(*args, **kwargs)
-        finally:
-            _calls[function] = False
-    return mergeFunctionMetadata(function, decorated)
-
-
-class DeferredNotFired(Exception):
-    """Raised when we extract a result from a Deferred that's not fired yet."""
-
-
-def extract_result(deferred):
-    """Extract the result from a fired deferred.
-
-    It can happen that you have an API that returns Deferreds for
-    compatibility with Twisted code, but is in fact synchronous, i.e. the
-    Deferreds it returns have always fired by the time it returns.  In this
-    case, you can use this function to convert the result back into the usual
-    form for a synchronous API, i.e. the result itself or a raised exception.
-
-    It would be very bad form to use this as some way of checking if a
-    Deferred has fired.
-    """
-    failures = []
-    successes = []
-    deferred.addCallbacks(successes.append, failures.append)
-    if len(failures) == 1:
-        failures[0].raiseException()
-    elif len(successes) == 1:
-        return successes[0]
-    else:
-        raise DeferredNotFired("%r has not fired yet." % (deferred,))
-
-
-def trap_unhandled_errors(function, *args, **kwargs):
-    """Run a function, trapping any unhandled errors in Deferreds.
-
-    Assumes that 'function' will have handled any errors in Deferreds by the
-    time it is complete.  This is almost never true of any Twisted code, since
-    you can never tell when someone has added an errback to a Deferred.
-
-    If 'function' raises, then don't bother doing any unhandled error
-    jiggery-pokery, since something horrible has probably happened anyway.
-
-    :return: A tuple of '(result, error)', where 'result' is the value
-        returned by 'function' and 'error' is a list of 'defer.DebugInfo'
-        objects that have unhandled errors in Deferreds.
-    """
-    real_DebugInfo = defer.DebugInfo
-    debug_infos = []
-    def DebugInfo():
-        info = real_DebugInfo()
-        debug_infos.append(info)
-        return info
-    defer.DebugInfo = DebugInfo
-    try:
-        result = function(*args, **kwargs)
-    finally:
-        defer.DebugInfo = real_DebugInfo
-    errors = []
-    for info in debug_infos:
-        if info.failResult is not None:
-            errors.append(info)
-            # Disable the destructor that logs to error. We are already
-            # catching the error here.
-            info.__del__ = lambda: None
-    return result, errors
-
-
-class TimeoutError(Exception):
-    """Raised when run_in_reactor takes too long to run a function."""
-
-    def __init__(self, function, timeout):
-        Exception.__init__(self,
-            "%r took longer than %s seconds" % (function, timeout))
-
-
-class NoResultError(Exception):
-    """Raised when the reactor has stopped but we don't have any result."""
-
-    def __init__(self):
-        Exception.__init__(self,
-            "Tried to get test's result from Deferred when no result is "
-            "available.  Probably means we received SIGINT or similar.")
-
-
-class StaleJunkError(Exception):
-    """Raised when there's junk in the spinner from a previous run."""
-
-    def __init__(self, junk):
-        Exception.__init__(self,
-            "There was junk in the spinner from a previous run. "
-            "Use clear_junk() to clear it out: %r" % (junk,))
-
-
-class Spinner(object):
-    """Spin the reactor until a function is done.
-
-    This class emulates the behaviour of twisted.trial in that it grotesquely
-    and horribly spins the Twisted reactor while a function is running, and
-    then kills the reactor when that function is complete and all the
-    callbacks in its chains are done.
-    """
-
-    _UNSET = object()
-
-    # Signals that we save and restore for each spin.
-    _PRESERVED_SIGNALS = [
-        'SIGINT',
-        'SIGTERM',
-        'SIGCHLD',
-        ]
-
-    # There are many APIs within Twisted itself where a Deferred fires but
-    # leaves cleanup work scheduled for the reactor to do.  Arguably, many of
-    # these are bugs.  As such, we provide a facility to iterate the reactor
-    # event loop a number of times after every call, in order to shake out
-    # these buggy-but-commonplace events.  The default is 0, because that is
-    # the ideal, and it actually works for many cases.
-    _OBLIGATORY_REACTOR_ITERATIONS = 0
-
-    def __init__(self, reactor, debug=False):
-        """Construct a Spinner.
-
-        :param reactor: A Twisted reactor.
-        :param debug: Whether or not to enable Twisted's debugging.  Defaults
-            to False.
-        """
-        self._reactor = reactor
-        self._timeout_call = None
-        self._success = self._UNSET
-        self._failure = self._UNSET
-        self._saved_signals = []
-        self._junk = []
-        self._debug = debug
-
-    def _cancel_timeout(self):
-        if self._timeout_call:
-            self._timeout_call.cancel()
-
-    def _get_result(self):
-        if self._failure is not self._UNSET:
-            self._failure.raiseException()
-        if self._success is not self._UNSET:
-            return self._success
-        raise NoResultError()
-
-    def _got_failure(self, result):
-        self._cancel_timeout()
-        self._failure = result
-
-    def _got_success(self, result):
-        self._cancel_timeout()
-        self._success = result
-
-    def _stop_reactor(self, ignored=None):
-        """Stop the reactor!"""
-        self._reactor.crash()
-
-    def _timed_out(self, function, timeout):
-        e = TimeoutError(function, timeout)
-        self._failure = Failure(e)
-        self._stop_reactor()
-
-    def _clean(self):
-        """Clean up any junk in the reactor.
-
-        Will always iterate the reactor a number of times equal to
-        ``Spinner._OBLIGATORY_REACTOR_ITERATIONS``.  This is to work around
-        bugs in various Twisted APIs where a Deferred fires but still leaves
-        work (e.g. cancelling a call, actually closing a connection) for the
-        reactor to do.
-        """
-        for i in range(self._OBLIGATORY_REACTOR_ITERATIONS):
-            self._reactor.iterate(0)
-        junk = []
-        for delayed_call in self._reactor.getDelayedCalls():
-            delayed_call.cancel()
-            junk.append(delayed_call)
-        for selectable in self._reactor.removeAll():
-            # Twisted sends a 'KILL' signal to selectables that provide
-            # IProcessTransport.  Since only _dumbwin32proc processes do this,
-            # we aren't going to bother.
-            junk.append(selectable)
-        if IReactorThreads.providedBy(self._reactor):
-            if self._reactor.threadpool is not None:
-                self._reactor._stopThreadPool()
-        self._junk.extend(junk)
-        return junk
-
-    def clear_junk(self):
-        """Clear out our recorded junk.
-
-        :return: Whatever junk was there before.
-        """
-        junk = self._junk
-        self._junk = []
-        return junk
-
-    def get_junk(self):
-        """Return any junk that has been found on the reactor."""
-        return self._junk
-
-    def _save_signals(self):
-        available_signals = [
-            getattr(signal, name, None) for name in self._PRESERVED_SIGNALS]
-        self._saved_signals = [
-            (sig, signal.getsignal(sig)) for sig in available_signals if sig]
-
-    def _restore_signals(self):
-        for sig, hdlr in self._saved_signals:
-            signal.signal(sig, hdlr)
-        self._saved_signals = []
-
-    @not_reentrant
-    def run(self, timeout, function, *args, **kwargs):
-        """Run 'function' in a reactor.
-
-        If 'function' returns a Deferred, the reactor will keep spinning until
-        the Deferred fires and its chain completes or until the timeout is
-        reached -- whichever comes first.
-
-        :raise TimeoutError: If 'timeout' is reached before the Deferred
-            returned by 'function' has completed its callback chain.
-        :raise NoResultError: If the reactor is somehow interrupted before
-            the Deferred returned by 'function' has completed its callback
-            chain.
-        :raise StaleJunkError: If there's junk in the spinner from a previous
-            run.
-        :return: Whatever is at the end of the function's callback chain.  If
-            it's an error, then raise that.
-        """
-        debug = MonkeyPatcher()
-        if self._debug:
-            debug.add_patch(defer.Deferred, 'debug', True)
-            debug.add_patch(DelayedCall, 'debug', True)
-        debug.patch()
-        try:
-            junk = self.get_junk()
-            if junk:
-                raise StaleJunkError(junk)
-            self._save_signals()
-            self._timeout_call = self._reactor.callLater(
-                timeout, self._timed_out, function, timeout)
-            # Calling 'stop' on the reactor will make it impossible to
-            # re-start the reactor.  Since the default signal handlers for
-            # TERM, BREAK and INT all call reactor.stop(), we'll patch it over
-            # with crash.  XXX: It might be a better idea to either install
-            # custom signal handlers or to override the methods that are
-            # Twisted's signal handlers.
-            stop, self._reactor.stop = self._reactor.stop, self._reactor.crash
-            def run_function():
-                d = defer.maybeDeferred(function, *args, **kwargs)
-                d.addCallbacks(self._got_success, self._got_failure)
-                d.addBoth(self._stop_reactor)
-            try:
-                self._reactor.callWhenRunning(run_function)
-                self._reactor.run()
-            finally:
-                self._reactor.stop = stop
-                self._restore_signals()
-            try:
-                return self._get_result()
-            finally:
-                self._clean()
-        finally:
-            debug.restore()
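
A short usage sketch (not part of this changeset) of the extract_result() helper defined above, assuming Twisted and a testtools build that still ships testtools._spinner are installed:

    from twisted.internet import defer
    from testtools._spinner import DeferredNotFired, extract_result

    # A synchronous API that returns an already-fired Deferred.
    print(extract_result(defer.succeed(42)))   # -> 42

    try:
        extract_result(defer.Deferred())       # never fired
    except DeferredNotFired as e:
        print(e)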

=== removed file 'python-for-subunit2junitxml/testtools/compat.py'
--- python-for-subunit2junitxml/testtools/compat.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/compat.py	1970-01-01 00:00:00 +0000
@@ -1,286 +0,0 @@
-# Copyright (c) 2008-2011 testtools developers. See LICENSE for details.
-
-"""Compatibility support for python 2 and 3."""
-
-__metaclass__ = type
-__all__ = [
-    '_b',
-    '_u',
-    'advance_iterator',
-    'str_is_unicode',
-    'StringIO',
-    'BytesIO',
-    'unicode_output_stream',
-    ]
-
-import codecs
-import linecache
-import locale
-import os
-import re
-import sys
-import traceback
-
-from testtools.helpers import try_imports
-
-StringIO = try_imports(['StringIO.StringIO', 'io.StringIO'])
-
-BytesIO = try_imports(['io.BytesIO', 'BytesIO'])
-
-
-__u_doc = """A function version of the 'u' prefix.
-
-This is needed because the u prefix is not usable in Python 3 but is required
-in Python 2 to get a unicode object.
-
-To migrate code that was written as u'\u1234' in Python 2 to 2+3 change
-it to be _u('\u1234'). The Python 3 interpreter will decode it
-appropriately and the no-op _u for Python 3 lets it through, in Python
-2 we then call unicode-escape in the _u function.
-"""
-
-if sys.version_info > (3, 0):
-    def _u(s):
-        return s
-    _r = ascii
-    def _b(s):
-        """A byte literal."""
-        return s.encode("latin-1")
-    advance_iterator = next
-    def istext(x):
-        return isinstance(x, str)
-    def classtypes():
-        return (type,)
-    str_is_unicode = True
-else:
-    def _u(s):
-        # The double replace mangling going on prepares the string for
-        # unicode-escape - \foo is preserved, \u and \U are decoded.
-        return (s.replace("\\", "\\\\").replace("\\\\u", "\\u")
-            .replace("\\\\U", "\\U").decode("unicode-escape"))
-    _r = repr
-    def _b(s):
-        return s
-    advance_iterator = lambda it: it.next()
-    def istext(x):
-        return isinstance(x, basestring)
-    def classtypes():
-        import types
-        return (type, types.ClassType)
-    str_is_unicode = sys.platform == "cli"
-
-_u.__doc__ = __u_doc
-
-
-if sys.version_info > (2, 5):
-    all = all
-    _error_repr = BaseException.__repr__
-    def isbaseexception(exception):
-        """Return whether exception inherits from BaseException only"""
-        return (isinstance(exception, BaseException)
-            and not isinstance(exception, Exception))
-else:
-    def all(iterable):
-        """If contents of iterable all evaluate as boolean True"""
-        for obj in iterable:
-            if not obj:
-                return False
-        return True
-    def _error_repr(exception):
-        """Format an exception instance as Python 2.5 and later do"""
-        return exception.__class__.__name__ + repr(exception.args)
-    def isbaseexception(exception):
-        """Return whether exception would inherit from BaseException only
-
-        This approximates the hierarchy in Python 2.5 and later, compare the
-        difference between the diagrams at the bottom of the pages:
-        <http://docs.python.org/release/2.4.4/lib/module-exceptions.html>
-        <http://docs.python.org/release/2.5.4/lib/module-exceptions.html>
-        """
-        return isinstance(exception, (KeyboardInterrupt, SystemExit))
-
-
-def unicode_output_stream(stream):
-    """Get wrapper for given stream that writes any unicode without exception
-
-    Characters that can't be coerced to the encoding of the stream, or 'ascii'
-    if valid encoding is not found, will be replaced. The original stream may
-    be returned in situations where a wrapper is determined unneeded.
-
-    The wrapper only allows unicode to be written, not non-ascii bytestrings,
-    which is a good thing to ensure sanity and sanitation.
-    """
-    if sys.platform == "cli":
-        # Best to never encode before writing in IronPython
-        return stream
-    try:
-        writer = codecs.getwriter(stream.encoding or "")
-    except (AttributeError, LookupError):
-        # GZ 2010-06-16: Python 3 StringIO ends up here, but probably needs
-        #                different handling as it doesn't want bytestrings
-        return codecs.getwriter("ascii")(stream, "replace")
-    if writer.__module__.rsplit(".", 1)[1].startswith("utf"):
-        # The current stream has a unicode encoding so no error handler is needed
-        return stream
-    if sys.version_info > (3, 0):
-        # Python 3 doesn't seem to make this easy, handle a common case
-        try:
-            return stream.__class__(stream.buffer, stream.encoding, "replace",
-                stream.newlines, stream.line_buffering)
-        except AttributeError:
-            pass
-    return writer(stream, "replace")    
-
-
-# The default source encoding is actually "iso-8859-1" until Python 2.5 but
-# using non-ascii causes a deprecation warning in 2.4 and it's cleaner to
-# treat all versions the same way
-_default_source_encoding = "ascii"
-
-# Pattern specified in <http://www.python.org/dev/peps/pep-0263/>
-_cookie_search=re.compile("coding[:=]\s*([-\w.]+)").search
-
-def _detect_encoding(lines):
-    """Get the encoding of a Python source file from a list of lines as bytes
-
-    This function does less than tokenize.detect_encoding added in Python 3 as
-    it does not attempt to raise a SyntaxError when the interpreter would, it
-    just wants the encoding of a source file Python has already compiled and
-    determined is valid.
-    """
-    if not lines:
-        return _default_source_encoding
-    if lines[0].startswith("\xef\xbb\xbf"):
-        # Source starting with UTF-8 BOM is either UTF-8 or a SyntaxError
-        return "utf-8"
-    # Only the first two lines of the source file are examined
-    magic = _cookie_search("".join(lines[:2]))
-    if magic is None:
-        return _default_source_encoding
-    encoding = magic.group(1)
-    try:
-        codecs.lookup(encoding)
-    except LookupError:
-        # Some codecs raise something other than LookupError if they don't
-        # support the given error handler, but not the text ones that could
-        # actually be used for Python source code
-        return _default_source_encoding
-    return encoding
-
-
-class _EncodingTuple(tuple):
-    """A tuple type that can have an encoding attribute smuggled on"""
-
-
-def _get_source_encoding(filename):
-    """Detect, cache and return the encoding of Python source at filename"""
-    try:
-        return linecache.cache[filename].encoding
-    except (AttributeError, KeyError):
-        encoding = _detect_encoding(linecache.getlines(filename))
-        if filename in linecache.cache:
-            newtuple = _EncodingTuple(linecache.cache[filename])
-            newtuple.encoding = encoding
-            linecache.cache[filename] = newtuple
-        return encoding
-
-
-def _get_exception_encoding():
-    """Return the encoding we expect messages from the OS to be encoded in"""
-    if os.name == "nt":
-        # GZ 2010-05-24: Really want the codepage number instead, the error
-        #                handling of standard codecs is more deterministic
-        return "mbcs"
-    # GZ 2010-05-23: We need this call to be after initialisation, but there's
-    #                no benefit in asking more than once as it's a global
-    #                setting that can change after the message is formatted.
-    return locale.getlocale(locale.LC_MESSAGES)[1] or "ascii"
-
-
-def _exception_to_text(evalue):
-    """Try hard to get a sensible text value out of an exception instance"""
-    try:
-        return unicode(evalue)
-    except KeyboardInterrupt:
-        raise
-    except:
-        # Apparently this is what traceback._some_str does. Sigh - RBC 20100623
-        pass
-    try:
-        return str(evalue).decode(_get_exception_encoding(), "replace")
-    except KeyboardInterrupt:
-        raise
-    except:
-        # Apparently this is what traceback._some_str does. Sigh - RBC 20100623
-        pass
-    # Okay, out of ideas, let higher level handle it
-    return None
-
-
-# GZ 2010-05-23: This function is huge and horrible and I welcome suggestions
-#                on the best way to break it up
-_TB_HEADER = _u('Traceback (most recent call last):\n')
-def _format_exc_info(eclass, evalue, tb, limit=None):
-    """Format a stack trace and the exception information as unicode
-
-    Compatibility function for Python 2 which ensures each component of a
-    traceback is correctly decoded according to its origins.
-
-    Based on traceback.format_exception and related functions.
-    """
-    fs_enc = sys.getfilesystemencoding()
-    if tb:
-        list = [_TB_HEADER]
-        extracted_list = []
-        for filename, lineno, name, line in traceback.extract_tb(tb, limit):
-            extracted_list.append((
-                filename.decode(fs_enc, "replace"),
-                lineno,
-                name.decode("ascii", "replace"),
-                line and line.decode(
-                    _get_source_encoding(filename), "replace")))
-        list.extend(traceback.format_list(extracted_list))
-    else:
-        list = []
-    if evalue is None:
-        # Is a (deprecated) string exception
-        list.append((eclass + "\n").decode("ascii", "replace"))
-        return list
-    if isinstance(evalue, SyntaxError):
-        # Avoid duplicating the special formatting for SyntaxError here,
-        # instead create a new instance with unicode filename and line
-        # Potentially gives duff spacing, but that's a pre-existing issue
-        try:
-            msg, (filename, lineno, offset, line) = evalue
-        except (TypeError, ValueError):
-            pass # Strange exception instance, fall through to generic code
-        else:
-            # Errors during parsing give the line from buffer encoded as
-            # latin-1 or utf-8 or the encoding of the file depending on the
-            # coding and whether the patch for issue #1031213 is applied, so
-            # give up on trying to decode it and just read the file again
-            if line:
-                bytestr = linecache.getline(filename, lineno)
-                if bytestr:
-                    if lineno == 1 and bytestr.startswith("\xef\xbb\xbf"):
-                        bytestr = bytestr[3:]
-                    line = bytestr.decode(
-                        _get_source_encoding(filename), "replace")
-                    del linecache.cache[filename]
-                else:
-                    line = line.decode("ascii", "replace")
-            if filename:
-                filename = filename.decode(fs_enc, "replace")
-            evalue = eclass(msg, (filename, lineno, offset, line))
-            list.extend(traceback.format_exception_only(eclass, evalue))
-            return list
-    sclass = eclass.__name__
-    svalue = _exception_to_text(evalue)
-    if svalue:
-        list.append("%s: %s\n" % (sclass, svalue))
-    elif svalue is None:
-        # GZ 2010-05-24: Not a great fallback message, but keep for the moment
-        list.append("%s: <unprintable %s object>\n" % (sclass, sclass))
-    else:
-        list.append("%s\n" % sclass)
-    return list
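
The coding-cookie detection used by _detect_encoding() above boils down to the PEP 263 pattern; a standalone sketch using the same regular expression:

    import re

    # Same pattern as _cookie_search above.
    cookie_search = re.compile(r"coding[:=]\s*([-\w.]+)").search

    for line in ["# -*- coding: utf-8 -*-\n", "x = 1\n"]:
        magic = cookie_search(line)
        print(magic.group(1) if magic else "no cookie")
    # -> utf-8
    # -> no cookie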

=== removed file 'python-for-subunit2junitxml/testtools/content.py'
--- python-for-subunit2junitxml/testtools/content.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/content.py	1970-01-01 00:00:00 +0000
@@ -1,238 +0,0 @@
-# Copyright (c) 2009-2011 testtools developers. See LICENSE for details.
-
-"""Content - a MIME-like Content object."""
-
-__all__ = [
-    'attach_file',
-    'Content',
-    'content_from_file',
-    'content_from_stream',
-    'text_content',
-    'TracebackContent',
-    ]
-
-import codecs
-import os
-
-from testtools import try_import
-from testtools.compat import _b
-from testtools.content_type import ContentType, UTF8_TEXT
-from testtools.testresult import TestResult
-
-functools = try_import('functools')
-
-_join_b = _b("").join
-
-
-DEFAULT_CHUNK_SIZE = 4096
-
-
-def _iter_chunks(stream, chunk_size):
-    """Read 'stream' in chunks of 'chunk_size'.
-
-    :param stream: A file-like object to read from.
-    :param chunk_size: The size of each read from 'stream'.
-    """
-    chunk = stream.read(chunk_size)
-    while chunk:
-        yield chunk
-        chunk = stream.read(chunk_size)
-
-
-class Content(object):
-    """A MIME-like Content object.
-
-    Content objects can be serialised to bytes using the iter_bytes method.
-    If the Content-Type is recognised by other code, they are welcome to
-    look for richer contents than mere byte serialisation - for example in
-    memory object graphs etc. However, such code MUST be prepared to receive
-    a generic Content object that has been reconstructed from a byte stream.
-
-    :ivar content_type: The content type of this Content.
-    """
-
-    def __init__(self, content_type, get_bytes):
-        """Create a ContentType."""
-        if None in (content_type, get_bytes):
-            raise ValueError("None not permitted in %r, %r" % (
-                content_type, get_bytes))
-        self.content_type = content_type
-        self._get_bytes = get_bytes
-
-    def __eq__(self, other):
-        return (self.content_type == other.content_type and
-            _join_b(self.iter_bytes()) == _join_b(other.iter_bytes()))
-
-    def iter_bytes(self):
-        """Iterate over bytestrings of the serialised content."""
-        return self._get_bytes()
-
-    def iter_text(self):
-        """Iterate over the text of the serialised content.
-
-        This is only valid for text MIME types, and will use ISO-8859-1 if
-        no charset parameter is present in the MIME type. (This is somewhat
-    arbitrary, but consistent with RFC2616 3.7.1).
-
-        :raises ValueError: If the content type is not text/\*.
-        """
-        if self.content_type.type != "text":
-            raise ValueError("Not a text type %r" % self.content_type)
-        return self._iter_text()
-
-    def _iter_text(self):
-        """Worker for iter_text - does the decoding."""
-        encoding = self.content_type.parameters.get('charset', 'ISO-8859-1')
-        try:
-            # 2.5+
-            decoder = codecs.getincrementaldecoder(encoding)()
-            for bytes in self.iter_bytes():
-                yield decoder.decode(bytes)
-            final = decoder.decode(_b(''), True)
-            if final:
-                yield final
-        except AttributeError:
-            # < 2.5
-            bytes = ''.join(self.iter_bytes())
-            yield bytes.decode(encoding)
-
-    def __repr__(self):
-        return "<Content type=%r, value=%r>" % (
-            self.content_type, _join_b(self.iter_bytes()))
-
-
-class TracebackContent(Content):
-    """Content object for tracebacks.
-
-    This adapts an exc_info tuple to the Content interface.
-    text/x-traceback;language=python is used for the mime type, in order to
-    provide room for other languages to format their tracebacks differently.
-    """
-
-    def __init__(self, err, test):
-        """Create a TracebackContent for err."""
-        if err is None:
-            raise ValueError("err may not be None")
-        content_type = ContentType('text', 'x-traceback',
-            {"language": "python", "charset": "utf8"})
-        self._result = TestResult()
-        value = self._result._exc_info_to_unicode(err, test)
-        super(TracebackContent, self).__init__(
-            content_type, lambda: [value.encode("utf8")])
-
-
-def text_content(text):
-    """Create a `Content` object from some text.
-
-    This is useful for adding details which are short strings.
-    """
-    return Content(UTF8_TEXT, lambda: [text.encode('utf8')])
-
-
-
-def maybe_wrap(wrapper, func):
-    """Merge metadata for func into wrapper if functools is present."""
-    if functools is not None:
-        wrapper = functools.update_wrapper(wrapper, func)
-    return wrapper
-
-
-def content_from_file(path, content_type=None, chunk_size=DEFAULT_CHUNK_SIZE,
-                      buffer_now=False):
-    """Create a `Content` object from a file on disk.
-
-    Note that unless 'buffer_now' is explicitly passed in as True, the file
-    will only be read from when ``iter_bytes`` is called.
-
-    :param path: The path to the file to be used as content.
-    :param content_type: The type of content.  If not specified, defaults
-        to UTF8-encoded text/plain.
-    :param chunk_size: The size of chunks to read from the file.
-        Defaults to `DEFAULT_CHUNK_SIZE`.
-    :param buffer_now: If True, read the file from disk now and keep it in
-        memory. Otherwise, only read when the content is serialized.
-    """
-    if content_type is None:
-        content_type = UTF8_TEXT
-    def reader():
-        # This should be try:finally:, but python2.4 makes that hard. When
-        # We drop older python support we can make this use a context manager
-        # for maximum simplicity.
-        stream = open(path, 'rb')
-        for chunk in _iter_chunks(stream, chunk_size):
-            yield chunk
-        stream.close()
-    return content_from_reader(reader, content_type, buffer_now)
-
-
-def content_from_stream(stream, content_type=None,
-                        chunk_size=DEFAULT_CHUNK_SIZE, buffer_now=False):
-    """Create a `Content` object from a file-like stream.
-
-    Note that the stream will only be read from when ``iter_bytes`` is
-    called.
-
-    :param stream: A file-like object to read the content from. The stream
-        is not closed by this function or the content object it returns.
-    :param content_type: The type of content. If not specified, defaults
-        to UTF8-encoded text/plain.
-    :param chunk_size: The size of chunks to read from the file.
-        Defaults to `DEFAULT_CHUNK_SIZE`.
-    :param buffer_now: If True, reads from the stream right now. Otherwise,
-        only reads when the content is serialized. Defaults to False.
-    """
-    if content_type is None:
-        content_type = UTF8_TEXT
-    reader = lambda: _iter_chunks(stream, chunk_size)
-    return content_from_reader(reader, content_type, buffer_now)
-
-
-def content_from_reader(reader, content_type, buffer_now):
-    """Create a Content object that will obtain the content from reader.
-
-    :param reader: A callback to read the content. Should return an iterable of
-        bytestrings.
-    :param content_type: The content type to create.
-    :param buffer_now: If True the reader is evaluated immediately and
-        buffered.
-    """
-    if content_type is None:
-        content_type = UTF8_TEXT
-    if buffer_now:
-        contents = list(reader())
-        reader = lambda: contents
-    return Content(content_type, reader)
-
-
-def attach_file(detailed, path, name=None, content_type=None,
-                chunk_size=DEFAULT_CHUNK_SIZE, buffer_now=True):
-    """Attach a file to this test as a detail.
-
-    This is a convenience method wrapping around `addDetail`.
-
-    Note that if 'buffer_now' is passed as False, the file
-    *must* exist when the test result is called with the results of this
-    test, after the test has been torn down.
-
-    :param detailed: An object with details
-    :param path: The path to the file to attach.
-    :param name: The name to give to the detail for the attached file.
-    :param content_type: The content type of the file.  If not provided,
-        defaults to UTF8-encoded text/plain.
-    :param chunk_size: The size of chunks to read from the file.  Defaults
-        to something sensible.
-    :param buffer_now: If False the file content is read when the content
-        object is evaluated rather than when attach_file is called.
-        Note that this may be after any cleanups that 'detailed' has, so
-        if the file is a temporary file disabling buffer_now may cause the file
-        to be read after it is deleted. To handle those cases, using
-        attach_file as a cleanup is recommended because it guarantees a
-        sequence for when the attach_file call is made::
-
-            detailed.addCleanup(attach_file, detailed, 'foo.txt')
-    """
-    if name is None:
-        name = os.path.basename(path)
-    content_object = content_from_file(
-        path, content_type, chunk_size, buffer_now)
-    detailed.addDetail(name, content_object)
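
A usage sketch (not part of this changeset) of the Content helpers above, assuming an installed testtools exposes the same testtools.content API as this bundled copy:

    import tempfile
    from testtools.content import content_from_file, text_content

    detail = text_content(u'ran against build 1234')
    print(b''.join(detail.iter_bytes()))    # -> b'ran against build 1234'

    # content_from_file defers reading until iter_bytes() unless buffer_now=True.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b'hello')
    buffered = content_from_file(f.name, buffer_now=True)
    print(b''.join(buffered.iter_bytes()))  # -> b'hello'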

=== removed file 'python-for-subunit2junitxml/testtools/content_type.py'
--- python-for-subunit2junitxml/testtools/content_type.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/content_type.py	1970-01-01 00:00:00 +0000
@@ -1,33 +0,0 @@
-# Copyright (c) 2009-2010 testtools developers. See LICENSE for details.
-
-"""ContentType - a MIME Content Type."""
-
-
-class ContentType(object):
-    """A content type from http://www.iana.org/assignments/media-types/
-
-    :ivar type: The primary type, e.g. "text" or "application"
-    :ivar subtype: The subtype, e.g. "plain" or "octet-stream"
-    :ivar parameters: A dict of additional parameters specific to the
-        content type.
-    """
-
-    def __init__(self, primary_type, sub_type, parameters=None):
-        """Create a ContentType."""
-        if None in (primary_type, sub_type):
-            raise ValueError("None not permitted in %r, %r" % (
-                primary_type, sub_type))
-        self.type = primary_type
-        self.subtype = sub_type
-        self.parameters = parameters or {}
-
-    def __eq__(self, other):
-        if type(other) != ContentType:
-            return False
-        return self.__dict__ == other.__dict__
-
-    def __repr__(self):
-        return "%s/%s params=%s" % (self.type, self.subtype, self.parameters)
-
-
-UTF8_TEXT = ContentType('text', 'plain', {'charset': 'utf8'})
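
A brief sketch (not part of this changeset) of the value object above, assuming an installed testtools provides the same testtools.content_type module:

    from testtools.content_type import ContentType, UTF8_TEXT

    json_type = ContentType('application', 'json', {'charset': 'utf8'})
    print(json_type)  # -> application/json params={'charset': 'utf8'}
    print(UTF8_TEXT == ContentType('text', 'plain', {'charset': 'utf8'}))  # -> True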

=== removed file 'python-for-subunit2junitxml/testtools/deferredruntest.py'
--- python-for-subunit2junitxml/testtools/deferredruntest.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/deferredruntest.py	1970-01-01 00:00:00 +0000
@@ -1,335 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Individual test case execution for tests that return Deferreds.
-
-This module is highly experimental and is liable to change in ways that cause
-subtle failures in tests.  Use at your own peril.
-"""
-
-__all__ = [
-    'assert_fails_with',
-    'AsynchronousDeferredRunTest',
-    'AsynchronousDeferredRunTestForBrokenTwisted',
-    'SynchronousDeferredRunTest',
-    ]
-
-import sys
-
-from testtools.compat import StringIO
-from testtools.content import (
-    Content,
-    text_content,
-    )
-from testtools.content_type import UTF8_TEXT
-from testtools.runtest import RunTest
-from testtools._spinner import (
-    extract_result,
-    NoResultError,
-    Spinner,
-    TimeoutError,
-    trap_unhandled_errors,
-    )
-
-from twisted.internet import defer
-from twisted.python import log
-from twisted.trial.unittest import _LogObserver
-
-
-class _DeferredRunTest(RunTest):
-    """Base for tests that return Deferreds."""
-
-    def _got_user_failure(self, failure, tb_label='traceback'):
-        """We got a failure from user code."""
-        return self._got_user_exception(
-            (failure.type, failure.value, failure.getTracebackObject()),
-            tb_label=tb_label)
-
-
-class SynchronousDeferredRunTest(_DeferredRunTest):
-    """Runner for tests that return synchronous Deferreds."""
-
-    def _run_user(self, function, *args):
-        d = defer.maybeDeferred(function, *args)
-        d.addErrback(self._got_user_failure)
-        result = extract_result(d)
-        return result
-
-
-def run_with_log_observers(observers, function, *args, **kwargs):
-    """Run 'function' with the given Twisted log observers."""
-    real_observers = log.theLogPublisher.observers
-    for observer in real_observers:
-        log.theLogPublisher.removeObserver(observer)
-    for observer in observers:
-        log.theLogPublisher.addObserver(observer)
-    try:
-        return function(*args, **kwargs)
-    finally:
-        for observer in observers:
-            log.theLogPublisher.removeObserver(observer)
-        for observer in real_observers:
-            log.theLogPublisher.addObserver(observer)
-
-
-# Observer of the Twisted log that we install during tests.
-_log_observer = _LogObserver()
-
-
-
-class AsynchronousDeferredRunTest(_DeferredRunTest):
-    """Runner for tests that return Deferreds that fire asynchronously.
-
-    That is, this test runner assumes that the Deferreds will only fire if the
-    reactor is left to spin for a while.
-
-    Do not rely too heavily on the nuances of the behaviour of this class.
-    What it does to the reactor is black magic, and if we can find nicer ways
-    of doing it we will gladly break backwards compatibility.
-
-    This is highly experimental code.  Use at your own risk.
-    """
-
-    def __init__(self, case, handlers=None, reactor=None, timeout=0.005,
-                 debug=False):
-        """Construct an `AsynchronousDeferredRunTest`.
-
-        :param case: The `TestCase` to run.
-        :param handlers: A list of exception handlers (ExceptionType, handler)
-            where 'handler' is a callable that takes a `TestCase`, a
-            ``testtools.TestResult`` and the exception raised.
-        :param reactor: The Twisted reactor to use.  If not given, we use the
-            default reactor.
-        :param timeout: The maximum time allowed for running a test.  The
-            default is 0.005s.
-        :param debug: Whether or not to enable Twisted's debugging.  Use this
-            to get information about unhandled Deferreds and left-over
-            DelayedCalls.  Defaults to False.
-        """
-        super(AsynchronousDeferredRunTest, self).__init__(case, handlers)
-        if reactor is None:
-            from twisted.internet import reactor
-        self._reactor = reactor
-        self._timeout = timeout
-        self._debug = debug
-
-    @classmethod
-    def make_factory(cls, reactor=None, timeout=0.005, debug=False):
-        """Make a factory that conforms to the RunTest factory interface."""
-        # This is horrible, but it means that the return value of the method
-        # will be able to be assigned to a class variable *and* also be
-        # invoked directly.
-        class AsynchronousDeferredRunTestFactory:
-            def __call__(self, case, handlers=None):
-                return cls(case, handlers, reactor, timeout, debug)
-        return AsynchronousDeferredRunTestFactory()
-
-    @defer.deferredGenerator
-    def _run_cleanups(self):
-        """Run the cleanups on the test case.
-
-        We expect that the cleanups on the test case can also return
-        asynchronous Deferreds.  As such, we take the responsibility for
-        running the cleanups, rather than letting TestCase do it.
-        """
-        while self.case._cleanups:
-            f, args, kwargs = self.case._cleanups.pop()
-            d = defer.maybeDeferred(f, *args, **kwargs)
-            thing = defer.waitForDeferred(d)
-            yield thing
-            try:
-                thing.getResult()
-            except Exception:
-                exc_info = sys.exc_info()
-                self.case._report_traceback(exc_info)
-                last_exception = exc_info[1]
-        yield last_exception
-
-    def _make_spinner(self):
-        """Make the `Spinner` to be used to run the tests."""
-        return Spinner(self._reactor, debug=self._debug)
-
-    def _run_deferred(self):
-        """Run the test, assuming everything in it is Deferred-returning.
-
-        This should return a Deferred that fires with True if the test was
-        successful and False if the test was not successful.  It should *not*
-        call addSuccess on the result, because there's reactor clean up that
-        needs to be done afterwards.
-        """
-        fails = []
-
-        def fail_if_exception_caught(exception_caught):
-            if self.exception_caught == exception_caught:
-                fails.append(None)
-
-        def clean_up(ignored=None):
-            """Run the cleanups."""
-            d = self._run_cleanups()
-            def clean_up_done(result):
-                if result is not None:
-                    self._exceptions.append(result)
-                    fails.append(None)
-            return d.addCallback(clean_up_done)
-
-        def set_up_done(exception_caught):
-            """Set up is done, either clean up or run the test."""
-            if self.exception_caught == exception_caught:
-                fails.append(None)
-                return clean_up()
-            else:
-                d = self._run_user(self.case._run_test_method, self.result)
-                d.addCallback(fail_if_exception_caught)
-                d.addBoth(tear_down)
-                return d
-
-        def tear_down(ignored):
-            d = self._run_user(self.case._run_teardown, self.result)
-            d.addCallback(fail_if_exception_caught)
-            d.addBoth(clean_up)
-            return d
-
-        d = self._run_user(self.case._run_setup, self.result)
-        d.addCallback(set_up_done)
-        d.addBoth(lambda ignored: len(fails) == 0)
-        return d
-
-    def _log_user_exception(self, e):
-        """Raise 'e' and report it as a user exception."""
-        try:
-            raise e
-        except e.__class__:
-            self._got_user_exception(sys.exc_info())
-
-    def _blocking_run_deferred(self, spinner):
-        try:
-            return trap_unhandled_errors(
-                spinner.run, self._timeout, self._run_deferred)
-        except NoResultError:
-            # We didn't get a result at all!  This could be for any number of
-            # reasons, but most likely someone hit Ctrl-C during the test.
-            raise KeyboardInterrupt
-        except TimeoutError:
-            # The function took too long to run.
-            self._log_user_exception(TimeoutError(self.case, self._timeout))
-            return False, []
-
-    def _run_core(self):
-        # Add an observer to trap all logged errors.
-        error_observer = _log_observer
-        full_log = StringIO()
-        full_observer = log.FileLogObserver(full_log)
-        spinner = self._make_spinner()
-        successful, unhandled = run_with_log_observers(
-            [error_observer.gotEvent, full_observer.emit],
-            self._blocking_run_deferred, spinner)
-
-        self.case.addDetail(
-            'twisted-log', Content(UTF8_TEXT, full_log.readlines))
-
-        logged_errors = error_observer.flushErrors()
-        for logged_error in logged_errors:
-            successful = False
-            self._got_user_failure(logged_error, tb_label='logged-error')
-
-        if unhandled:
-            successful = False
-            for debug_info in unhandled:
-                f = debug_info.failResult
-                info = debug_info._getDebugTracebacks()
-                if info:
-                    self.case.addDetail(
-                        'unhandled-error-in-deferred-debug',
-                        text_content(info))
-                self._got_user_failure(f, 'unhandled-error-in-deferred')
-
-        junk = spinner.clear_junk()
-        if junk:
-            successful = False
-            self._log_user_exception(UncleanReactorError(junk))
-
-        if successful:
-            self.result.addSuccess(self.case, details=self.case.getDetails())
-
-    def _run_user(self, function, *args):
-        """Run a user-supplied function.
-
-        This just makes sure that it returns a Deferred, regardless of how the
-        user wrote it.
-        """
-        d = defer.maybeDeferred(function, *args)
-        return d.addErrback(self._got_user_failure)
-
-
-class AsynchronousDeferredRunTestForBrokenTwisted(AsynchronousDeferredRunTest):
-    """Test runner that works around Twisted brokenness re reactor junk.
-
-    There are many APIs within Twisted itself where a Deferred fires but
-    leaves cleanup work scheduled for the reactor to do.  Arguably, many of
-    these are bugs.  This runner iterates the reactor event loop a number of
-    times after every test, in order to shake out these buggy-but-commonplace
-    events.
-    """
-
-    def _make_spinner(self):
-        spinner = super(
-            AsynchronousDeferredRunTestForBrokenTwisted, self)._make_spinner()
-        spinner._OBLIGATORY_REACTOR_ITERATIONS = 2
-        return spinner
-
-
-def assert_fails_with(d, *exc_types, **kwargs):
-    """Assert that 'd' will fail with one of 'exc_types'.
-
-    The normal way to use this is to return the result of 'assert_fails_with'
-    from your unit test.
-
-    Note that this function is experimental and unstable.  Use at your own
-    peril; expect the API to change.
-
-    :param d: A Deferred that is expected to fail.
-    :param exc_types: The exception types that the Deferred is expected to
-        fail with.
-    :param failureException: An optional keyword argument.  If provided, will
-        raise that exception instead of
-        ``testtools.TestCase.failureException``.
-    :return: A Deferred that will fail with an ``AssertionError`` if 'd' does
-        not fail with one of the exception types.
-    """
-    failureException = kwargs.pop('failureException', None)
-    if failureException is None:
-        # Avoid circular imports.
-        from testtools import TestCase
-        failureException = TestCase.failureException
-    expected_names = ", ".join(exc_type.__name__ for exc_type in exc_types)
-    def got_success(result):
-        raise failureException(
-            "%s not raised (%r returned)" % (expected_names, result))
-    def got_failure(failure):
-        if failure.check(*exc_types):
-            return failure.value
-        raise failureException("%s raised instead of %s:\n %s" % (
-            failure.type.__name__, expected_names, failure.getTraceback()))
-    return d.addCallbacks(got_success, got_failure)
-
-
-def flush_logged_errors(*error_types):
-    return _log_observer.flushErrors(*error_types)
-
-
-class UncleanReactorError(Exception):
-    """Raised when the reactor has junk in it."""
-
-    def __init__(self, junk):
-        Exception.__init__(self,
-            "The reactor still thinks it needs to do things. Close all "
-            "connections, kill all processes and make sure all delayed "
-            "calls have either fired or been cancelled:\n%s"
-            % ''.join(map(self._get_junk_info, junk)))
-
-    def _get_junk_info(self, junk):
-        from twisted.internet.base import DelayedCall
-        if isinstance(junk, DelayedCall):
-            ret = str(junk)
-        else:
-            ret = repr(junk)
-        return '  %s\n' % (ret,)
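
A hedged usage sketch (not part of this changeset) of the asynchronous runner and assert_fails_with(), assuming Twisted is installed and an installed testtools still exposes testtools.deferredruntest with this API:

    from twisted.internet import defer
    from testtools import TestCase
    from testtools.deferredruntest import (
        AsynchronousDeferredRunTest,
        assert_fails_with,
        )

    class TestLookup(TestCase):
        # Spin the reactor for each test; 2 seconds is an arbitrary timeout.
        run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=2)

        def test_missing_key_fails(self):
            d = defer.maybeDeferred(lambda: {}['missing'])
            # Passes if the Deferred fails with KeyError, fails otherwise.
            return assert_fails_with(d, KeyError)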

=== removed file 'python-for-subunit2junitxml/testtools/distutilscmd.py'
--- python-for-subunit2junitxml/testtools/distutilscmd.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/distutilscmd.py	1970-01-01 00:00:00 +0000
@@ -1,62 +0,0 @@
-# Copyright (c) 2010-2011 testtools developers. See LICENSE for details.
-
-"""Extensions to the standard Python unittest library."""
-
-import sys
-
-from distutils.core import Command
-from distutils.errors import DistutilsOptionError
-
-from testtools.run import TestProgram, TestToolsTestRunner
-
-
-class TestCommand(Command):
-    """Command to run unit tests with testtools"""
-
-    description = "run unit tests with testtools"
-
-    user_options = [
-        ('catch', 'c', "Catch ctrl-C and display results so far"),
-        ('buffer', 'b', "Buffer stdout and stderr during tests"),
-        ('failfast', 'f', "Stop on first fail or error"),
-        ('test-module=','m', "Run 'test_suite' in specified module"),
-        ('test-suite=','s',
-         "Test suite to run (e.g. 'some_module.test_suite')")
-    ]
-
-    def __init__(self, dist):
-        Command.__init__(self, dist)
-        self.runner = TestToolsTestRunner(sys.stdout)
-
-
-    def initialize_options(self):
-        self.test_suite = None
-        self.test_module = None
-        self.catch = None
-        self.buffer = None
-        self.failfast = None
-
-    def finalize_options(self):
-        if self.test_suite is None:
-            if self.test_module is None:
-                raise DistutilsOptionError(
-                    "You must specify a module or a suite to run tests from")
-            else:
-                self.test_suite = self.test_module+".test_suite"
-        elif self.test_module:
-            raise DistutilsOptionError(
-                "You may specify a module or a suite, but not both")
-        self.test_args = [self.test_suite]
-        if self.verbose:
-            self.test_args.insert(0, '--verbose')
-        if self.buffer:
-            self.test_args.insert(0, '--buffer')
-        if self.catch:
-            self.test_args.insert(0, '--catch')
-        if self.failfast:
-            self.test_args.insert(0, '--failfast')
-
-    def run(self):
-        self.program = TestProgram(
-            argv=self.test_args, testRunner=self.runner, stdout=sys.stdout,
-            exit=False)
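
A sketch (not part of this changeset) of how this command was typically wired into a setup.py; the 'example' names are hypothetical:

    from distutils.core import setup
    from testtools.distutilscmd import TestCommand

    setup(
        name='example',
        version='0.1',
        packages=['example'],
        cmdclass={'test': TestCommand},
    )
    # Then: python setup.py test -m example.tests
    # runs the 'test_suite' object found in the example.tests module.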

=== removed file 'python-for-subunit2junitxml/testtools/helpers.py'
--- python-for-subunit2junitxml/testtools/helpers.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/helpers.py	1970-01-01 00:00:00 +0000
@@ -1,64 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-__all__ = [
-    'try_import',
-    'try_imports',
-    ]
-
-
-def try_import(name, alternative=None):
-    """Attempt to import ``name``.  If it fails, return ``alternative``.
-
-    When supporting multiple versions of Python or optional dependencies, it
-    is useful to be able to try to import a module.
-
-    :param name: The name of the object to import, e.g. ``os.path`` or
-        ``os.path.join``.
-    :param alternative: The value to return if no module can be imported.
-        Defaults to None.
-    """
-    module_segments = name.split('.')
-    while module_segments:
-        module_name = '.'.join(module_segments)
-        try:
-            module = __import__(module_name)
-        except ImportError:
-            module_segments.pop()
-            continue
-        else:
-            break
-    else:
-        return alternative
-    nonexistent = object()
-    for segment in name.split('.')[1:]:
-        module = getattr(module, segment, nonexistent)
-        if module is nonexistent:
-            return alternative
-    return module
-
-
-_RAISE_EXCEPTION = object()
-def try_imports(module_names, alternative=_RAISE_EXCEPTION):
-    """Attempt to import modules.
-
-    Tries to import the first module in ``module_names``.  If it can be
-    imported, we return it.  If not, we go on to the second module and try
-    that.  The process continues until we run out of modules to try.  If none
-    of the modules can be imported, either raise an exception or return the
-    provided ``alternative`` value.
-
-    :param module_names: A sequence of module names to try to import.
-    :param alternative: The value to return if no module can be imported.
-        If unspecified, we raise an ImportError.
-    :raises ImportError: If none of the modules can be imported and no
-        alternative value was specified.
-    """
-    module_names = list(module_names)
-    for module_name in module_names:
-        module = try_import(module_name)
-        if module:
-            return module
-    if alternative is _RAISE_EXCEPTION:
-        raise ImportError(
-            "Could not import any of: %s" % ', '.join(module_names))
-    return alternative
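
A usage sketch (not part of this changeset) of the helpers above, assuming an installed testtools still provides testtools.helpers.try_import and try_imports:

    from testtools.helpers import try_import, try_imports

    json = try_imports(['simplejson', 'json'])   # first importable module wins
    functools = try_import('functools')          # the module, or None if missing
    missing = try_import('no.such.module', alternative='fallback')
    print(missing)  # -> fallback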

=== removed file 'python-for-subunit2junitxml/testtools/matchers.py'
--- python-for-subunit2junitxml/testtools/matchers.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/matchers.py	1970-01-01 00:00:00 +0000
@@ -1,785 +0,0 @@
-# Copyright (c) 2009-2011 testtools developers. See LICENSE for details.
-
-"""Matchers, a way to express complex assertions outside the testcase.
-
-Inspired by 'hamcrest'.
-
-Matcher provides the abstract API that all matchers need to implement.
-
-Bundled matchers are listed in __all__: a list can be obtained by running
-$ python -c 'import testtools.matchers; print testtools.matchers.__all__'
-"""
-
-__metaclass__ = type
-__all__ = [
-    'Annotate',
-    'DocTestMatches',
-    'EndsWith',
-    'Equals',
-    'Is',
-    'KeysEqual',
-    'LessThan',
-    'MatchesAll',
-    'MatchesAny',
-    'MatchesException',
-    'NotEquals',
-    'Not',
-    'Raises',
-    'raises',
-    'StartsWith',
-    ]
-
-import doctest
-import operator
-from pprint import pformat
-import re
-import sys
-import types
-
-from testtools.compat import classtypes, _error_repr, isbaseexception
-
-
-class Matcher(object):
-    """A pattern matcher.
-
-    A Matcher must implement match and __str__ to be used by
-    testtools.TestCase.assertThat. Matcher.match(thing) returns None when
-    thing is completely matched, and a Mismatch object otherwise.
-
-    Matchers can be useful outside of test cases, as they are simply a
-    pattern matching language expressed as objects.
-
-    testtools.matchers is inspired by hamcrest, but is pythonic rather than
-    a Java transcription.
-    """
-
-    def match(self, something):
-        """Return None if this matcher matches something, a Mismatch otherwise.
-        """
-        raise NotImplementedError(self.match)
-
-    def __str__(self):
-        """Get a sensible human representation of the matcher.
-
-        This should include the parameters given to the matcher and any
-        state that would affect the matches operation.
-        """
-        raise NotImplementedError(self.__str__)
-
-
-class Mismatch(object):
-    """An object describing a mismatch detected by a Matcher."""
-
-    def __init__(self, description=None, details=None):
-        """Construct a `Mismatch`.
-
-        :param description: A description to use.  If not provided,
-            `Mismatch.describe` must be implemented.
-        :param details: Extra details about the mismatch.  Defaults
-            to the empty dict.
-        """
-        if description:
-            self._description = description
-        if details is None:
-            details = {}
-        self._details = details
-
-    def describe(self):
-        """Describe the mismatch.
-
-        This should be either a human-readable string or castable to a string.
-        """
-        try:
-            return self._description
-        except AttributeError:
-            raise NotImplementedError(self.describe)
-
-    def get_details(self):
-        """Get extra details about the mismatch.
-
-        This allows the mismatch to provide extra information beyond the basic
-        description, including large text or binary files, or debugging internals
-        without having to force it to fit in the output of 'describe'.
-
-        The testtools assertion assertThat will query get_details and attach
-        all its values to the test, permitting them to be reported in whatever
-        manner the test environment chooses.
-
-        :return: a dict mapping names to Content objects. name is a string to
-            name the detail, and the Content object is the detail to add
-            to the result. For more information see the API to which items from
-            this dict are passed testtools.TestCase.addDetail.
-        """
-        return getattr(self, '_details', {})
-
-    def __repr__(self):
-        return  "<testtools.matchers.Mismatch object at %x attributes=%r>" % (
-            id(self), self.__dict__)
-
-
-class MismatchDecorator(object):
-    """Decorate a ``Mismatch``.
-
-    Forwards all messages to the original mismatch object.  Probably the best
-    way to use this is inherit from this class and then provide your own
-    custom decoration logic.
-    """
-
-    def __init__(self, original):
-        """Construct a `MismatchDecorator`.
-
-        :param original: A `Mismatch` object to decorate.
-        """
-        self.original = original
-
-    def __repr__(self):
-        return '<testtools.matchers.MismatchDecorator(%r)>' % (self.original,)
-
-    def describe(self):
-        return self.original.describe()
-
-    def get_details(self):
-        return self.original.get_details()
-
-
-class DocTestMatches(object):
-    """See if a string matches a doctest example."""
-
-    def __init__(self, example, flags=0):
-        """Create a DocTestMatches to match example.
-
-        :param example: The example to match e.g. 'foo bar baz'
-        :param flags: doctest comparison flags to match on. e.g.
-            doctest.ELLIPSIS.
-        """
-        if not example.endswith('\n'):
-            example += '\n'
-        self.want = example  # variable name required by doctest.
-        self.flags = flags
-        self._checker = doctest.OutputChecker()
-
-    def __str__(self):
-        if self.flags:
-            flagstr = ", flags=%d" % self.flags
-        else:
-            flagstr = ""
-        return 'DocTestMatches(%r%s)' % (self.want, flagstr)
-
-    def _with_nl(self, actual):
-        result = str(actual)
-        if not result.endswith('\n'):
-            result += '\n'
-        return result
-
-    def match(self, actual):
-        with_nl = self._with_nl(actual)
-        if self._checker.check_output(self.want, with_nl, self.flags):
-            return None
-        return DocTestMismatch(self, with_nl)
-
-    def _describe_difference(self, with_nl):
-        return self._checker.output_difference(self, with_nl, self.flags)
-
-
-class DocTestMismatch(Mismatch):
-    """Mismatch object for DocTestMatches."""
-
-    def __init__(self, matcher, with_nl):
-        self.matcher = matcher
-        self.with_nl = with_nl
-
-    def describe(self):
-        return self.matcher._describe_difference(self.with_nl)
-
-
-class DoesNotStartWith(Mismatch):
-
-    def __init__(self, matchee, expected):
-        """Create a DoesNotStartWith Mismatch.
-
-        :param matchee: the string that did not match.
-        :param expected: the string that 'matchee' was expected to start with.
-        """
-        self.matchee = matchee
-        self.expected = expected
-
-    def describe(self):
-        return "'%s' does not start with '%s'." % (
-            self.matchee, self.expected)
-
-
-class DoesNotEndWith(Mismatch):
-
-    def __init__(self, matchee, expected):
-        """Create a DoesNotEndWith Mismatch.
-
-        :param matchee: the string that did not match.
-        :param expected: the string that 'matchee' was expected to end with.
-        """
-        self.matchee = matchee
-        self.expected = expected
-
-    def describe(self):
-        return "'%s' does not end with '%s'." % (
-            self.matchee, self.expected)
-
-
-class _BinaryComparison(object):
-    """Matcher that compares an object to another object."""
-
-    def __init__(self, expected):
-        self.expected = expected
-
-    def __str__(self):
-        return "%s(%r)" % (self.__class__.__name__, self.expected)
-
-    def match(self, other):
-        if self.comparator(other, self.expected):
-            return None
-        return _BinaryMismatch(self.expected, self.mismatch_string, other)
-
-    def comparator(self, expected, other):
-        raise NotImplementedError(self.comparator)
-
-
-class _BinaryMismatch(Mismatch):
-    """Two things did not match."""
-
-    def __init__(self, expected, mismatch_string, other):
-        self.expected = expected
-        self._mismatch_string = mismatch_string
-        self.other = other
-
-    def describe(self):
-        left = repr(self.expected)
-        right = repr(self.other)
-        if len(left) + len(right) > 70:
-            return "%s:\nreference = %s\nactual = %s\n" % (
-                self._mismatch_string, pformat(self.expected),
-                pformat(self.other))
-        else:
-            return "%s %s %s" % (left, self._mismatch_string,right)
-
-
-class Equals(_BinaryComparison):
-    """Matches if the items are equal."""
-
-    comparator = operator.eq
-    mismatch_string = '!='
-
-
-class NotEquals(_BinaryComparison):
-    """Matches if the items are not equal.
-
-    In most cases, this is equivalent to ``Not(Equals(foo))``. The difference
-    only matters when testing ``__ne__`` implementations.
-    """
-
-    comparator = operator.ne
-    mismatch_string = '=='
-
-
-class Is(_BinaryComparison):
-    """Matches if the items are identical."""
-
-    comparator = operator.is_
-    mismatch_string = 'is not'
-
-
-class LessThan(_BinaryComparison):
-    """Matches if the item is less than the matchers reference object."""
-
-    comparator = operator.__lt__
-    mismatch_string = 'is >='
-
-
-class MatchesAny(object):
-    """Matches if any of the matchers it is created with match."""
-
-    def __init__(self, *matchers):
-        self.matchers = matchers
-
-    def match(self, matchee):
-        results = []
-        for matcher in self.matchers:
-            mismatch = matcher.match(matchee)
-            if mismatch is None:
-                return None
-            results.append(mismatch)
-        return MismatchesAll(results)
-
-    def __str__(self):
-        return "MatchesAny(%s)" % ', '.join([
-            str(matcher) for matcher in self.matchers])
-
-
-class MatchesAll(object):
-    """Matches if all of the matchers it is created with match."""
-
-    def __init__(self, *matchers):
-        self.matchers = matchers
-
-    def __str__(self):
-        return 'MatchesAll(%s)' % ', '.join(map(str, self.matchers))
-
-    def match(self, matchee):
-        results = []
-        for matcher in self.matchers:
-            mismatch = matcher.match(matchee)
-            if mismatch is not None:
-                results.append(mismatch)
-        if results:
-            return MismatchesAll(results)
-        else:
-            return None
-
-
-class MismatchesAll(Mismatch):
-    """A mismatch with many child mismatches."""
-
-    def __init__(self, mismatches):
-        self.mismatches = mismatches
-
-    def describe(self):
-        descriptions = ["Differences: ["]
-        for mismatch in self.mismatches:
-            descriptions.append(mismatch.describe())
-        descriptions.append("]")
-        return '\n'.join(descriptions)
-
-
-class Not(object):
-    """Inverts a matcher."""
-
-    def __init__(self, matcher):
-        self.matcher = matcher
-
-    def __str__(self):
-        return 'Not(%s)' % (self.matcher,)
-
-    def match(self, other):
-        mismatch = self.matcher.match(other)
-        if mismatch is None:
-            return MatchedUnexpectedly(self.matcher, other)
-        else:
-            return None
-
-
-class MatchedUnexpectedly(Mismatch):
-    """A thing matched when it wasn't supposed to."""
-
-    def __init__(self, matcher, other):
-        self.matcher = matcher
-        self.other = other
-
-    def describe(self):
-        return "%r matches %s" % (self.other, self.matcher)
-
-
-class MatchesException(Matcher):
-    """Match an exc_info tuple against an exception instance or type."""
-
-    def __init__(self, exception, value_re=None):
-        """Create a MatchesException that will match exc_info's for exception.
-
-        :param exception: Either an exception instance or type.
-            If an instance is given, the type and arguments of the exception
-            are checked. If a type is given only the type of the exception is
-            checked.
-        :param value_re: If 'exception' is a type, and the matchee exception
-            is of the right type, then the 'str()' of the matchee exception
-            is matched against this regular expression.
-        """
-        Matcher.__init__(self)
-        self.expected = exception
-        self.value_re = value_re
-        self._is_instance = type(self.expected) not in classtypes()
-
-    def match(self, other):
-        if type(other) != tuple:
-            return Mismatch('%r is not an exc_info tuple' % other)
-        expected_class = self.expected
-        if self._is_instance:
-            expected_class = expected_class.__class__
-        if not issubclass(other[0], expected_class):
-            return Mismatch('%r is not a %r' % (other[0], expected_class))
-        if self._is_instance:
-            if other[1].args != self.expected.args:
-                return Mismatch('%s has different arguments to %s.' % (
-                        _error_repr(other[1]), _error_repr(self.expected)))
-        elif self.value_re is not None:
-            str_exc_value = str(other[1])
-            if not re.match(self.value_re, str_exc_value):
-                return Mismatch(
-                    '"%s" does not match "%s".'
-                    % (str_exc_value, self.value_re))
-
-    def __str__(self):
-        if self._is_instance:
-            return "MatchesException(%s)" % _error_repr(self.expected)
-        return "MatchesException(%s)" % repr(self.expected)
-
-
-class StartsWith(Matcher):
-    """Checks whether one string starts with another."""
-
-    def __init__(self, expected):
-        """Create a StartsWith Matcher.
-
-        :param expected: the string that matchees should start with.
-        """
-        self.expected = expected
-
-    def __str__(self):
-        return "Starts with '%s'." % self.expected
-
-    def match(self, matchee):
-        if not matchee.startswith(self.expected):
-            return DoesNotStartWith(matchee, self.expected)
-        return None
-
-
-class EndsWith(Matcher):
-    """Checks whether one string starts with another."""
-
-    def __init__(self, expected):
-        """Create a EndsWith Matcher.
-
-        :param expected: the string that matchees should end with.
-        """
-        self.expected = expected
-
-    def __str__(self):
-        return "Ends with '%s'." % self.expected
-
-    def match(self, matchee):
-        if not matchee.endswith(self.expected):
-            return DoesNotEndWith(matchee, self.expected)
-        return None
-
-
-class KeysEqual(Matcher):
-    """Checks whether a dict has particular keys."""
-
-    def __init__(self, *expected):
-        """Create a `KeysEqual` Matcher.
-
-        :param expected: The keys the dict is expected to have.  If a dict,
-            then we use the keys of that dict, if a collection, we assume it
-            is a collection of expected keys.
-        """
-        try:
-            self.expected = expected.keys()
-        except AttributeError:
-            self.expected = list(expected)
-
-    def __str__(self):
-        return "KeysEqual(%s)" % ', '.join(map(repr, self.expected))
-
-    def match(self, matchee):
-        expected = sorted(self.expected)
-        matched = Equals(expected).match(sorted(matchee.keys()))
-        if matched:
-            return AnnotatedMismatch(
-                'Keys not equal',
-                _BinaryMismatch(expected, 'does not match', matchee))
-        return None
-
-
-class Annotate(object):
-    """Annotates a matcher with a descriptive string.
-
-    Mismatches are then described as '<mismatch>: <annotation>'.
-    """
-
-    def __init__(self, annotation, matcher):
-        self.annotation = annotation
-        self.matcher = matcher
-
-    def __str__(self):
-        return 'Annotate(%r, %s)' % (self.annotation, self.matcher)
-
-    def match(self, other):
-        mismatch = self.matcher.match(other)
-        if mismatch is not None:
-            return AnnotatedMismatch(self.annotation, mismatch)
-
-
-class AnnotatedMismatch(MismatchDecorator):
-    """A mismatch annotated with a descriptive string."""
-
-    def __init__(self, annotation, mismatch):
-        super(AnnotatedMismatch, self).__init__(mismatch)
-        self.annotation = annotation
-        self.mismatch = mismatch
-
-    def describe(self):
-        return '%s: %s' % (self.original.describe(), self.annotation)
-
-
-class Raises(Matcher):
-    """Match if the matchee raises an exception when called.
-
-    Exceptions which are not subclasses of Exception propagate out of the
-    Raises.match call unless they are explicitly matched.
-    """
-
-    def __init__(self, exception_matcher=None):
-        """Create a Raises matcher.
-
-        :param exception_matcher: Optional validator for the exception raised
-            by matchee. If supplied the exc_info tuple for the exception raised
-            is passed into that matcher. If no exception_matcher is supplied
-            then the simple fact of raising an exception is considered enough
-            to match on.
-        """
-        self.exception_matcher = exception_matcher
-
-    def match(self, matchee):
-        try:
-            result = matchee()
-            return Mismatch('%r returned %r' % (matchee, result))
-        # Catch all exceptions: Raises() should be able to match a
-        # KeyboardInterrupt or SystemExit.
-        except:
-            if self.exception_matcher:
-                mismatch = self.exception_matcher.match(sys.exc_info())
-                if not mismatch:
-                    return
-            else:
-                mismatch = None
-            # The exception did not match, or no explicit matching logic was
-            # performed. If the exception is a non-user exception (that is, not
-            # a subclass of Exception on Python 2.5+) then propagate it.
-            if isbaseexception(sys.exc_info()[1]):
-                raise
-            return mismatch
-
-    def __str__(self):
-        return 'Raises()'
-
-
-def raises(exception):
-    """Make a matcher that checks that a callable raises an exception.
-
-    This is a convenience function, exactly equivalent to::
-
-        return Raises(MatchesException(exception))
-
-    See `Raises` and `MatchesException` for more information.
-    """
-    return Raises(MatchesException(exception))
-
-
-class MatchesListwise(object):
-    """Matches if each matcher matches the corresponding value.
-
-    More easily explained by example than in words:
-
-    >>> MatchesListwise([Equals(1)]).match([1])
-    >>> MatchesListwise([Equals(1), Equals(2)]).match([1, 2])
-    >>> print MatchesListwise([Equals(1), Equals(2)]).match([2, 1]).describe()
-    Differences: [
-    1 != 2
-    2 != 1
-    ]
-    """
-
-    def __init__(self, matchers):
-        self.matchers = matchers
-
-    def match(self, values):
-        mismatches = []
-        length_mismatch = Annotate(
-            "Length mismatch", Equals(len(self.matchers))).match(len(values))
-        if length_mismatch:
-            mismatches.append(length_mismatch)
-        for matcher, value in zip(self.matchers, values):
-            mismatch = matcher.match(value)
-            if mismatch:
-                mismatches.append(mismatch)
-        if mismatches:
-            return MismatchesAll(mismatches)
-
-
-class MatchesStructure(object):
-    """Matcher that matches an object structurally.
-
-    'Structurally' here means that attributes of the object being matched are
-    compared against given matchers.
-
-    `fromExample` allows the creation of a matcher from a prototype object and
-    then modified versions can be created with `update`.
-    """
-
-    def __init__(self, **kwargs):
-        """Construct a `MatchesStructure`.
-
-        :param kwargs: A mapping of attributes to matchers.
-        """
-        self.kws = kwargs
-
-    @classmethod
-    def fromExample(cls, example, *attributes):
-        kwargs = {}
-        for attr in attributes:
-            kwargs[attr] = Equals(getattr(example, attr))
-        return cls(**kwargs)
-
-    def update(self, **kws):
-        new_kws = self.kws.copy()
-        for attr, matcher in kws.iteritems():
-            if matcher is None:
-                new_kws.pop(attr, None)
-            else:
-                new_kws[attr] = matcher
-        return type(self)(**new_kws)
-
-    def __str__(self):
-        kws = []
-        for attr, matcher in sorted(self.kws.iteritems()):
-            kws.append("%s=%s" % (attr, matcher))
-        return "%s(%s)" % (self.__class__.__name__, ', '.join(kws))
-
-    def match(self, value):
-        matchers = []
-        values = []
-        for attr, matcher in sorted(self.kws.iteritems()):
-            matchers.append(Annotate(attr, matcher))
-            values.append(getattr(value, attr))
-        return MatchesListwise(matchers).match(values)
-
-
-class MatchesRegex(object):
-    """Matches if the matchee is matched by a regular expression."""
-
-    def __init__(self, pattern, flags=0):
-        self.pattern = pattern
-        self.flags = flags
-
-    def __str__(self):
-        args = ['%r' % self.pattern]
-        flag_arg = []
-        # dir() sorts the attributes for us, so we don't need to do it again.
-        for flag in dir(re):
-            if len(flag) == 1:
-                if self.flags & getattr(re, flag):
-                    flag_arg.append('re.%s' % flag)
-        if flag_arg:
-            args.append('|'.join(flag_arg))
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(args))
-
-    def match(self, value):
-        if not re.match(self.pattern, value, self.flags):
-            return Mismatch("%r did not match %r" % (self.pattern, value))
-
-
-class MatchesSetwise(object):
-    """Matches if all the matchers match elements of the value being matched.
-
-    That is, each element in the 'observed' set must match exactly one matcher
-    from the set of matchers, with no matchers left over.
-
-    The difference compared to `MatchesListwise` is that the order of the
-    matchings does not matter.
-    """
-
-    def __init__(self, *matchers):
-        self.matchers = matchers
-
-    def match(self, observed):
-        remaining_matchers = set(self.matchers)
-        not_matched = []
-        for value in observed:
-            for matcher in remaining_matchers:
-                if matcher.match(value) is None:
-                    remaining_matchers.remove(matcher)
-                    break
-            else:
-                not_matched.append(value)
-        if not_matched or remaining_matchers:
-            remaining_matchers = list(remaining_matchers)
-            # There are various cases that all should be reported somewhat
-            # differently.
-
-            # There are two trivial cases:
-            # 1) There are just some matchers left over.
-            # 2) There are just some values left over.
-
-            # Then there are three more interesting cases:
-            # 3) There are the same number of matchers and values left over.
-            # 4) There are more matchers left over than values.
-            # 5) There are more values left over than matchers.
-
-            if len(not_matched) == 0:
-                if len(remaining_matchers) > 1:
-                    msg = "There were %s matchers left over: " % (
-                        len(remaining_matchers),)
-                else:
-                    msg = "There was 1 matcher left over: "
-                msg += ', '.join(map(str, remaining_matchers))
-                return Mismatch(msg)
-            elif len(remaining_matchers) == 0:
-                if len(not_matched) > 1:
-                    return Mismatch(
-                        "There were %s values left over: %s" % (
-                            len(not_matched), not_matched))
-                else:
-                    return Mismatch(
-                        "There was 1 value left over: %s" % (
-                            not_matched, ))
-            else:
-                common_length = min(len(remaining_matchers), len(not_matched))
-                if common_length == 0:
-                    raise AssertionError("common_length can't be 0 here")
-                if common_length > 1:
-                    msg = "There were %s mismatches" % (common_length,)
-                else:
-                    msg = "There was 1 mismatch"
-                if len(remaining_matchers) > len(not_matched):
-                    extra_matchers = remaining_matchers[common_length:]
-                    msg += " and %s extra matcher" % (len(extra_matchers), )
-                    if len(extra_matchers) > 1:
-                        msg += "s"
-                    msg += ': ' + ', '.join(map(str, extra_matchers))
-                elif len(not_matched) > len(remaining_matchers):
-                    extra_values = not_matched[common_length:]
-                    msg += " and %s extra value" % (len(extra_values), )
-                    if len(extra_values) > 1:
-                        msg += "s"
-                    msg += ': ' + str(extra_values)
-                return Annotate(
-                    msg, MatchesListwise(remaining_matchers[:common_length])
-                    ).match(not_matched[:common_length])
-
-
-class AfterPreproccessing(object):
-    """Matches if the value matches after passing through a function.
-
-    This can be used to aid in creating trivial matchers as functions, for
-    example::
-
-      def PathHasFileContent(content):
-          def _read(path):
-              return open(path).read()
-          return AfterPreproccessing(_read, Equals(content))
-    """
-
-    def __init__(self, preprocessor, matcher):
-        self.preprocessor = preprocessor
-        self.matcher = matcher
-
-    def _str_preprocessor(self):
-        if isinstance(self.preprocessor, types.FunctionType):
-            return '<function %s>' % self.preprocessor.__name__
-        return str(self.preprocessor)
-
-    def __str__(self):
-        return "AfterPreproccessing(%s, %s)" % (
-            self._str_preprocessor(), self.matcher)
-
-    def match(self, value):
-        value = self.preprocessor(value)
-        return Annotate(
-            "after %s" % self._str_preprocessor(),
-            self.matcher).match(value)
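
For context, a short sketch of the matcher protocol defined in the file removed above, used standalone rather than through TestCase.assertThat (it assumes the vendored testtools.matchers is importable):

    from testtools.matchers import Annotate, Equals, MatchesAll, Not, StartsWith

    # A matcher's match() returns None on success and a Mismatch otherwise.
    matcher = MatchesAll(StartsWith('abc'), Not(Equals('abcdef')))
    mismatch = matcher.match('abcdef')      # Not(Equals(...)) fails here
    if mismatch is not None:
        print(mismatch.describe())          # lists the individual differences

    # Annotate appends a label to whatever the wrapped matcher reports.
    print(Annotate('greeting check', Equals('hello')).match('hi').describe())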

=== removed file 'python-for-subunit2junitxml/testtools/monkey.py'
--- python-for-subunit2junitxml/testtools/monkey.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/monkey.py	1970-01-01 00:00:00 +0000
@@ -1,97 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Helpers for monkey-patching Python code."""
-
-__all__ = [
-    'MonkeyPatcher',
-    'patch',
-    ]
-
-
-class MonkeyPatcher(object):
-    """A set of monkey-patches that can be applied and removed all together.
-
-    Use this to cover up attributes with new objects. Particularly useful for
-    testing difficult code.
-    """
-
-    # Marker used to indicate that the patched attribute did not exist on the
-    # object before we patched it.
-    _NO_SUCH_ATTRIBUTE = object()
-
-    def __init__(self, *patches):
-        """Construct a `MonkeyPatcher`.
-
-        :param patches: The patches to apply, each should be (obj, name,
-            new_value). Providing patches here is equivalent to calling
-            `add_patch`.
-        """
-        # List of patches to apply in (obj, name, value).
-        self._patches_to_apply = []
-        # List of the original values for things that have been patched.
-        # (obj, name, value) format.
-        self._originals = []
-        for patch in patches:
-            self.add_patch(*patch)
-
-    def add_patch(self, obj, name, value):
-        """Add a patch to overwrite 'name' on 'obj' with 'value'.
-
-        The attribute C{name} on C{obj} will be assigned to C{value} when
-        C{patch} is called or during C{run_with_patches}.
-
-        You can restore the original values with a call to restore().
-        """
-        self._patches_to_apply.append((obj, name, value))
-
-    def patch(self):
-        """Apply all of the patches that have been specified with `add_patch`.
-
-        Reverse this operation using L{restore}.
-        """
-        for obj, name, value in self._patches_to_apply:
-            original_value = getattr(obj, name, self._NO_SUCH_ATTRIBUTE)
-            self._originals.append((obj, name, original_value))
-            setattr(obj, name, value)
-
-    def restore(self):
-        """Restore all original values to any patched objects.
-
-        If the patched attribute did not exist on an object before it was
-        patched, `restore` will delete the attribute so as to return the
-        object to its original state.
-        """
-        while self._originals:
-            obj, name, value = self._originals.pop()
-            if value is self._NO_SUCH_ATTRIBUTE:
-                delattr(obj, name)
-            else:
-                setattr(obj, name, value)
-
-    def run_with_patches(self, f, *args, **kw):
-        """Run 'f' with the given args and kwargs with all patches applied.
-
-        Restores all objects to their original state when finished.
-        """
-        self.patch()
-        try:
-            return f(*args, **kw)
-        finally:
-            self.restore()
-
-
-def patch(obj, attribute, value):
-    """Set 'obj.attribute' to 'value' and return a callable to restore 'obj'.
-
-    If 'attribute' is not set on 'obj' already, then the returned callable
-    will delete the attribute when called.
-
-    :param obj: An object to monkey-patch.
-    :param attribute: The name of the attribute to patch.
-    :param value: The value to set 'obj.attribute' to.
-    :return: A nullary callable that, when run, will restore 'obj' to its
-        original state.
-    """
-    patcher = MonkeyPatcher((obj, attribute, value))
-    patcher.patch()
-    return patcher.restore
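
For context, a small sketch of the monkey-patching helpers removed above (assuming the vendored testtools.monkey is importable; Config is just an illustrative stand-in class):

    from testtools.monkey import MonkeyPatcher, patch

    class Config(object):
        timeout = 30

    # patch() swaps the attribute immediately and returns a restore callable.
    restore = patch(Config, 'timeout', 1)
    assert Config.timeout == 1
    restore()
    assert Config.timeout == 30

    # MonkeyPatcher applies several patches around a single call and then
    # restores everything, deleting attributes that did not exist before.
    patcher = MonkeyPatcher((Config, 'timeout', 5), (Config, 'retries', 2))
    print(patcher.run_with_patches(lambda: (Config.timeout, Config.retries)))
    assert not hasattr(Config, 'retries')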

=== removed file 'python-for-subunit2junitxml/testtools/run.py'
--- python-for-subunit2junitxml/testtools/run.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/run.py	1970-01-01 00:00:00 +0000
@@ -1,332 +0,0 @@
-# Copyright (c) 2009 testtools developers. See LICENSE for details.
-
-"""python -m testtools.run testspec [testspec...]
-
-Run some tests with the testtools extended API.
-
-For instance, to run the testtools test suite.
- $ python -m testtools.run testtools.tests.test_suite
-"""
-
-import os
-import unittest
-import sys
-
-from testtools import TextTestResult
-from testtools.compat import classtypes, istext, unicode_output_stream
-from testtools.testsuite import iterate_tests
-
-
-defaultTestLoader = unittest.defaultTestLoader
-defaultTestLoaderCls = unittest.TestLoader
-
-if getattr(defaultTestLoader, 'discover', None) is None:
-    try:
-        import discover
-        defaultTestLoader = discover.DiscoveringTestLoader()
-        defaultTestLoaderCls = discover.DiscoveringTestLoader
-        have_discover = True
-    except ImportError:
-        have_discover = False
-else:
-    have_discover = True
-
-
-class TestToolsTestRunner(object):
-    """ A thunk object to support unittest.TestProgram."""
-
-    def __init__(self, stdout):
-        self.stdout = stdout
-
-    def run(self, test):
-        "Run the given test case or test suite."
-        result = TextTestResult(unicode_output_stream(self.stdout))
-        result.startTestRun()
-        try:
-            return test.run(result)
-        finally:
-            result.stopTestRun()
-
-
-####################
-# Taken from python 2.7 and slightly modified for compatibility with
-# older versions. Delete when 2.7 is the oldest supported version.
-# Modifications:
-#  - Use have_discover to raise an error if the user tries to use
-#    discovery on an old version and doesn't have discover installed.
-#  - If --catch is given check that installHandler is available, as
-#    it won't be on old python versions.
-#  - print calls have been made single-source python3 compatible.
-#  - exception handling likewise.
-#  - The default help has been changed to USAGE_AS_MAIN and USAGE_FROM_MODULE
-#    removed.
-#  - A tweak has been added to detect 'python -m *.run' and use a
-#    better progName in that case.
-#  - self.module is more comprehensively set to None when being invoked from
-#    the commandline - __name__ is used as a sentinel value.
-#  - --list has been added which can list tests (should be upstreamed).
-#  - --load-list has been added which can reduce the tests used (should be
-#    upstreamed).
-#  - The limitation of using getopt is declared to the user.
-
-FAILFAST     = "  -f, --failfast   Stop on first failure\n"
-CATCHBREAK   = "  -c, --catch      Catch control-C and display results\n"
-BUFFEROUTPUT = "  -b, --buffer     Buffer stdout and stderr during test runs\n"
-
-USAGE_AS_MAIN = """\
-Usage: %(progName)s [options] [tests]
-
-Options:
-  -h, --help       Show this message
-  -v, --verbose    Verbose output
-  -q, --quiet      Minimal output
-  -l, --list       List tests rather than executing them.
-  --load-list      Specifies a file containing test ids, only tests matching
-                   those ids are executed.
-%(failfast)s%(catchbreak)s%(buffer)s
-Examples:
-  %(progName)s test_module               - run tests from test_module
-  %(progName)s module.TestClass          - run tests from module.TestClass
-  %(progName)s module.Class.test_method  - run specified test method
-
-All options must come before [tests].  [tests] can be a list of any number of
-test modules, classes and test methods.
-
-Alternative Usage: %(progName)s discover [options]
-
-Options:
-  -v, --verbose    Verbose output
-%(failfast)s%(catchbreak)s%(buffer)s  -s directory     Directory to start discovery ('.' default)
-  -p pattern       Pattern to match test files ('test*.py' default)
-  -t directory     Top level directory of project (default to
-                   start directory)
-  -l, --list       List tests rather than executing them.
-  --load-list      Specifies a file containing test ids, only tests matching
-                   those ids are executed.
-
-For test discovery all test modules must be importable from the top
-level directory of the project.
-"""
-
-
-class TestProgram(object):
-    """A command-line program that runs a set of tests; this is primarily
-       for making test modules conveniently executable.
-    """
-    USAGE = USAGE_AS_MAIN
-
-    # defaults for testing
-    failfast = catchbreak = buffer = progName = None
-
-    def __init__(self, module=__name__, defaultTest=None, argv=None,
-                    testRunner=None, testLoader=defaultTestLoader,
-                    exit=True, verbosity=1, failfast=None, catchbreak=None,
-                    buffer=None, stdout=None):
-        if module == __name__:
-            self.module = None
-        elif istext(module):
-            self.module = __import__(module)
-            for part in module.split('.')[1:]:
-                self.module = getattr(self.module, part)
-        else:
-            self.module = module
-        if argv is None:
-            argv = sys.argv
-        if stdout is None:
-            stdout = sys.stdout
-
-        self.exit = exit
-        self.failfast = failfast
-        self.catchbreak = catchbreak
-        self.verbosity = verbosity
-        self.buffer = buffer
-        self.defaultTest = defaultTest
-        self.listtests = False
-        self.load_list = None
-        self.testRunner = testRunner
-        self.testLoader = testLoader
-        progName = argv[0]
-        if progName.endswith('%srun.py' % os.path.sep):
-            elements = progName.split(os.path.sep)
-            progName = '%s.run' % elements[-2]
-        else:
-            progName = os.path.basename(argv[0])
-        self.progName = progName
-        self.parseArgs(argv)
-        if self.load_list:
-            # TODO: preserve existing suites (like testresources does in
-            # OptimisingTestSuite.add, but with a standard protocol).
-            # This is needed because the load_tests hook allows arbitrary
-            # suites, even if that is rarely used.
-            source = file(self.load_list, 'rb')
-            try:
-                lines = source.readlines()
-            finally:
-                source.close()
-            test_ids = set(line.strip() for line in lines)
-            filtered = unittest.TestSuite()
-            for test in iterate_tests(self.test):
-                if test.id() in test_ids:
-                    filtered.addTest(test)
-            self.test = filtered
-        if not self.listtests:
-            self.runTests()
-        else:
-            for test in iterate_tests(self.test):
-                stdout.write('%s\n' % test.id())
-
-    def usageExit(self, msg=None):
-        if msg:
-            print(msg)
-        usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '',
-                 'buffer': ''}
-        if self.failfast != False:
-            usage['failfast'] = FAILFAST
-        if self.catchbreak != False:
-            usage['catchbreak'] = CATCHBREAK
-        if self.buffer != False:
-            usage['buffer'] = BUFFEROUTPUT
-        print(self.USAGE % usage)
-        sys.exit(2)
-
-    def parseArgs(self, argv):
-        if len(argv) > 1 and argv[1].lower() == 'discover':
-            self._do_discovery(argv[2:])
-            return
-
-        import getopt
-        long_opts = ['help', 'verbose', 'quiet', 'failfast', 'catch', 'buffer',
-            'list', 'load-list=']
-        try:
-            options, args = getopt.getopt(argv[1:], 'hHvqfcbl', long_opts)
-            for opt, value in options:
-                if opt in ('-h','-H','--help'):
-                    self.usageExit()
-                if opt in ('-q','--quiet'):
-                    self.verbosity = 0
-                if opt in ('-v','--verbose'):
-                    self.verbosity = 2
-                if opt in ('-f','--failfast'):
-                    if self.failfast is None:
-                        self.failfast = True
-                    # Should this raise an exception if -f is not valid?
-                if opt in ('-c','--catch'):
-                    if self.catchbreak is None:
-                        self.catchbreak = True
-                    # Should this raise an exception if -c is not valid?
-                if opt in ('-b','--buffer'):
-                    if self.buffer is None:
-                        self.buffer = True
-                    # Should this raise an exception if -b is not valid?
-                if opt in ('-l', '--list'):
-                    self.listtests = True
-                if opt == '--load-list':
-                    self.load_list = value
-            if len(args) == 0 and self.defaultTest is None:
-                # createTests will load tests from self.module
-                self.testNames = None
-            elif len(args) > 0:
-                self.testNames = args
-            else:
-                self.testNames = (self.defaultTest,)
-            self.createTests()
-        except getopt.error:
-            self.usageExit(sys.exc_info()[1])
-
-    def createTests(self):
-        if self.testNames is None:
-            self.test = self.testLoader.loadTestsFromModule(self.module)
-        else:
-            self.test = self.testLoader.loadTestsFromNames(self.testNames,
-                                                           self.module)
-
-    def _do_discovery(self, argv, Loader=defaultTestLoaderCls):
-        # handle command line args for test discovery
-        if not have_discover:
-            raise AssertionError("Unable to use discovery, must use python 2.7 "
-                    "or greater, or install the discover package.")
-        self.progName = '%s discover' % self.progName
-        import optparse
-        parser = optparse.OptionParser()
-        parser.prog = self.progName
-        parser.add_option('-v', '--verbose', dest='verbose', default=False,
-                          help='Verbose output', action='store_true')
-        if self.failfast != False:
-            parser.add_option('-f', '--failfast', dest='failfast', default=False,
-                              help='Stop on first fail or error',
-                              action='store_true')
-        if self.catchbreak != False:
-            parser.add_option('-c', '--catch', dest='catchbreak', default=False,
-                              help='Catch ctrl-C and display results so far',
-                              action='store_true')
-        if self.buffer != False:
-            parser.add_option('-b', '--buffer', dest='buffer', default=False,
-                              help='Buffer stdout and stderr during tests',
-                              action='store_true')
-        parser.add_option('-s', '--start-directory', dest='start', default='.',
-                          help="Directory to start discovery ('.' default)")
-        parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',
-                          help="Pattern to match tests ('test*.py' default)")
-        parser.add_option('-t', '--top-level-directory', dest='top', default=None,
-                          help='Top level directory of project (defaults to start directory)')
-        parser.add_option('-l', '--list', dest='listtests', default=False,
-                          help='List tests rather than running them.')
-        parser.add_option('--load-list', dest='load_list', default=None,
-                          help='Specify a filename containing the test ids to use.')
-
-        options, args = parser.parse_args(argv)
-        if len(args) > 3:
-            self.usageExit()
-
-        for name, value in zip(('start', 'pattern', 'top'), args):
-            setattr(options, name, value)
-
-        # only set options from the parsing here
-        # if they weren't set explicitly in the constructor
-        if self.failfast is None:
-            self.failfast = options.failfast
-        if self.catchbreak is None:
-            self.catchbreak = options.catchbreak
-        if self.buffer is None:
-            self.buffer = options.buffer
-        self.listtests = options.listtests
-        self.load_list = options.load_list
-
-        if options.verbose:
-            self.verbosity = 2
-
-        start_dir = options.start
-        pattern = options.pattern
-        top_level_dir = options.top
-
-        loader = Loader()
-        self.test = loader.discover(start_dir, pattern, top_level_dir)
-
-    def runTests(self):
-        if (self.catchbreak
-            and getattr(unittest, 'installHandler', None) is not None):
-            unittest.installHandler()
-        if self.testRunner is None:
-            self.testRunner = TestToolsTestRunner(sys.stdout)
-        if isinstance(self.testRunner, classtypes()):
-            try:
-                testRunner = self.testRunner(verbosity=self.verbosity,
-                                             failfast=self.failfast,
-                                             buffer=self.buffer)
-            except TypeError:
-                # didn't accept the verbosity, buffer or failfast arguments
-                testRunner = self.testRunner()
-        else:
-            # it is assumed to be a TestRunner instance
-            testRunner = self.testRunner
-        self.result = testRunner.run(self.test)
-        if self.exit:
-            sys.exit(not self.result.wasSuccessful())
-################
-
-def main(argv, stdout):
-    runner = TestToolsTestRunner(stdout)
-    program = TestProgram(argv=argv, testRunner=runner, stdout=stdout)
-
-if __name__ == '__main__':
-    main(sys.argv, sys.stdout)
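
For context, a sketch of driving the removed runner programmatically instead of via 'python -m testtools.run' (it assumes the vendored testtools is importable; 'mypackage.tests' is a hypothetical test module name):

    import sys
    from testtools.run import TestProgram, TestToolsTestRunner

    runner = TestToolsTestRunner(sys.stdout)
    # exit=False keeps TestProgram from calling sys.exit() after the run.
    TestProgram(argv=['testtools.run', 'mypackage.tests'],
                testRunner=runner, stdout=sys.stdout, exit=False)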

=== removed file 'python-for-subunit2junitxml/testtools/runtest.py'
--- python-for-subunit2junitxml/testtools/runtest.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/runtest.py	1970-01-01 00:00:00 +0000
@@ -1,200 +0,0 @@
-# Copyright (c) 2009-2010 testtools developers. See LICENSE for details.
-
-"""Individual test case execution."""
-
-__all__ = [
-    'MultipleExceptions',
-    'RunTest',
-    ]
-
-import sys
-
-from testtools.testresult import ExtendedToOriginalDecorator
-
-
-class MultipleExceptions(Exception):
-    """Represents many exceptions raised from some operation.
-
-    :ivar args: The sys.exc_info() tuples for each exception.
-    """
-
-
-class RunTest(object):
-    """An object to run a test.
-
-    RunTest objects are used to implement the internal logic involved in
-    running a test. TestCase.__init__ stores _RunTest as the class of RunTest
-    to execute.  Passing the runTest= parameter to TestCase.__init__ allows a
-    different RunTest class to be used to execute the test.
-
-    Subclassing or replacing RunTest can be useful to add functionality to the
-    way that tests are run in a given project.
-
-    :ivar case: The test case that is to be run.
-    :ivar result: The result object a case is reporting to.
-    :ivar handlers: A list of (ExceptionClass, handler_function) for
-        exceptions that should be caught if raised from the user
-        code. Exceptions that are caught are checked against this list in
-        first to last order.  There is a catch-all of 'Exception' at the end
-        of the list, so to add a new exception to the list, insert it at the
-        front (which ensures that it will be checked before any existing base
-        classes in the list). If you add multiple exceptions, some of which are
-        subclasses of each other, add the most specific exceptions last (so
-        they come before their parent classes in the list).
-    :ivar exception_caught: An object returned when _run_user catches an
-        exception.
-    :ivar _exceptions: A list of caught exceptions, used to do the single
-        reporting of error/failure/skip etc.
-    """
-
-    def __init__(self, case, handlers=None):
-        """Create a RunTest to run a case.
-
-        :param case: A testtools.TestCase test case object.
-        :param handlers: Exception handlers for this RunTest. These are stored
-            in self.handlers and can be modified later if needed.
-        """
-        self.case = case
-        self.handlers = handlers or []
-        self.exception_caught = object()
-        self._exceptions = []
-
-    def run(self, result=None):
-        """Run self.case reporting activity to result.
-
-        :param result: Optional testtools.TestResult to report activity to.
-        :return: The result object the test was run against.
-        """
-        if result is None:
-            actual_result = self.case.defaultTestResult()
-            actual_result.startTestRun()
-        else:
-            actual_result = result
-        try:
-            return self._run_one(actual_result)
-        finally:
-            if result is None:
-                actual_result.stopTestRun()
-
-    def _run_one(self, result):
-        """Run one test reporting to result.
-
-        :param result: A testtools.TestResult to report activity to.
-            This result object is decorated with an ExtendedToOriginalDecorator
-            to ensure that the latest TestResult API can be used with
-            confidence by client code.
-        :return: The result object the test was run against.
-        """
-        return self._run_prepared_result(ExtendedToOriginalDecorator(result))
-
-    def _run_prepared_result(self, result):
-        """Run one test reporting to result.
-
-        :param result: A testtools.TestResult to report activity to.
-        :return: The result object the test was run against.
-        """
-        result.startTest(self.case)
-        self.result = result
-        try:
-            self._exceptions = []
-            self._run_core()
-            if self._exceptions:
-                # One or more caught exceptions, now trigger the test's
-                # reporting method for just one.
-                e = self._exceptions.pop()
-                for exc_class, handler in self.handlers:
-                    if isinstance(e, exc_class):
-                        handler(self.case, self.result, e)
-                        break
-        finally:
-            result.stopTest(self.case)
-        return result
-
-    def _run_core(self):
-        """Run the user supplied test code."""
-        if self.exception_caught == self._run_user(self.case._run_setup,
-            self.result):
-            # Don't run the test method if we failed getting here.
-            self._run_cleanups(self.result)
-            return
-        # Run everything from here on in. If any of the methods raise an
-        # exception we'll have failed.
-        failed = False
-        try:
-            if self.exception_caught == self._run_user(
-                self.case._run_test_method, self.result):
-                failed = True
-        finally:
-            try:
-                if self.exception_caught == self._run_user(
-                    self.case._run_teardown, self.result):
-                    failed = True
-            finally:
-                try:
-                    if self.exception_caught == self._run_user(
-                        self._run_cleanups, self.result):
-                        failed = True
-                finally:
-                    if not failed:
-                        self.result.addSuccess(self.case,
-                            details=self.case.getDetails())
-
-    def _run_cleanups(self, result):
-        """Run the cleanups that have been added with addCleanup.
-
-        See the docstring for addCleanup for more information.
-
-        :return: None if all cleanups ran without error,
-            ``exception_caught`` if there was an error.
-        """
-        failing = False
-        while self.case._cleanups:
-            function, arguments, keywordArguments = self.case._cleanups.pop()
-            got_exception = self._run_user(
-                function, *arguments, **keywordArguments)
-            if got_exception == self.exception_caught:
-                failing = True
-        if failing:
-            return self.exception_caught
-
-    def _run_user(self, fn, *args, **kwargs):
-        """Run a user supplied function.
-
-        Exceptions are processed by `_got_user_exception`.
-
-        :return: Either whatever 'fn' returns or ``exception_caught`` if
-            'fn' raised an exception.
-        """
-        try:
-            return fn(*args, **kwargs)
-        except KeyboardInterrupt:
-            raise
-        except:
-            return self._got_user_exception(sys.exc_info())
-
-    def _got_user_exception(self, exc_info, tb_label='traceback'):
-        """Called when user code raises an exception.
-
-        If 'exc_info' is a `MultipleExceptions`, then we recurse into it
-        unpacking the errors that it's made up from.
-
-        :param exc_info: A sys.exc_info() tuple for the user error.
-        :param tb_label: An optional string label for the error.  If
-            not specified, will default to 'traceback'.
-        :return: 'exception_caught' if we catch one of the exceptions that
-            have handlers in 'handlers', otherwise raise the error.
-        """
-        if exc_info[0] is MultipleExceptions:
-            for sub_exc_info in exc_info[1].args:
-                self._got_user_exception(sub_exc_info, tb_label)
-            return self.exception_caught
-        try:
-            e = exc_info[1]
-            self.case.onException(exc_info, tb_label=tb_label)
-        finally:
-            del exc_info
-        for exc_class, handler in self.handlers:
-            if isinstance(e, exc_class):
-                self._exceptions.append(e)
-                return self.exception_caught
-        raise e
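
For context, a sketch of how a custom RunTest from the module removed above is plugged into a test case (assuming the vendored testtools is importable; LoggingRunTest and DemoTest are hypothetical names):

    from testtools.runtest import RunTest
    from testtools.testcase import TestCase
    from testtools.testresult import TestResult

    class LoggingRunTest(RunTest):
        """Note each case before delegating to the normal RunTest machinery."""
        def _run_core(self):
            print('running %s' % self.case.id())
            RunTest._run_core(self)

    class DemoTest(TestCase):
        run_tests_with = LoggingRunTest   # see TestCase.run_tests_with below

        def test_ok(self):
            self.assertEqual(1, 1)

    result = TestResult()
    DemoTest('test_ok').run(result)
    print(result.wasSuccessful())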

=== removed file 'python-for-subunit2junitxml/testtools/testcase.py'
--- python-for-subunit2junitxml/testtools/testcase.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/testcase.py	1970-01-01 00:00:00 +0000
@@ -1,724 +0,0 @@
-# Copyright (c) 2008-2011 testtools developers. See LICENSE for details.
-
-"""Test case related stuff."""
-
-__metaclass__ = type
-__all__ = [
-    'clone_test_with_new_id',
-    'ExpectedException',
-    'run_test_with',
-    'skip',
-    'skipIf',
-    'skipUnless',
-    'TestCase',
-    ]
-
-import copy
-import itertools
-import re
-import sys
-import types
-import unittest
-
-from testtools import (
-    content,
-    try_import,
-    )
-from testtools.compat import advance_iterator
-from testtools.matchers import (
-    Annotate,
-    Equals,
-    )
-from testtools.monkey import patch
-from testtools.runtest import RunTest
-from testtools.testresult import TestResult
-
-wraps = try_import('functools.wraps')
-
-class TestSkipped(Exception):
-    """Raised within TestCase.run() when a test is skipped."""
-TestSkipped = try_import('unittest2.case.SkipTest', TestSkipped)
-TestSkipped = try_import('unittest.case.SkipTest', TestSkipped)
-
-
-class _UnexpectedSuccess(Exception):
-    """An unexpected success was raised.
-
-    Note that this exception is private plumbing in testtools' testcase
-    module.
-    """
-_UnexpectedSuccess = try_import(
-    'unittest2.case._UnexpectedSuccess', _UnexpectedSuccess)
-_UnexpectedSuccess = try_import(
-    'unittest.case._UnexpectedSuccess', _UnexpectedSuccess)
-
-class _ExpectedFailure(Exception):
-    """An expected failure occured.
-
-    Note that this exception is private plumbing in testtools' testcase
-    module.
-    """
-_ExpectedFailure = try_import(
-    'unittest2.case._ExpectedFailure', _ExpectedFailure)
-_ExpectedFailure = try_import(
-    'unittest.case._ExpectedFailure', _ExpectedFailure)
-
-
-def run_test_with(test_runner, **kwargs):
-    """Decorate a test as using a specific ``RunTest``.
-
-    e.g.::
-
-      @run_test_with(CustomRunner, timeout=42)
-      def test_foo(self):
-          self.assertTrue(True)
-
-    The returned decorator works by setting an attribute on the decorated
-    function.  `TestCase.__init__` looks for this attribute when deciding on a
-    ``RunTest`` factory.  If you wish to use multiple decorators on a test
-    method, then you must either make this one the top-most decorator, or you
-    must write your decorators so that they update the wrapping function with
-    the attributes of the wrapped function.  The latter is recommended style
-    anyway.  ``functools.wraps``, ``functools.update_wrapper`` and
-    ``twisted.python.util.mergeFunctionMetadata`` can help you do this.
-
-    :param test_runner: A ``RunTest`` factory that takes a test case and an
-        optional list of exception handlers.  See ``RunTest``.
-    :param kwargs: Keyword arguments to pass on as extra arguments to
-        'test_runner'.
-    :return: A decorator to be used for marking a test as needing a special
-        runner.
-    """
-    def decorator(function):
-        # Set an attribute on 'function' which will inform TestCase how to
-        # make the runner.
-        function._run_test_with = (
-            lambda case, handlers=None:
-                test_runner(case, handlers=handlers, **kwargs))
-        return function
-    return decorator
-
-
-class TestCase(unittest.TestCase):
-    """Extensions to the basic TestCase.
-
-    :ivar exception_handlers: Exceptions to catch from setUp, runTest and
-        tearDown. This list is able to be modified at any time and consists of
-        (exception_class, handler(case, result, exception_value)) pairs.
-    :cvar run_tests_with: A factory to make the ``RunTest`` to run tests with.
-        Defaults to ``RunTest``.  The factory is expected to take a test case
-        and an optional list of exception handlers.
-    """
-
-    skipException = TestSkipped
-
-    run_tests_with = RunTest
-
-    def __init__(self, *args, **kwargs):
-        """Construct a TestCase.
-
-        :param testMethod: The name of the method to run.
-        :keyword runTest: Optional class to use to execute the test. If not
-            supplied ``RunTest`` is used. The instance to be used is created
-            when run() is invoked, so will be fresh each time. Overrides
-            ``TestCase.run_tests_with`` if given.
-        """
-        runTest = kwargs.pop('runTest', None)
-        unittest.TestCase.__init__(self, *args, **kwargs)
-        self._cleanups = []
-        self._unique_id_gen = itertools.count(1)
-        # Generators to ensure unique traceback ids.  Maps traceback label to
-        # iterators.
-        self._traceback_id_gens = {}
-        self.__setup_called = False
-        self.__teardown_called = False
-        # __details is lazy-initialized so that a constructed-but-not-run
-        # TestCase is safe to use with clone_test_with_new_id.
-        self.__details = None
-        test_method = self._get_test_method()
-        if runTest is None:
-            runTest = getattr(
-                test_method, '_run_test_with', self.run_tests_with)
-        self.__RunTest = runTest
-        self.__exception_handlers = []
-        self.exception_handlers = [
-            (self.skipException, self._report_skip),
-            (self.failureException, self._report_failure),
-            (_ExpectedFailure, self._report_expected_failure),
-            (_UnexpectedSuccess, self._report_unexpected_success),
-            (Exception, self._report_error),
-            ]
-        if sys.version_info < (2, 6):
-            # Catch old-style string exceptions with None as the instance
-            self.exception_handlers.append((type(None), self._report_error))
-
-    def __eq__(self, other):
-        eq = getattr(unittest.TestCase, '__eq__', None)
-        if eq is not None and not unittest.TestCase.__eq__(self, other):
-            return False
-        return self.__dict__ == other.__dict__
-
-    def __repr__(self):
-        # We add id to the repr because it makes testing testtools easier.
-        return "<%s id=0x%0x>" % (self.id(), id(self))
-
-    def addDetail(self, name, content_object):
-        """Add a detail to be reported with this test's outcome.
-
-        For more details see pydoc testtools.TestResult.
-
-        :param name: The name to give this detail.
-        :param content_object: The content object for this detail. See
-            testtools.content for more detail.
-        """
-        if self.__details is None:
-            self.__details = {}
-        self.__details[name] = content_object
-
-    def getDetails(self):
-        """Get the details dict that will be reported with this test's outcome.
-
-        For more details see pydoc testtools.TestResult.
-        """
-        if self.__details is None:
-            self.__details = {}
-        return self.__details
-
-    def patch(self, obj, attribute, value):
-        """Monkey-patch 'obj.attribute' to 'value' while the test is running.
-
-        If 'obj' has no attribute, then the monkey-patch will still go ahead,
-        and the attribute will be deleted instead of restored to its original
-        value.
-
-        :param obj: The object to patch. Can be anything.
-        :param attribute: The attribute on 'obj' to patch.
-        :param value: The value to set 'obj.attribute' to.
-        """
-        self.addCleanup(patch(obj, attribute, value))
-
-    def shortDescription(self):
-        return self.id()
-
-    def skipTest(self, reason):
-        """Cause this test to be skipped.
-
-        This raises self.skipException(reason). skipException is raised
-        to permit a skip to be triggered at any point (during setUp or the
-        testMethod itself). The run() method catches skipException and
-        translates that into a call to the result object's addSkip method.
-
-        :param reason: The reason why the test is being skipped. This must
-            support being cast into a unicode string for reporting.
-        """
-        raise self.skipException(reason)
-
-    # skipTest is how Python 2.7 spells this. Sometime in the future
-    # this should be given a deprecation decorator - RBC 20100611.
-    skip = skipTest
-
-    def _formatTypes(self, classOrIterable):
-        """Format a class or a bunch of classes for display in an error."""
-        className = getattr(classOrIterable, '__name__', None)
-        if className is None:
-            className = ', '.join(klass.__name__ for klass in classOrIterable)
-        return className
-
-    def addCleanup(self, function, *arguments, **keywordArguments):
-        """Add a cleanup function to be called after tearDown.
-
-        Functions added with addCleanup will be called in reverse order of
-        adding after tearDown, or after setUp if setUp raises an exception.
-
-        If a function added with addCleanup raises an exception, the error
-        will be recorded as a test error, and the next cleanup will then be
-        run.
-
-        Cleanup functions are always called before a test finishes running,
-        even if setUp is aborted by an exception.
-        """
-        self._cleanups.append((function, arguments, keywordArguments))
-
-    def addOnException(self, handler):
-        """Add a handler to be called when an exception occurs in test code.
-
-        This handler cannot affect what result methods are called, and is
-        called before any outcome is called on the result object. An example
-        use for it is to add some diagnostic state to the test details dict
-        which is expensive to calculate and not interesting for reporting in
-        the success case.
-
-        Handlers are called before the outcome (such as addFailure) that
-        the exception has caused.
-
-        Handlers are called in first-added, first-called order, and if they
-        raise an exception, that will propagate out of the test running
-        machinery, halting test processing. As a result, do not call code that
-        may unreasonably fail.
-        """
-        self.__exception_handlers.append(handler)
-
-    def _add_reason(self, reason):
-        self.addDetail('reason', content.Content(
-            content.ContentType('text', 'plain'),
-            lambda: [reason.encode('utf8')]))
-
-    def assertEqual(self, expected, observed, message=''):
-        """Assert that 'expected' is equal to 'observed'.
-
-        :param expected: The expected value.
-        :param observed: The observed value.
-        :param message: An optional message to include in the error.
-        """
-        matcher = Equals(expected)
-        if message:
-            matcher = Annotate(message, matcher)
-        self.assertThat(observed, matcher)
-
-    failUnlessEqual = assertEquals = assertEqual
-
-    def assertIn(self, needle, haystack):
-        """Assert that needle is in haystack."""
-        self.assertTrue(
-            needle in haystack, '%r not in %r' % (needle, haystack))
-
-    def assertIs(self, expected, observed, message=''):
-        """Assert that 'expected' is 'observed'.
-
-        :param expected: The expected value.
-        :param observed: The observed value.
-        :param message: An optional message describing the error.
-        """
-        if message:
-            message = ': ' + message
-        self.assertTrue(
-            expected is observed,
-            '%r is not %r%s' % (expected, observed, message))
-
-    def assertIsNot(self, expected, observed, message=''):
-        """Assert that 'expected' is not 'observed'."""
-        if message:
-            message = ': ' + message
-        self.assertTrue(
-            expected is not observed,
-            '%r is %r%s' % (expected, observed, message))
-
-    def assertNotIn(self, needle, haystack):
-        """Assert that needle is not in haystack."""
-        self.assertTrue(
-            needle not in haystack, '%r in %r' % (needle, haystack))
-
-    def assertIsInstance(self, obj, klass, msg=None):
-        if msg is None:
-            msg = '%r is not an instance of %s' % (
-                obj, self._formatTypes(klass))
-        self.assertTrue(isinstance(obj, klass), msg)
-
-    def assertRaises(self, excClass, callableObj, *args, **kwargs):
-        """Fail unless an exception of class excClass is thrown
-           by callableObj when invoked with arguments args and keyword
-           arguments kwargs. If a different type of exception is
-           thrown, it will not be caught, and the test case will be
-           deemed to have suffered an error, exactly as for an
-           unexpected exception.
-        """
-        try:
-            ret = callableObj(*args, **kwargs)
-        except excClass:
-            return sys.exc_info()[1]
-        else:
-            excName = self._formatTypes(excClass)
-            self.fail("%s not raised, %r returned instead." % (excName, ret))
-    failUnlessRaises = assertRaises
-
-    def assertThat(self, matchee, matcher):
-        """Assert that matchee is matched by matcher.
-
-        :param matchee: An object to match with matcher.
-        :param matcher: An object meeting the testtools.Matcher protocol.
-        :raises self.failureException: When matcher does not match matchee.
-        """
-        mismatch = matcher.match(matchee)
-        if not mismatch:
-            return
-        existing_details = self.getDetails()
-        for (name, content) in mismatch.get_details().items():
-            full_name = name
-            suffix = 1
-            while full_name in existing_details:
-                full_name = "%s-%d" % (name, suffix)
-                suffix += 1
-            self.addDetail(full_name, content)
-        self.fail('Match failed. Matchee: "%s"\nMatcher: %s\nDifference: %s\n'
-            % (matchee, matcher, mismatch.describe()))
-
-    def defaultTestResult(self):
-        return TestResult()
-
-    def expectFailure(self, reason, predicate, *args, **kwargs):
-        """Check that a test fails in a particular way.
-
-        If the test fails in the expected way, a KnownFailure is caused. If it
-        succeeds, an UnexpectedSuccess is caused.
-
-        The expected use of expectFailure is as a barrier at the point in a
-        test where the test would fail. For example:
-        >>> def test_foo(self):
-        >>>    self.expectFailure("1 should be 0", self.assertNotEqual, 1, 0)
-        >>>    self.assertEqual(1, 0)
-
-        If in the future 1 were to equal 0, the expectFailure call can simply
-        be removed. This separation preserves the original intent of the test
-        while it is in the expectFailure mode.
-        """
-        self._add_reason(reason)
-        try:
-            predicate(*args, **kwargs)
-        except self.failureException:
-            # GZ 2010-08-12: Don't know how to avoid exc_info cycle as the new
-            #                unittest _ExpectedFailure wants old traceback
-            exc_info = sys.exc_info()
-            try:
-                self._report_traceback(exc_info)
-                raise _ExpectedFailure(exc_info)
-            finally:
-                del exc_info
-        else:
-            raise _UnexpectedSuccess(reason)
-
-    def getUniqueInteger(self):
-        """Get an integer unique to this test.
-
-        Returns an integer that is guaranteed to be unique to this instance.
-        Use this when you need an arbitrary integer in your test, or as a
-        helper for custom anonymous factory methods.
-        """
-        return advance_iterator(self._unique_id_gen)
-
-    def getUniqueString(self, prefix=None):
-        """Get a string unique to this test.
-
-        Returns a string that is guaranteed to be unique to this instance. Use
-        this when you need an arbitrary string in your test, or as a helper
-        for custom anonymous factory methods.
-
-        :param prefix: The prefix of the string. If not provided, defaults
-            to the id of the test.
-        :return: A bytestring of '<prefix>-<unique_int>'.
-        """
-        if prefix is None:
-            prefix = self.id()
-        return '%s-%d' % (prefix, self.getUniqueInteger())
-
-    def onException(self, exc_info, tb_label='traceback'):
-        """Called when an exception propogates from test code.
-
-        :seealso: addOnException
-        """
-        if exc_info[0] not in [
-            TestSkipped, _UnexpectedSuccess, _ExpectedFailure]:
-            self._report_traceback(exc_info, tb_label=tb_label)
-        for handler in self.__exception_handlers:
-            handler(exc_info)
-
-    @staticmethod
-    def _report_error(self, result, err):
-        result.addError(self, details=self.getDetails())
-
-    @staticmethod
-    def _report_expected_failure(self, result, err):
-        result.addExpectedFailure(self, details=self.getDetails())
-
-    @staticmethod
-    def _report_failure(self, result, err):
-        result.addFailure(self, details=self.getDetails())
-
-    @staticmethod
-    def _report_skip(self, result, err):
-        if err.args:
-            reason = err.args[0]
-        else:
-            reason = "no reason given."
-        self._add_reason(reason)
-        result.addSkip(self, details=self.getDetails())
-
-    def _report_traceback(self, exc_info, tb_label='traceback'):
-        id_gen = self._traceback_id_gens.setdefault(
-            tb_label, itertools.count(0))
-        tb_id = advance_iterator(id_gen)
-        if tb_id:
-            tb_label = '%s-%d' % (tb_label, tb_id)
-        self.addDetail(tb_label, content.TracebackContent(exc_info, self))
-
-    @staticmethod
-    def _report_unexpected_success(self, result, err):
-        result.addUnexpectedSuccess(self, details=self.getDetails())
-
-    def run(self, result=None):
-        return self.__RunTest(self, self.exception_handlers).run(result)
-
-    def _run_setup(self, result):
-        """Run the setUp function for this test.
-
-        :param result: A testtools.TestResult to report activity to.
-        :raises ValueError: If the base class setUp is not called, a
-            ValueError is raised.
-        """
-        ret = self.setUp()
-        if not self.__setup_called:
-            raise ValueError(
-                "TestCase.setUp was not called. Have you upcalled all the "
-                "way up the hierarchy from your setUp? e.g. Call "
-                "super(%s, self).setUp() from your setUp()."
-                % self.__class__.__name__)
-        return ret
-
-    def _run_teardown(self, result):
-        """Run the tearDown function for this test.
-
-        :param result: A testtools.TestResult to report activity to.
-        :raises ValueError: If the base class tearDown is not called, a
-            ValueError is raised.
-        """
-        ret = self.tearDown()
-        if not self.__teardown_called:
-            raise ValueError(
-                "TestCase.tearDown was not called. Have you upcalled all the "
-                "way up the hierarchy from your tearDown? e.g. Call "
-                "super(%s, self).tearDown() from your tearDown()."
-                % self.__class__.__name__)
-        return ret
-
-    def _get_test_method(self):
-        absent_attr = object()
-        # Python 2.5+
-        method_name = getattr(self, '_testMethodName', absent_attr)
-        if method_name is absent_attr:
-            # Python 2.4
-            method_name = getattr(self, '_TestCase__testMethodName')
-        return getattr(self, method_name)
-
-    def _run_test_method(self, result):
-        """Run the test method for this test.
-
-        :param result: A testtools.TestResult to report activity to.
-        :return: None.
-        """
-        return self._get_test_method()()
-
-    def useFixture(self, fixture):
-        """Use fixture in a test case.
-
-        The fixture will be setUp, and self.addCleanup(fixture.cleanUp) called.
-
-        :param fixture: The fixture to use.
-        :return: The fixture, after setting it up and scheduling a cleanup for
-           it.
-        """
-        fixture.setUp()
-        self.addCleanup(fixture.cleanUp)
-        self.addCleanup(self._gather_details, fixture.getDetails)
-        return fixture
-
-    def _gather_details(self, getDetails):
-        """Merge the details from getDetails() into self.getDetails()."""
-        details = getDetails()
-        my_details = self.getDetails()
-        for name, content_object in details.items():
-            new_name = name
-            disambiguator = itertools.count(1)
-            while new_name in my_details:
-                new_name = '%s-%d' % (name, advance_iterator(disambiguator))
-            name = new_name
-            content_bytes = list(content_object.iter_bytes())
-            content_callback = lambda:content_bytes
-            self.addDetail(name,
-                content.Content(content_object.content_type, content_callback))
-
-    def setUp(self):
-        unittest.TestCase.setUp(self)
-        self.__setup_called = True
-
-    def tearDown(self):
-        unittest.TestCase.tearDown(self)
-        self.__teardown_called = True
-
-
-class PlaceHolder(object):
-    """A placeholder test.
-
-    `PlaceHolder` implements much of the same interface as TestCase and is
-    particularly suitable for being added to TestResults.
-    """
-
-    def __init__(self, test_id, short_description=None):
-        """Construct a `PlaceHolder`.
-
-        :param test_id: The id of the placeholder test.
-        :param short_description: The short description of the place holder
-            test. If not provided, the id will be used instead.
-        """
-        self._test_id = test_id
-        self._short_description = short_description
-
-    def __call__(self, result=None):
-        return self.run(result=result)
-
-    def __repr__(self):
-        internal = [self._test_id]
-        if self._short_description is not None:
-            internal.append(self._short_description)
-        return "<%s.%s(%s)>" % (
-            self.__class__.__module__,
-            self.__class__.__name__,
-            ", ".join(map(repr, internal)))
-
-    def __str__(self):
-        return self.id()
-
-    def countTestCases(self):
-        return 1
-
-    def debug(self):
-        pass
-
-    def id(self):
-        return self._test_id
-
-    def run(self, result=None):
-        if result is None:
-            result = TestResult()
-        result.startTest(self)
-        result.addSuccess(self)
-        result.stopTest(self)
-
-    def shortDescription(self):
-        if self._short_description is None:
-            return self.id()
-        else:
-            return self._short_description
-
-
-class ErrorHolder(PlaceHolder):
-    """A placeholder test that will error out when run."""
-
-    failureException = None
-
-    def __init__(self, test_id, error, short_description=None):
-        """Construct an `ErrorHolder`.
-
-        :param test_id: The id of the test.
-        :param error: The exc info tuple that will be used as the test's error.
-        :param short_description: An optional short description of the test.
-        """
-        super(ErrorHolder, self).__init__(
-            test_id, short_description=short_description)
-        self._error = error
-
-    def __repr__(self):
-        internal = [self._test_id, self._error]
-        if self._short_description is not None:
-            internal.append(self._short_description)
-        return "<%s.%s(%s)>" % (
-            self.__class__.__module__,
-            self.__class__.__name__,
-            ", ".join(map(repr, internal)))
-
-    def run(self, result=None):
-        if result is None:
-            result = TestResult()
-        result.startTest(self)
-        result.addError(self, self._error)
-        result.stopTest(self)
-
-
-# Python 2.4 did not know how to copy functions.
-if types.FunctionType not in copy._copy_dispatch:
-    copy._copy_dispatch[types.FunctionType] = copy._copy_immutable
-
-
-def clone_test_with_new_id(test, new_id):
-    """Copy a `TestCase`, and give the copied test a new id.
-
-    This is only expected to be used on tests that have been constructed but
-    not executed.
-    """
-    newTest = copy.copy(test)
-    newTest.id = lambda: new_id
-    return newTest
-
-
-def skip(reason):
-    """A decorator to skip unit tests.
-
-    This is just syntactic sugar so users don't have to change any of their
-    unit tests in order to migrate to python 2.7, which provides the
-    @unittest.skip decorator.
-    """
-    def decorator(test_item):
-        if wraps is not None:
-            @wraps(test_item)
-            def skip_wrapper(*args, **kwargs):
-                raise TestCase.skipException(reason)
-        else:
-            def skip_wrapper(test_item):
-                test_item.skip(reason)
-        return skip_wrapper
-    return decorator
-
-
-def skipIf(condition, reason):
-    """Skip a test if the condition is true."""
-    if condition:
-        return skip(reason)
-    def _id(obj):
-        return obj
-    return _id
-
-
-def skipUnless(condition, reason):
-    """Skip a test unless the condition is true."""
-    if not condition:
-        return skip(reason)
-    def _id(obj):
-        return obj
-    return _id
-
-
-class ExpectedException:
-    """A context manager to handle expected exceptions.
-
-    In Python 2.5 or later::
-
-      def test_foo(self):
-          with ExpectedException(ValueError, 'fo.*'):
-              raise ValueError('foo')
-
-    will pass.  If the raised exception has a type other than the specified
-    type, it will be re-raised.  If it has a 'str()' that does not match the
-    given regular expression, an AssertionError will be raised.  If no
-    exception is raised, an AssertionError will be raised.
-    """
-
-    def __init__(self, exc_type, value_re):
-        """Construct an `ExpectedException`.
-
-        :param exc_type: The type of exception to expect.
-        :param value_re: A regular expression to match against the
-            'str()' of the raised exception.
-        """
-        self.exc_type = exc_type
-        self.value_re = value_re
-
-    def __enter__(self):
-        pass
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        if exc_type is None:
-            raise AssertionError('%s not raised.' % self.exc_type.__name__)
-        if exc_type != self.exc_type:
-            return False
-        if not re.match(self.value_re, str(exc_value)):
-            raise AssertionError('"%s" does not match "%s".' %
-                                 (str(exc_value), self.value_re))
-        return True

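For reference, the extended TestCase removed above is driven through the same protocol as the standard library's unittest. The following is a minimal usage sketch, assuming the bundled package is still importable as plain testtools and exports TestCase, content and matchers the way upstream testtools of this era does (those import paths are assumptions, not shown in the diff):

import unittest

from testtools import TestCase, content
from testtools.matchers import Equals


class DemoTest(TestCase):

    def test_unique_values_and_details(self):
        # addDetail attaches extra diagnostic content to the outcome, in the
        # same way _add_reason does in the removed code above.
        self.addDetail('note', content.Content(
            content.ContentType('text', 'plain'),
            lambda: ['example detail'.encode('utf8')]))
        # getUniqueInteger yields consecutive integers per test instance.
        first = self.getUniqueInteger()
        second = self.getUniqueInteger()
        # assertThat reports mismatch details through the matcher protocol.
        self.assertThat(second, Equals(first + 1))


if __name__ == '__main__':
    unittest.main()
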
=== removed directory 'python-for-subunit2junitxml/testtools/testresult'
=== removed file 'python-for-subunit2junitxml/testtools/testresult/__init__.py'
--- python-for-subunit2junitxml/testtools/testresult/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/testresult/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 testtools developers. See LICENSE for details.
-
-"""Test result objects."""
-
-__all__ = [
-    'ExtendedToOriginalDecorator',
-    'MultiTestResult',
-    'TestResult',
-    'TextTestResult',
-    'ThreadsafeForwardingResult',
-    ]
-
-from testtools.testresult.real import (
-    ExtendedToOriginalDecorator,
-    MultiTestResult,
-    TestResult,
-    TextTestResult,
-    ThreadsafeForwardingResult,
-    )

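The removed __init__ above is a pure re-export of the result classes from testtools.testresult.real, so (assuming the package is importable) both import forms below name the same object:

from testtools.testresult import TestResult
from testtools.testresult.real import TestResult as RealTestResult

# The package __init__ only re-exports, so both names refer to one class.
assert TestResult is RealTestResult
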
=== removed file 'python-for-subunit2junitxml/testtools/testresult/doubles.py'
--- python-for-subunit2junitxml/testtools/testresult/doubles.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/testresult/doubles.py	1970-01-01 00:00:00 +0000
@@ -1,111 +0,0 @@
-# Copyright (c) 2009-2010 testtools developers. See LICENSE for details.
-
-"""Doubles of test result objects, useful for testing unittest code."""
-
-__all__ = [
-    'Python26TestResult',
-    'Python27TestResult',
-    'ExtendedTestResult',
-    ]
-
-
-class LoggingBase(object):
-    """Basic support for logging of results."""
-
-    def __init__(self):
-        self._events = []
-        self.shouldStop = False
-        self._was_successful = True
-
-
-class Python26TestResult(LoggingBase):
-    """A precisely python 2.6 like test result, that logs."""
-
-    def addError(self, test, err):
-        self._was_successful = False
-        self._events.append(('addError', test, err))
-
-    def addFailure(self, test, err):
-        self._was_successful = False
-        self._events.append(('addFailure', test, err))
-
-    def addSuccess(self, test):
-        self._events.append(('addSuccess', test))
-
-    def startTest(self, test):
-        self._events.append(('startTest', test))
-
-    def stop(self):
-        self.shouldStop = True
-
-    def stopTest(self, test):
-        self._events.append(('stopTest', test))
-
-    def wasSuccessful(self):
-        return self._was_successful
-
-
-class Python27TestResult(Python26TestResult):
-    """A precisely python 2.7 like test result, that logs."""
-
-    def addExpectedFailure(self, test, err):
-        self._events.append(('addExpectedFailure', test, err))
-
-    def addSkip(self, test, reason):
-        self._events.append(('addSkip', test, reason))
-
-    def addUnexpectedSuccess(self, test):
-        self._events.append(('addUnexpectedSuccess', test))
-
-    def startTestRun(self):
-        self._events.append(('startTestRun',))
-
-    def stopTestRun(self):
-        self._events.append(('stopTestRun',))
-
-
-class ExtendedTestResult(Python27TestResult):
-    """A test result like the proposed extended unittest result API."""
-
-    def addError(self, test, err=None, details=None):
-        self._was_successful = False
-        self._events.append(('addError', test, err or details))
-
-    def addFailure(self, test, err=None, details=None):
-        self._was_successful = False
-        self._events.append(('addFailure', test, err or details))
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        self._events.append(('addExpectedFailure', test, err or details))
-
-    def addSkip(self, test, reason=None, details=None):
-        self._events.append(('addSkip', test, reason or details))
-
-    def addSuccess(self, test, details=None):
-        if details:
-            self._events.append(('addSuccess', test, details))
-        else:
-            self._events.append(('addSuccess', test))
-
-    def addUnexpectedSuccess(self, test, details=None):
-        self._was_successful = False
-        if details is not None:
-            self._events.append(('addUnexpectedSuccess', test, details))
-        else:
-            self._events.append(('addUnexpectedSuccess', test))
-
-    def progress(self, offset, whence):
-        self._events.append(('progress', offset, whence))
-
-    def startTestRun(self):
-        super(ExtendedTestResult, self).startTestRun()
-        self._was_successful = True
-
-    def tags(self, new_tags, gone_tags):
-        self._events.append(('tags', new_tags, gone_tags))
-
-    def time(self, time):
-        self._events.append(('time', time))
-
-    def wasSuccessful(self):
-        return self._was_successful

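A small sketch of how these logging doubles are meant to be used when testing result-handling code, assuming the module above is importable: drive the double through the result API and assert on the event log it accumulates.

from testtools.testresult.doubles import Python27TestResult

result = Python27TestResult()
result.startTestRun()
result.startTest('fake-test')      # the doubles accept any object as the test
result.addSuccess('fake-test')
result.stopTest('fake-test')
result.stopTestRun()

# Every call is appended to _events in order, which is what tests assert on.
assert result.wasSuccessful()
assert result._events == [
    ('startTestRun',),
    ('startTest', 'fake-test'),
    ('addSuccess', 'fake-test'),
    ('stopTest', 'fake-test'),
    ('stopTestRun',),
]
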
=== removed file 'python-for-subunit2junitxml/testtools/testresult/real.py'
--- python-for-subunit2junitxml/testtools/testresult/real.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/testresult/real.py	1970-01-01 00:00:00 +0000
@@ -1,621 +0,0 @@
-# Copyright (c) 2008 testtools developers. See LICENSE for details.
-
-"""Test results and related things."""
-
-__metaclass__ = type
-__all__ = [
-    'ExtendedToOriginalDecorator',
-    'MultiTestResult',
-    'TestResult',
-    'ThreadsafeForwardingResult',
-    ]
-
-import datetime
-import sys
-import unittest
-
-from testtools.compat import all, _format_exc_info, str_is_unicode, _u
-
-# From http://docs.python.org/library/datetime.html
-_ZERO = datetime.timedelta(0)
-
-# A UTC class.
-
-class UTC(datetime.tzinfo):
-    """UTC"""
-
-    def utcoffset(self, dt):
-        return _ZERO
-
-    def tzname(self, dt):
-        return "UTC"
-
-    def dst(self, dt):
-        return _ZERO
-
-utc = UTC()
-
-
-class TestResult(unittest.TestResult):
-    """Subclass of unittest.TestResult extending the protocol for flexability.
-
-    This test result supports an experimental protocol for providing additional
-    data in test outcomes. All the outcome methods take an optional dict
-    'details'. If supplied any other detail parameters like 'err' or 'reason'
-    should not be provided. The details dict is a mapping from names to
-    MIME content objects (see testtools.content). This permits attaching
-    tracebacks, log files, or even large objects like databases that were
-    part of the test fixture. Until this API is accepted into upstream
-    Python it is considered experimental: it may be replaced at any point
-    by a newer version more in line with upstream Python. Compatibility would
-    be aimed for in this case, but may not be possible.
-
-    :ivar skip_reasons: A dict of skip-reasons -> list of tests. See addSkip.
-    """
-
-    def __init__(self):
-        # startTestRun resets all attributes, and older clients don't know to
-        # call startTestRun, so it is called once here.
-        # Because subclasses may reasonably not expect this, we call the 
-        # specific version we want to run.
-        TestResult.startTestRun(self)
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        """Called when a test has failed in an expected manner.
-
-        Like with addSuccess and addError, testStopped should still be called.
-
-        :param test: The test that has failed in an expected manner.
-        :param err: The exc_info of the error that was raised.
-        :return: None
-        """
-        # This is the python 2.7 implementation
-        self.expectedFailures.append(
-            (test, self._err_details_to_string(test, err, details)))
-
-    def addError(self, test, err=None, details=None):
-        """Called when an error has occurred. 'err' is a tuple of values as
-        returned by sys.exc_info().
-
-        :param details: Alternative way to supply details about the outcome.
-            see the class docstring for more information.
-        """
-        self.errors.append((test,
-            self._err_details_to_string(test, err, details)))
-
-    def addFailure(self, test, err=None, details=None):
-        """Called when an error has occurred. 'err' is a tuple of values as
-        returned by sys.exc_info().
-
-        :param details: Alternative way to supply details about the outcome.
-            see the class docstring for more information.
-        """
-        self.failures.append((test,
-            self._err_details_to_string(test, err, details)))
-
-    def addSkip(self, test, reason=None, details=None):
-        """Called when a test has been skipped rather than running.
-
-        Like with addSuccess and addError, testStopped should still be called.
-
-        This must be called by the TestCase. 'addError' and 'addFailure' will
-        not call addSkip, since they have no assumptions about the kind of
-        errors that a test can raise.
-
-        :param test: The test that has been skipped.
-        :param reason: The reason for the test being skipped. For instance,
-            u"pyGL is not available".
-        :param details: Alternative way to supply details about the outcome.
-            see the class docstring for more information.
-        :return: None
-        """
-        if reason is None:
-            reason = details.get('reason')
-            if reason is None:
-                reason = 'No reason given'
-            else:
-                reason = ''.join(reason.iter_text())
-        skip_list = self.skip_reasons.setdefault(reason, [])
-        skip_list.append(test)
-
-    def addSuccess(self, test, details=None):
-        """Called when a test succeeded."""
-
-    def addUnexpectedSuccess(self, test, details=None):
-        """Called when a test was expected to fail, but succeed."""
-        self.unexpectedSuccesses.append(test)
-
-    def wasSuccessful(self):
-        """Has this result been successful so far?
-
-        If there have been any errors, failures or unexpected successes,
-        return False.  Otherwise, return True.
-
-        Note: This differs from standard unittest in that we consider
-        unexpected successes to be equivalent to failures, rather than
-        successes.
-        """
-        return not (self.errors or self.failures or self.unexpectedSuccesses)
-
-    if str_is_unicode:
-        # Python 3 and IronPython strings are unicode, use parent class method
-        _exc_info_to_unicode = unittest.TestResult._exc_info_to_string
-    else:
-        # For Python 2, need to decode components of traceback according to
-        # their source, so can't use traceback.format_exception
-        # Here follows a little deep magic to copy the existing method and
-        # replace the formatter with one that returns unicode instead
-        from types import FunctionType as __F, ModuleType as __M
-        __f = unittest.TestResult._exc_info_to_string.im_func
-        __g = dict(__f.func_globals)
-        __m = __M("__fake_traceback")
-        __m.format_exception = _format_exc_info
-        __g["traceback"] = __m
-        _exc_info_to_unicode = __F(__f.func_code, __g, "_exc_info_to_unicode")
-        del __F, __M, __f, __g, __m
-
-    def _err_details_to_string(self, test, err=None, details=None):
-        """Convert an error in exc_info form or a contents dict to a string."""
-        if err is not None:
-            return self._exc_info_to_unicode(err, test)
-        return _details_to_str(details)
-
-    def _now(self):
-        """Return the current 'test time'.
-
-        If the time() method has not been called, this is equivalent to
-        datetime.now(), otherwise it's the last supplied datestamp given to the
-        time() method.
-        """
-        if self.__now is None:
-            return datetime.datetime.now(utc)
-        else:
-            return self.__now
-
-    def startTestRun(self):
-        """Called before a test run starts.
-
-        New in Python 2.7. The testtools version resets the result to a
-        pristine condition ready for use in another test run.  Note that this
-        is different from Python 2.7's startTestRun, which does nothing.
-        """
-        super(TestResult, self).__init__()
-        self.skip_reasons = {}
-        self.__now = None
-        # -- Start: As per python 2.7 --
-        self.expectedFailures = []
-        self.unexpectedSuccesses = []
-        # -- End:   As per python 2.7 --
-
-    def stopTestRun(self):
-        """Called after a test run completes
-
-        New in python 2.7
-        """
-
-    def time(self, a_datetime):
-        """Provide a timestamp to represent the current time.
-
-        This is useful when test activity is time delayed, or happening
-        concurrently and getting the system time between API calls will not
-        accurately represent the duration of tests (or the whole run).
-
-        Calling time() sets the datetime used by the TestResult object.
-        Time is permitted to go backwards when using this call.
-
-        :param a_datetime: A datetime.datetime object with TZ information or
-            None to reset the TestResult to gathering time from the system.
-        """
-        self.__now = a_datetime
-
-    def done(self):
-        """Called when the test runner is done.
-
-        deprecated in favour of stopTestRun.
-        """
-
-
-class MultiTestResult(TestResult):
-    """A test result that dispatches to many test results."""
-
-    def __init__(self, *results):
-        TestResult.__init__(self)
-        self._results = list(map(ExtendedToOriginalDecorator, results))
-
-    def _dispatch(self, message, *args, **kwargs):
-        return tuple(
-            getattr(result, message)(*args, **kwargs)
-            for result in self._results)
-
-    def startTest(self, test):
-        return self._dispatch('startTest', test)
-
-    def stopTest(self, test):
-        return self._dispatch('stopTest', test)
-
-    def addError(self, test, error=None, details=None):
-        return self._dispatch('addError', test, error, details=details)
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        return self._dispatch(
-            'addExpectedFailure', test, err, details=details)
-
-    def addFailure(self, test, err=None, details=None):
-        return self._dispatch('addFailure', test, err, details=details)
-
-    def addSkip(self, test, reason=None, details=None):
-        return self._dispatch('addSkip', test, reason, details=details)
-
-    def addSuccess(self, test, details=None):
-        return self._dispatch('addSuccess', test, details=details)
-
-    def addUnexpectedSuccess(self, test, details=None):
-        return self._dispatch('addUnexpectedSuccess', test, details=details)
-
-    def startTestRun(self):
-        return self._dispatch('startTestRun')
-
-    def stopTestRun(self):
-        return self._dispatch('stopTestRun')
-
-    def time(self, a_datetime):
-        return self._dispatch('time', a_datetime)
-
-    def done(self):
-        return self._dispatch('done')
-
-    def wasSuccessful(self):
-        """Was this result successful?
-
-        Only returns True if every constituent result was successful.
-        """
-        return all(self._dispatch('wasSuccessful'))
-
-
-class TextTestResult(TestResult):
-    """A TestResult which outputs activity to a text stream."""
-
-    def __init__(self, stream):
-        """Construct a TextTestResult writing to stream."""
-        super(TextTestResult, self).__init__()
-        self.stream = stream
-        self.sep1 = '=' * 70 + '\n'
-        self.sep2 = '-' * 70 + '\n'
-
-    def _delta_to_float(self, a_timedelta):
-        return (a_timedelta.days * 86400.0 + a_timedelta.seconds +
-            a_timedelta.microseconds / 1000000.0)
-
-    def _show_list(self, label, error_list):
-        for test, output in error_list:
-            self.stream.write(self.sep1)
-            self.stream.write("%s: %s\n" % (label, test.id()))
-            self.stream.write(self.sep2)
-            self.stream.write(output)
-
-    def startTestRun(self):
-        super(TextTestResult, self).startTestRun()
-        self.__start = self._now()
-        self.stream.write("Tests running...\n")
-
-    def stopTestRun(self):
-        if self.testsRun != 1:
-            plural = 's'
-        else:
-            plural = ''
-        stop = self._now()
-        self._show_list('ERROR', self.errors)
-        self._show_list('FAIL', self.failures)
-        for test in self.unexpectedSuccesses:
-            self.stream.write(
-                "%sUNEXPECTED SUCCESS: %s\n%s" % (
-                    self.sep1, test.id(), self.sep2))
-        self.stream.write("Ran %d test%s in %.3fs\n\n" %
-            (self.testsRun, plural,
-             self._delta_to_float(stop - self.__start)))
-        if self.wasSuccessful():
-            self.stream.write("OK\n")
-        else:
-            self.stream.write("FAILED (")
-            details = []
-            details.append("failures=%d" % (
-                sum(map(len, (
-                    self.failures, self.errors, self.unexpectedSuccesses)))))
-            self.stream.write(", ".join(details))
-            self.stream.write(")\n")
-        super(TextTestResult, self).stopTestRun()
-
-
-class ThreadsafeForwardingResult(TestResult):
-    """A TestResult which ensures the target does not receive mixed up calls.
-
-    This is used when receiving test results from multiple sources, and batches
-    up all the activity for a single test into a thread-safe batch where all
-    other ThreadsafeForwardingResult objects sharing the same semaphore will be
-    locked out.
-
-    Typical use of ThreadsafeForwardingResult involves creating one
-    ThreadsafeForwardingResult per thread in a ConcurrentTestSuite. These
-    forward to the TestResult that the ConcurrentTestSuite run method was
-    called with.
-
-    target.done() is called once for each ThreadsafeForwardingResult that
-    forwards to the same target. If the target's done() takes special action,
-    care should be taken to accommodate this.
-    """
-
-    def __init__(self, target, semaphore):
-        """Create a ThreadsafeForwardingResult forwarding to target.
-
-        :param target: A TestResult.
-        :param semaphore: A threading.Semaphore with limit 1.
-        """
-        TestResult.__init__(self)
-        self.result = ExtendedToOriginalDecorator(target)
-        self.semaphore = semaphore
-
-    def _add_result_with_semaphore(self, method, test, *args, **kwargs):
-        self.semaphore.acquire()
-        try:
-            self.result.time(self._test_start)
-            self.result.startTest(test)
-            self.result.time(self._now())
-            try:
-                method(test, *args, **kwargs)
-            finally:
-                self.result.stopTest(test)
-        finally:
-            self.semaphore.release()
-
-    def addError(self, test, err=None, details=None):
-        self._add_result_with_semaphore(self.result.addError,
-            test, err, details=details)
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        self._add_result_with_semaphore(self.result.addExpectedFailure,
-            test, err, details=details)
-
-    def addFailure(self, test, err=None, details=None):
-        self._add_result_with_semaphore(self.result.addFailure,
-            test, err, details=details)
-
-    def addSkip(self, test, reason=None, details=None):
-        self._add_result_with_semaphore(self.result.addSkip,
-            test, reason, details=details)
-
-    def addSuccess(self, test, details=None):
-        self._add_result_with_semaphore(self.result.addSuccess,
-            test, details=details)
-
-    def addUnexpectedSuccess(self, test, details=None):
-        self._add_result_with_semaphore(self.result.addUnexpectedSuccess,
-            test, details=details)
-
-    def startTestRun(self):
-        self.semaphore.acquire()
-        try:
-            self.result.startTestRun()
-        finally:
-            self.semaphore.release()
-
-    def stopTestRun(self):
-        self.semaphore.acquire()
-        try:
-            self.result.stopTestRun()
-        finally:
-            self.semaphore.release()
-
-    def done(self):
-        self.semaphore.acquire()
-        try:
-            self.result.done()
-        finally:
-            self.semaphore.release()
-
-    def startTest(self, test):
-        self._test_start = self._now()
-        super(ThreadsafeForwardingResult, self).startTest(test)
-
-    def wasSuccessful(self):
-        return self.result.wasSuccessful()
-
-
-class ExtendedToOriginalDecorator(object):
-    """Permit new TestResult API code to degrade gracefully with old results.
-
-    This decorates an existing TestResult and converts missing outcomes
-    such as addSkip to older outcomes such as addSuccess. It also supports
-    the extended details protocol. In all cases the most recent protocol
-    is attempted first, and fallbacks only occur when the decorated result
-    does not support the newer style of calling.
-    """
-
-    def __init__(self, decorated):
-        self.decorated = decorated
-
-    def __getattr__(self, name):
-        return getattr(self.decorated, name)
-
-    def addError(self, test, err=None, details=None):
-        self._check_args(err, details)
-        if details is not None:
-            try:
-                return self.decorated.addError(test, details=details)
-            except TypeError:
-                # have to convert
-                err = self._details_to_exc_info(details)
-        return self.decorated.addError(test, err)
-
-    def addExpectedFailure(self, test, err=None, details=None):
-        self._check_args(err, details)
-        addExpectedFailure = getattr(
-            self.decorated, 'addExpectedFailure', None)
-        if addExpectedFailure is None:
-            return self.addSuccess(test)
-        if details is not None:
-            try:
-                return addExpectedFailure(test, details=details)
-            except TypeError:
-                # have to convert
-                err = self._details_to_exc_info(details)
-        return addExpectedFailure(test, err)
-
-    def addFailure(self, test, err=None, details=None):
-        self._check_args(err, details)
-        if details is not None:
-            try:
-                return self.decorated.addFailure(test, details=details)
-            except TypeError:
-                # have to convert
-                err = self._details_to_exc_info(details)
-        return self.decorated.addFailure(test, err)
-
-    def addSkip(self, test, reason=None, details=None):
-        self._check_args(reason, details)
-        addSkip = getattr(self.decorated, 'addSkip', None)
-        if addSkip is None:
-            return self.decorated.addSuccess(test)
-        if details is not None:
-            try:
-                return addSkip(test, details=details)
-            except TypeError:
-                # extract the reason if it's available
-                try:
-                    reason = ''.join(details['reason'].iter_text())
-                except KeyError:
-                    reason = _details_to_str(details)
-        return addSkip(test, reason)
-
-    def addUnexpectedSuccess(self, test, details=None):
-        outcome = getattr(self.decorated, 'addUnexpectedSuccess', None)
-        if outcome is None:
-            try:
-                test.fail("")
-            except test.failureException:
-                return self.addFailure(test, sys.exc_info())
-        if details is not None:
-            try:
-                return outcome(test, details=details)
-            except TypeError:
-                pass
-        return outcome(test)
-
-    def addSuccess(self, test, details=None):
-        if details is not None:
-            try:
-                return self.decorated.addSuccess(test, details=details)
-            except TypeError:
-                pass
-        return self.decorated.addSuccess(test)
-
-    def _check_args(self, err, details):
-        param_count = 0
-        if err is not None:
-            param_count += 1
-        if details is not None:
-            param_count += 1
-        if param_count != 1:
-            raise ValueError("Must pass only one of err '%s' and details '%s"
-                % (err, details))
-
-    def _details_to_exc_info(self, details):
-        """Convert a details dict to an exc_info tuple."""
-        return (_StringException,
-            _StringException(_details_to_str(details)), None)
-
-    def done(self):
-        try:
-            return self.decorated.done()
-        except AttributeError:
-            return
-
-    def progress(self, offset, whence):
-        method = getattr(self.decorated, 'progress', None)
-        if method is None:
-            return
-        return method(offset, whence)
-
-    @property
-    def shouldStop(self):
-        return self.decorated.shouldStop
-
-    def startTest(self, test):
-        return self.decorated.startTest(test)
-
-    def startTestRun(self):
-        try:
-            return self.decorated.startTestRun()
-        except AttributeError:
-            return
-
-    def stop(self):
-        return self.decorated.stop()
-
-    def stopTest(self, test):
-        return self.decorated.stopTest(test)
-
-    def stopTestRun(self):
-        try:
-            return self.decorated.stopTestRun()
-        except AttributeError:
-            return
-
-    def tags(self, new_tags, gone_tags):
-        method = getattr(self.decorated, 'tags', None)
-        if method is None:
-            return
-        return method(new_tags, gone_tags)
-
-    def time(self, a_datetime):
-        method = getattr(self.decorated, 'time', None)
-        if method is None:
-            return
-        return method(a_datetime)
-
-    def wasSuccessful(self):
-        return self.decorated.wasSuccessful()
-
-
-class _StringException(Exception):
-    """An exception made from an arbitrary string."""
-
-    if not str_is_unicode:
-        def __init__(self, string):
-            if type(string) is not unicode:
-                raise TypeError("_StringException expects unicode, got %r" %
-                    (string,))
-            Exception.__init__(self, string)
-
-        def __str__(self):
-            return self.args[0].encode("utf-8")
-
-        def __unicode__(self):
-            return self.args[0]
-    # For 3.0 and above the default __str__ is fine, so we don't define one.
-
-    def __hash__(self):
-        return id(self)
-
-    def __eq__(self, other):
-        try:
-            return self.args == other.args
-        except AttributeError:
-            return False
-
-
-def _details_to_str(details):
-    """Convert a details dict to a string."""
-    chars = []
-    # sorted is for testing, may want to remove that and use a dict
-    # subclass with defined order for items instead.
-    for key, content in sorted(details.items()):
-        if content.content_type.type != 'text':
-            chars.append('Binary content: %s\n' % key)
-            continue
-        chars.append('Text attachment: %s\n' % key)
-        chars.append('------------\n')
-        chars.extend(content.iter_text())
-        if not chars[-1].endswith('\n'):
-            chars.append('\n')
-        chars.append('------------\n')
-    return _u('').join(chars)

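As a usage sketch for the removed result classes (assuming the module is importable as testtools.testresult.real): MultiTestResult fans each call out to several results, each wrapped in ExtendedToOriginalDecorator so details-style and old-style results can be mixed.

import sys

from testtools.testresult.real import (
    MultiTestResult,
    TestResult,
    TextTestResult,
    )


class _FakeTest(object):
    """Stand-in test object; only id() is needed by these results."""

    def id(self):
        return 'example.test_id'


plain = TestResult()
both = MultiTestResult(plain, TextTestResult(sys.stdout))

test = _FakeTest()
both.startTestRun()
both.startTest(test)
both.addSkip(test, reason='demonstration only')
both.stopTest(test)
both.stopTestRun()

# The plain result records skips by reason; the text result printed a summary.
assert plain.skip_reasons == {'demonstration only': [test]}
assert both.wasSuccessful()
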
=== removed directory 'python-for-subunit2junitxml/testtools/tests'
=== removed file 'python-for-subunit2junitxml/testtools/tests/__init__.py'
--- python-for-subunit2junitxml/testtools/tests/__init__.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/__init__.py	1970-01-01 00:00:00 +0000
@@ -1,44 +0,0 @@
-"""Tests for testtools itself."""
-
-# See README for copyright and licensing details.
-
-import unittest
-
-
-def test_suite():
-    from testtools.tests import (
-        test_compat,
-        test_content,
-        test_content_type,
-        test_deferredruntest,
-        test_distutilscmd,
-        test_fixturesupport,
-        test_helpers,
-        test_matchers,
-        test_monkey,
-        test_run,
-        test_runtest,
-        test_spinner,
-        test_testresult,
-        test_testsuite,
-        test_testtools,
-        )
-    modules = [
-        test_compat,
-        test_content,
-        test_content_type,
-        test_deferredruntest,
-        test_distutilscmd,
-        test_fixturesupport,
-        test_helpers,
-        test_matchers,
-        test_monkey,
-        test_run,
-        test_runtest,
-        test_spinner,
-        test_testresult,
-        test_testsuite,
-        test_testtools,
-        ]
-    suites = map(lambda x: x.test_suite(), modules)
-    return unittest.TestSuite(suites)

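The test_suite() hook removed above is the aggregation point for the bundled testtools self-tests; while the package was present it could be run with the standard unittest runner, roughly as follows:

import unittest

from testtools import tests

# Build the combined suite and run it with a plain text runner.
unittest.TextTestRunner(verbosity=1).run(tests.test_suite())
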
=== removed file 'python-for-subunit2junitxml/testtools/tests/helpers.py'
--- python-for-subunit2junitxml/testtools/tests/helpers.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/helpers.py	1970-01-01 00:00:00 +0000
@@ -1,72 +0,0 @@
-# Copyright (c) 2008 testtools developers. See LICENSE for details.
-
-"""Helpers for tests."""
-
-import sys
-
-__metaclass__ = type
-__all__ = [
-    'LoggingResult',
-    ]
-
-from testtools import TestResult
-
-
-# GZ 2010-08-12: Don't do this, pointlessly creates an exc_info cycle
-try:
-    raise Exception
-except Exception:
-    an_exc_info = sys.exc_info()
-
-# Deprecated: This class's attributes are somewhat non-deterministic, which
-# leads to hard-to-predict tests (because Python upstream keeps changing things).
-class LoggingResult(TestResult):
-    """TestResult that logs its event to a list."""
-
-    def __init__(self, log):
-        self._events = log
-        super(LoggingResult, self).__init__()
-
-    def startTest(self, test):
-        self._events.append(('startTest', test))
-        super(LoggingResult, self).startTest(test)
-
-    def stopTest(self, test):
-        self._events.append(('stopTest', test))
-        super(LoggingResult, self).stopTest(test)
-
-    def addFailure(self, test, error):
-        self._events.append(('addFailure', test, error))
-        super(LoggingResult, self).addFailure(test, error)
-
-    def addError(self, test, error):
-        self._events.append(('addError', test, error))
-        super(LoggingResult, self).addError(test, error)
-
-    def addSkip(self, test, reason):
-        self._events.append(('addSkip', test, reason))
-        super(LoggingResult, self).addSkip(test, reason)
-
-    def addSuccess(self, test):
-        self._events.append(('addSuccess', test))
-        super(LoggingResult, self).addSuccess(test)
-
-    def startTestRun(self):
-        self._events.append('startTestRun')
-        super(LoggingResult, self).startTestRun()
-
-    def stopTestRun(self):
-        self._events.append('stopTestRun')
-        super(LoggingResult, self).stopTestRun()
-
-    def done(self):
-        self._events.append('done')
-        super(LoggingResult, self).done()
-
-    def time(self, a_datetime):
-        self._events.append(('time', a_datetime))
-        super(LoggingResult, self).time(a_datetime)
-
-# Note, the following three classes are different to LoggingResult by
-# being fully defined exact matches rather than supersets.
-from testtools.testresult.doubles import *

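LoggingResult above records every result-API call into the list passed to its constructor, which is how the testtools self-tests assert on call ordering. A minimal sketch, assuming the bundled testtools package is importable:

from testtools import TestCase
from testtools.tests.helpers import LoggingResult


class _Passing(TestCase):

    def test_ok(self):
        pass


log = []
_Passing('test_ok').run(LoggingResult(log))

# Events are appended in call order: startTest first, stopTest last, with an
# addSuccess entry in between for a passing test.
names = [event[0] for event in log]
assert names[0] == 'startTest'
assert 'addSuccess' in names
assert names[-1] == 'stopTest'
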
=== removed file 'python-for-subunit2junitxml/testtools/tests/test_compat.py'
--- python-for-subunit2junitxml/testtools/tests/test_compat.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_compat.py	1970-01-01 00:00:00 +0000
@@ -1,257 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Tests for miscellaneous compatibility functions"""
-
-import linecache
-import os
-import sys
-import tempfile
-import traceback
-
-import testtools
-
-from testtools.compat import (
-    _b,
-    _detect_encoding,
-    _get_source_encoding,
-    _u,
-    unicode_output_stream,
-    )
-from testtools.matchers import (
-    MatchesException,
-    Not,
-    Raises,
-    )
-
-
-class TestDetectEncoding(testtools.TestCase):
-    """Test detection of Python source encodings"""
-
-    def _check_encoding(self, expected, lines, possibly_invalid=False):
-        """Check lines are valid Python and encoding is as expected"""
-        if not possibly_invalid:
-            compile(_b("".join(lines)), "<str>", "exec")
-        encoding = _detect_encoding(lines)
-        self.assertEqual(expected, encoding,
-            "Encoding %r expected but got %r from lines %r" %
-                (expected, encoding, lines))
-
-    def test_examples_from_pep(self):
-        """Check the examples given in PEP 263 all work as specified
-
-        See 'Examples' section of <http://www.python.org/dev/peps/pep-0263/>
-        """
-        # With interpreter binary and using Emacs style file encoding comment:
-        self._check_encoding("latin-1", (
-            "#!/usr/bin/python\n",
-            "# -*- coding: latin-1 -*-\n",
-            "import os, sys\n"))
-        self._check_encoding("iso-8859-15", (
-            "#!/usr/bin/python\n",
-            "# -*- coding: iso-8859-15 -*-\n",
-            "import os, sys\n"))
-        self._check_encoding("ascii", (
-            "#!/usr/bin/python\n",
-            "# -*- coding: ascii -*-\n",
-            "import os, sys\n"))
-        # Without interpreter line, using plain text:
-        self._check_encoding("utf-8", (
-            "# This Python file uses the following encoding: utf-8\n",
-            "import os, sys\n"))
-        # Text editors might have different ways of defining the file's
-        # encoding, e.g.
-        self._check_encoding("latin-1", (
-            "#!/usr/local/bin/python\n",
-            "# coding: latin-1\n",
-            "import os, sys\n"))
-        # Without encoding comment, Python's parser will assume ASCII text:
-        self._check_encoding("ascii", (
-            "#!/usr/local/bin/python\n",
-            "import os, sys\n"))
-        # Encoding comments which don't work:
-        #   Missing "coding:" prefix:
-        self._check_encoding("ascii", (
-            "#!/usr/local/bin/python\n",
-            "# latin-1\n",
-            "import os, sys\n"))
-        #   Encoding comment not on line 1 or 2:
-        self._check_encoding("ascii", (
-            "#!/usr/local/bin/python\n",
-            "#\n",
-            "# -*- coding: latin-1 -*-\n",
-            "import os, sys\n"))
-        #   Unsupported encoding:
-        self._check_encoding("ascii", (
-            "#!/usr/local/bin/python\n",
-            "# -*- coding: utf-42 -*-\n",
-            "import os, sys\n"),
-            possibly_invalid=True)
-
-    def test_bom(self):
-        """Test the UTF-8 BOM counts as an encoding declaration"""
-        self._check_encoding("utf-8", (
-            "\xef\xbb\xbfimport sys\n",
-            ))
-        self._check_encoding("utf-8", (
-            "\xef\xbb\xbf# File encoding: UTF-8\n",
-            ))
-        self._check_encoding("utf-8", (
-            '\xef\xbb\xbf"""Module docstring\n',
-            '\xef\xbb\xbfThat should just be a ZWNB"""\n'))
-        self._check_encoding("latin-1", (
-            '"""Is this coding: latin-1 or coding: utf-8 instead?\n',
-            '\xef\xbb\xbfThose should be latin-1 bytes"""\n'))
-        self._check_encoding("utf-8", (
-            "\xef\xbb\xbf# Is the coding: utf-8 or coding: euc-jp instead?\n",
-            '"""Module docstring say \xe2\x98\x86"""\n'))
-
-    def test_multiple_coding_comments(self):
-        """Test only the first of multiple coding declarations counts"""
-        self._check_encoding("iso-8859-1", (
-            "# Is the coding: iso-8859-1\n",
-            "# Or is it coding: iso-8859-2\n"),
-            possibly_invalid=True)
-        self._check_encoding("iso-8859-1", (
-            "#!/usr/bin/python\n",
-            "# Is the coding: iso-8859-1\n",
-            "# Or is it coding: iso-8859-2\n"))
-        self._check_encoding("iso-8859-1", (
-            "# Is the coding: iso-8859-1 or coding: iso-8859-2\n",
-            "# Or coding: iso-8859-3 or coding: iso-8859-4\n"),
-            possibly_invalid=True)
-        self._check_encoding("iso-8859-2", (
-            "# Is the coding iso-8859-1 or coding: iso-8859-2\n",
-            "# Spot the missing colon above\n"))
-
-
-class TestGetSourceEncoding(testtools.TestCase):
-    """Test reading and caching the encodings of source files"""
-
-    def setUp(self):
-        testtools.TestCase.setUp(self)
-        dir = tempfile.mkdtemp()
-        self.addCleanup(os.rmdir, dir)
-        self.filename = os.path.join(dir, self.id().rsplit(".", 1)[1] + ".py")
-        self._written = False
-
-    def put_source(self, text):
-        f = open(self.filename, "w")
-        try:
-            f.write(text)
-        finally:
-            f.close()
-            if not self._written:
-                self._written = True
-                self.addCleanup(os.remove, self.filename)
-                self.addCleanup(linecache.cache.pop, self.filename, None)
-
-    def test_nonexistant_file_as_ascii(self):
-        """When file can't be found, the encoding should default to ascii"""
-        self.assertEquals("ascii", _get_source_encoding(self.filename))
-
-    def test_encoding_is_cached(self):
-        """The encoding should stay the same if the cache isn't invalidated"""
-        self.put_source(
-            "# coding: iso-8859-13\n"
-            "import os\n")
-        self.assertEquals("iso-8859-13", _get_source_encoding(self.filename))
-        self.put_source(
-            "# coding: rot-13\n"
-            "vzcbeg bf\n")
-        self.assertEquals("iso-8859-13", _get_source_encoding(self.filename))
-
-    def test_traceback_rechecks_encoding(self):
-        """A traceback function checks the cache and resets the encoding"""
-        self.put_source(
-            "# coding: iso-8859-8\n"
-            "import os\n")
-        self.assertEquals("iso-8859-8", _get_source_encoding(self.filename))
-        self.put_source(
-            "# coding: utf-8\n"
-            "import os\n")
-        try:
-            exec (compile("raise RuntimeError\n", self.filename, "exec"))
-        except RuntimeError:
-            traceback.extract_tb(sys.exc_info()[2])
-        else:
-            self.fail("RuntimeError not raised")
-        self.assertEquals("utf-8", _get_source_encoding(self.filename))
-
-
-class _FakeOutputStream(object):
-    """A simple file-like object for testing"""
-
-    def __init__(self):
-        self.writelog = []
-
-    def write(self, obj):
-        self.writelog.append(obj)
-
-
-class TestUnicodeOutputStream(testtools.TestCase):
-    """Test wrapping output streams so they work with arbitrary unicode"""
-
-    uni = _u("pa\u026a\u03b8\u0259n")
-
-    def setUp(self):
-        super(TestUnicodeOutputStream, self).setUp()
-        if sys.platform == "cli":
-            self.skip("IronPython shouldn't wrap streams to do encoding")
-
-    def test_no_encoding_becomes_ascii(self):
-        """A stream with no encoding attribute gets ascii/replace strings"""
-        sout = _FakeOutputStream()
-        unicode_output_stream(sout).write(self.uni)
-        self.assertEqual([_b("pa???n")], sout.writelog)
-
-    def test_encoding_as_none_becomes_ascii(self):
-        """A stream with encoding value of None gets ascii/replace strings"""
-        sout = _FakeOutputStream()
-        sout.encoding = None
-        unicode_output_stream(sout).write(self.uni)
-        self.assertEqual([_b("pa???n")], sout.writelog)
-
-    def test_bogus_encoding_becomes_ascii(self):
-        """A stream with a bogus encoding gets ascii/replace strings"""
-        sout = _FakeOutputStream()
-        sout.encoding = "bogus"
-        unicode_output_stream(sout).write(self.uni)
-        self.assertEqual([_b("pa???n")], sout.writelog)
-
-    def test_partial_encoding_replace(self):
-        """A string which can be partly encoded correctly should be"""
-        sout = _FakeOutputStream()
-        sout.encoding = "iso-8859-7"
-        unicode_output_stream(sout).write(self.uni)
-        self.assertEqual([_b("pa?\xe8?n")], sout.writelog)
-
-    def test_unicode_encodings_not_wrapped(self):
-        """A unicode encoding is left unwrapped as needs no error handler"""
-        sout = _FakeOutputStream()
-        sout.encoding = "utf-8"
-        self.assertIs(unicode_output_stream(sout), sout)
-        sout = _FakeOutputStream()
-        sout.encoding = "utf-16-be"
-        self.assertIs(unicode_output_stream(sout), sout)
-
-    def test_stringio(self):
-        """A StringIO object should maybe get an ascii native str type"""
-        try:
-            from cStringIO import StringIO
-            newio = False
-        except ImportError:
-            from io import StringIO
-            newio = True
-        sout = StringIO()
-        soutwrapper = unicode_output_stream(sout)
-        if newio:
-            self.expectFailure("Python 3 StringIO expects text not bytes",
-                self.assertThat, lambda: soutwrapper.write(self.uni),
-                Not(Raises(MatchesException(TypeError))))
-        soutwrapper.write(self.uni)
-        self.assertEqual("pa???n", sout.getvalue())
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_content.py'
--- python-for-subunit2junitxml/testtools/tests/test_content.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_content.py	1970-01-01 00:00:00 +0000
@@ -1,223 +0,0 @@
-# Copyright (c) 2008-2011 testtools developers. See LICENSE for details.
-
-import os
-import tempfile
-import unittest
-
-from testtools import TestCase
-from testtools.compat import (
-    _b,
-    _u,
-    StringIO,
-    )
-from testtools.content import (
-    attach_file,
-    Content,
-    content_from_file,
-    content_from_stream,
-    TracebackContent,
-    text_content,
-    )
-from testtools.content_type import (
-    ContentType,
-    UTF8_TEXT,
-    )
-from testtools.matchers import (
-    Equals,
-    MatchesException,
-    Raises,
-    raises,
-    )
-from testtools.tests.helpers import an_exc_info
-
-
-raises_value_error = Raises(MatchesException(ValueError))
-
-
-class TestContent(TestCase):
-
-    def test___init___None_errors(self):
-        self.assertThat(lambda: Content(None, None), raises_value_error)
-        self.assertThat(
-            lambda: Content(None, lambda: ["traceback"]), raises_value_error)
-        self.assertThat(
-            lambda: Content(ContentType("text", "traceback"), None),
-            raises_value_error)
-
-    def test___init___sets_ivars(self):
-        content_type = ContentType("foo", "bar")
-        content = Content(content_type, lambda: ["bytes"])
-        self.assertEqual(content_type, content.content_type)
-        self.assertEqual(["bytes"], list(content.iter_bytes()))
-
-    def test___eq__(self):
-        content_type = ContentType("foo", "bar")
-        one_chunk = lambda: [_b("bytes")]
-        two_chunk = lambda: [_b("by"), _b("tes")]
-        content1 = Content(content_type, one_chunk)
-        content2 = Content(content_type, one_chunk)
-        content3 = Content(content_type, two_chunk)
-        content4 = Content(content_type, lambda: [_b("by"), _b("te")])
-        content5 = Content(ContentType("f", "b"), two_chunk)
-        self.assertEqual(content1, content2)
-        self.assertEqual(content1, content3)
-        self.assertNotEqual(content1, content4)
-        self.assertNotEqual(content1, content5)
-
-    def test___repr__(self):
-        content = Content(ContentType("application", "octet-stream"),
-            lambda: [_b("\x00bin"), _b("ary\xff")])
-        self.assertIn("\\x00binary\\xff", repr(content))
-
-    def test_iter_text_not_text_errors(self):
-        content_type = ContentType("foo", "bar")
-        content = Content(content_type, lambda: ["bytes"])
-        self.assertThat(content.iter_text, raises_value_error)
-
-    def test_iter_text_decodes(self):
-        content_type = ContentType("text", "strange", {"charset": "utf8"})
-        content = Content(
-            content_type, lambda: [_u("bytes\xea").encode("utf8")])
-        self.assertEqual([_u("bytes\xea")], list(content.iter_text()))
-
-    def test_iter_text_default_charset_iso_8859_1(self):
-        content_type = ContentType("text", "strange")
-        text = _u("bytes\xea")
-        iso_version = text.encode("ISO-8859-1")
-        content = Content(content_type, lambda: [iso_version])
-        self.assertEqual([text], list(content.iter_text()))
-
-    def test_from_file(self):
-        fd, path = tempfile.mkstemp()
-        self.addCleanup(os.remove, path)
-        os.write(fd, 'some data')
-        os.close(fd)
-        content = content_from_file(path, UTF8_TEXT, chunk_size=2)
-        self.assertThat(
-            list(content.iter_bytes()), Equals(['so', 'me', ' d', 'at', 'a']))
-
-    def test_from_nonexistent_file(self):
-        directory = tempfile.mkdtemp()
-        nonexistent = os.path.join(directory, 'nonexistent-file')
-        content = content_from_file(nonexistent)
-        self.assertThat(content.iter_bytes, raises(IOError))
-
-    def test_from_file_default_type(self):
-        content = content_from_file('/nonexistent/path')
-        self.assertThat(content.content_type, Equals(UTF8_TEXT))
-
-    def test_from_file_eager_loading(self):
-        fd, path = tempfile.mkstemp()
-        os.write(fd, 'some data')
-        os.close(fd)
-        content = content_from_file(path, UTF8_TEXT, buffer_now=True)
-        os.remove(path)
-        self.assertThat(
-            _b('').join(content.iter_bytes()), Equals('some data'))
-
-    def test_from_stream(self):
-        data = StringIO('some data')
-        content = content_from_stream(data, UTF8_TEXT, chunk_size=2)
-        self.assertThat(
-            list(content.iter_bytes()), Equals(['so', 'me', ' d', 'at', 'a']))
-
-    def test_from_stream_default_type(self):
-        data = StringIO('some data')
-        content = content_from_stream(data)
-        self.assertThat(content.content_type, Equals(UTF8_TEXT))
-
-    def test_from_stream_eager_loading(self):
-        fd, path = tempfile.mkstemp()
-        self.addCleanup(os.remove, path)
-        os.write(fd, 'some data')
-        stream = open(path, 'rb')
-        content = content_from_stream(stream, UTF8_TEXT, buffer_now=True)
-        os.write(fd, 'more data')
-        os.close(fd)
-        self.assertThat(
-            _b('').join(content.iter_bytes()), Equals('some data'))
-
-    def test_from_text(self):
-        data = _u("some data")
-        expected = Content(UTF8_TEXT, lambda: [data.encode('utf8')])
-        self.assertEqual(expected, text_content(data))
-
-
-class TestTracebackContent(TestCase):
-
-    def test___init___None_errors(self):
-        self.assertThat(
-            lambda: TracebackContent(None, None), raises_value_error)
-
-    def test___init___sets_ivars(self):
-        content = TracebackContent(an_exc_info, self)
-        content_type = ContentType("text", "x-traceback",
-            {"language": "python", "charset": "utf8"})
-        self.assertEqual(content_type, content.content_type)
-        result = unittest.TestResult()
-        expected = result._exc_info_to_string(an_exc_info, self)
-        self.assertEqual(expected, ''.join(list(content.iter_text())))
-
-
-class TestAttachFile(TestCase):
-
-    def make_file(self, data):
-        fd, path = tempfile.mkstemp()
-        self.addCleanup(os.remove, path)
-        os.write(fd, data)
-        os.close(fd)
-        return path
-
-    def test_simple(self):
-        class SomeTest(TestCase):
-            def test_foo(self):
-                pass
-        test = SomeTest('test_foo')
-        data = 'some data'
-        path = self.make_file(data)
-        my_content = text_content(data)
-        attach_file(test, path, name='foo')
-        self.assertEqual({'foo': my_content}, test.getDetails())
-
-    def test_optional_name(self):
-        # If no name is provided, attach_file just uses the base name of the
-        # file.
-        class SomeTest(TestCase):
-            def test_foo(self):
-                pass
-        test = SomeTest('test_foo')
-        path = self.make_file('some data')
-        base_path = os.path.basename(path)
-        attach_file(test, path)
-        self.assertEqual([base_path], list(test.getDetails()))
-
-    def test_lazy_read(self):
-        class SomeTest(TestCase):
-            def test_foo(self):
-                pass
-        test = SomeTest('test_foo')
-        path = self.make_file('some data')
-        attach_file(test, path, name='foo', buffer_now=False)
-        content = test.getDetails()['foo']
-        content_file = open(path, 'w')
-        content_file.write('new data')
-        content_file.close()
-        self.assertEqual(''.join(content.iter_bytes()), 'new data')
-
-    def test_eager_read_by_default(self):
-        class SomeTest(TestCase):
-            def test_foo(self):
-                pass
-        test = SomeTest('test_foo')
-        path = self.make_file('some data')
-        attach_file(test, path, name='foo')
-        content = test.getDetails()['foo']
-        content_file = open(path, 'w')
-        content_file.write('new data')
-        content_file.close()
-        self.assertEqual(''.join(content.iter_bytes()), 'some data')
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_content_type.py'
--- python-for-subunit2junitxml/testtools/tests/test_content_type.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_content_type.py	1970-01-01 00:00:00 +0000
@@ -1,46 +0,0 @@
-# Copyright (c) 2008 testtools developers. See LICENSE for details.
-
-from testtools import TestCase
-from testtools.matchers import Equals, MatchesException, Raises
-from testtools.content_type import ContentType, UTF8_TEXT
-
-
-class TestContentType(TestCase):
-
-    def test___init___None_errors(self):
-        raises_value_error = Raises(MatchesException(ValueError))
-        self.assertThat(lambda:ContentType(None, None), raises_value_error)
-        self.assertThat(lambda:ContentType(None, "traceback"),
-            raises_value_error)
-        self.assertThat(lambda:ContentType("text", None), raises_value_error)
-
-    def test___init___sets_ivars(self):
-        content_type = ContentType("foo", "bar")
-        self.assertEqual("foo", content_type.type)
-        self.assertEqual("bar", content_type.subtype)
-        self.assertEqual({}, content_type.parameters)
-
-    def test___init___with_parameters(self):
-        content_type = ContentType("foo", "bar", {"quux": "thing"})
-        self.assertEqual({"quux": "thing"}, content_type.parameters)
-
-    def test___eq__(self):
-        content_type1 = ContentType("foo", "bar", {"quux": "thing"})
-        content_type2 = ContentType("foo", "bar", {"quux": "thing"})
-        content_type3 = ContentType("foo", "bar", {"quux": "thing2"})
-        self.assertTrue(content_type1.__eq__(content_type2))
-        self.assertFalse(content_type1.__eq__(content_type3))
-
-
-class TestBuiltinContentTypes(TestCase):
-
-    def test_plain_text(self):
-        # The UTF8_TEXT content type represents UTF-8 encoded text/plain.
-        self.assertThat(UTF8_TEXT.type, Equals('text'))
-        self.assertThat(UTF8_TEXT.subtype, Equals('plain'))
-        self.assertThat(UTF8_TEXT.parameters, Equals({'charset': 'utf8'}))
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_deferredruntest.py'
--- python-for-subunit2junitxml/testtools/tests/test_deferredruntest.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_deferredruntest.py	1970-01-01 00:00:00 +0000
@@ -1,738 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Tests for the DeferredRunTest single test execution logic."""
-
-import os
-import signal
-
-from testtools import (
-    skipIf,
-    TestCase,
-    )
-from testtools.content import (
-    text_content,
-    )
-from testtools.helpers import try_import
-from testtools.tests.helpers import ExtendedTestResult
-from testtools.matchers import (
-    Equals,
-    KeysEqual,
-    MatchesException,
-    Raises,
-    )
-from testtools.runtest import RunTest
-from testtools.tests.test_spinner import NeedsTwistedTestCase
-
-assert_fails_with = try_import('testtools.deferredruntest.assert_fails_with')
-AsynchronousDeferredRunTest = try_import(
-    'testtools.deferredruntest.AsynchronousDeferredRunTest')
-flush_logged_errors = try_import(
-    'testtools.deferredruntest.flush_logged_errors')
-SynchronousDeferredRunTest = try_import(
-    'testtools.deferredruntest.SynchronousDeferredRunTest')
-
-defer = try_import('twisted.internet.defer')
-failure = try_import('twisted.python.failure')
-log = try_import('twisted.python.log')
-DelayedCall = try_import('twisted.internet.base.DelayedCall')
-
-
-class X(object):
-    """Tests that we run as part of our tests, nested to avoid discovery."""
-
-    class Base(TestCase):
-        def setUp(self):
-            super(X.Base, self).setUp()
-            self.calls = ['setUp']
-            self.addCleanup(self.calls.append, 'clean-up')
-        def test_something(self):
-            self.calls.append('test')
-        def tearDown(self):
-            self.calls.append('tearDown')
-            super(X.Base, self).tearDown()
-
-    class ErrorInSetup(Base):
-        expected_calls = ['setUp', 'clean-up']
-        expected_results = [('addError', RuntimeError)]
-        def setUp(self):
-            super(X.ErrorInSetup, self).setUp()
-            raise RuntimeError("Error in setUp")
-
-    class ErrorInTest(Base):
-        expected_calls = ['setUp', 'tearDown', 'clean-up']
-        expected_results = [('addError', RuntimeError)]
-        def test_something(self):
-            raise RuntimeError("Error in test")
-
-    class FailureInTest(Base):
-        expected_calls = ['setUp', 'tearDown', 'clean-up']
-        expected_results = [('addFailure', AssertionError)]
-        def test_something(self):
-            self.fail("test failed")
-
-    class ErrorInTearDown(Base):
-        expected_calls = ['setUp', 'test', 'clean-up']
-        expected_results = [('addError', RuntimeError)]
-        def tearDown(self):
-            raise RuntimeError("Error in tearDown")
-
-    class ErrorInCleanup(Base):
-        expected_calls = ['setUp', 'test', 'tearDown', 'clean-up']
-        expected_results = [('addError', ZeroDivisionError)]
-        def test_something(self):
-            self.calls.append('test')
-            self.addCleanup(lambda: 1/0)
-
-    class TestIntegration(NeedsTwistedTestCase):
-
-        def assertResultsMatch(self, test, result):
-            events = list(result._events)
-            self.assertEqual(('startTest', test), events.pop(0))
-            for expected_result in test.expected_results:
-                result = events.pop(0)
-                if len(expected_result) == 1:
-                    self.assertEqual((expected_result[0], test), result)
-                else:
-                    self.assertEqual((expected_result[0], test), result[:2])
-                    error_type = expected_result[1]
-                    self.assertIn(error_type.__name__, str(result[2]))
-            self.assertEqual([('stopTest', test)], events)
-
-        def test_runner(self):
-            result = ExtendedTestResult()
-            test = self.test_factory('test_something', runTest=self.runner)
-            test.run(result)
-            self.assertEqual(test.calls, self.test_factory.expected_calls)
-            self.assertResultsMatch(test, result)
-
-
-def make_integration_tests():
-    from unittest import TestSuite
-    from testtools import clone_test_with_new_id
-    runners = [
-        ('RunTest', RunTest),
-        ('SynchronousDeferredRunTest', SynchronousDeferredRunTest),
-        ('AsynchronousDeferredRunTest', AsynchronousDeferredRunTest),
-        ]
-
-    tests = [
-        X.ErrorInSetup,
-        X.ErrorInTest,
-        X.ErrorInTearDown,
-        X.FailureInTest,
-        X.ErrorInCleanup,
-        ]
-    base_test = X.TestIntegration('test_runner')
-    integration_tests = []
-    for runner_name, runner in runners:
-        for test in tests:
-            new_test = clone_test_with_new_id(
-                base_test, '%s(%s, %s)' % (
-                    base_test.id(),
-                    runner_name,
-                    test.__name__))
-            new_test.test_factory = test
-            new_test.runner = runner
-            integration_tests.append(new_test)
-    return TestSuite(integration_tests)
-
-
-class TestSynchronousDeferredRunTest(NeedsTwistedTestCase):
-
-    def make_result(self):
-        return ExtendedTestResult()
-
-    def make_runner(self, test):
-        return SynchronousDeferredRunTest(test, test.exception_handlers)
-
-    def test_success(self):
-        class SomeCase(TestCase):
-            def test_success(self):
-                return defer.succeed(None)
-        test = SomeCase('test_success')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            result._events, Equals([
-                ('startTest', test),
-                ('addSuccess', test),
-                ('stopTest', test)]))
-
-    def test_failure(self):
-        class SomeCase(TestCase):
-            def test_failure(self):
-                return defer.maybeDeferred(self.fail, "Egads!")
-        test = SomeCase('test_failure')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events], Equals([
-                ('startTest', test),
-                ('addFailure', test),
-                ('stopTest', test)]))
-
-    def test_setUp_followed_by_test(self):
-        class SomeCase(TestCase):
-            def setUp(self):
-                super(SomeCase, self).setUp()
-                return defer.succeed(None)
-            def test_failure(self):
-                return defer.maybeDeferred(self.fail, "Egads!")
-        test = SomeCase('test_failure')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events], Equals([
-                ('startTest', test),
-                ('addFailure', test),
-                ('stopTest', test)]))
-
-
-class TestAsynchronousDeferredRunTest(NeedsTwistedTestCase):
-
-    def make_reactor(self):
-        from twisted.internet import reactor
-        return reactor
-
-    def make_result(self):
-        return ExtendedTestResult()
-
-    def make_runner(self, test, timeout=None):
-        if timeout is None:
-            timeout = self.make_timeout()
-        return AsynchronousDeferredRunTest(
-            test, test.exception_handlers, timeout=timeout)
-
-    def make_timeout(self):
-        return 0.005
-
-    def test_setUp_returns_deferred_that_fires_later(self):
-        # setUp can return a Deferred that might fire at any time.
-        # AsynchronousDeferredRunTest will not go on to running the test until
-        # the Deferred returned by setUp actually fires.
-        call_log = []
-        marker = object()
-        d = defer.Deferred().addCallback(call_log.append)
-        class SomeCase(TestCase):
-            def setUp(self):
-                super(SomeCase, self).setUp()
-                call_log.append('setUp')
-                return d
-            def test_something(self):
-                call_log.append('test')
-        def fire_deferred():
-            self.assertThat(call_log, Equals(['setUp']))
-            d.callback(marker)
-        test = SomeCase('test_something')
-        timeout = self.make_timeout()
-        runner = self.make_runner(test, timeout=timeout)
-        result = self.make_result()
-        reactor = self.make_reactor()
-        reactor.callLater(timeout, fire_deferred)
-        runner.run(result)
-        self.assertThat(call_log, Equals(['setUp', marker, 'test']))
-
-    def test_calls_setUp_test_tearDown_in_sequence(self):
-        # setUp, the test method and tearDown can all return
-        # Deferreds. AsynchronousDeferredRunTest will make sure that each of
-        # these is run in turn, only going on to the next stage once the
-        # Deferred from the previous stage has fired.
-        call_log = []
-        a = defer.Deferred()
-        a.addCallback(lambda x: call_log.append('a'))
-        b = defer.Deferred()
-        b.addCallback(lambda x: call_log.append('b'))
-        c = defer.Deferred()
-        c.addCallback(lambda x: call_log.append('c'))
-        class SomeCase(TestCase):
-            def setUp(self):
-                super(SomeCase, self).setUp()
-                call_log.append('setUp')
-                return a
-            def test_success(self):
-                call_log.append('test')
-                return b
-            def tearDown(self):
-                super(SomeCase, self).tearDown()
-                call_log.append('tearDown')
-                return c
-        test = SomeCase('test_success')
-        timeout = self.make_timeout()
-        runner = self.make_runner(test, timeout)
-        result = self.make_result()
-        reactor = self.make_reactor()
-        def fire_a():
-            self.assertThat(call_log, Equals(['setUp']))
-            a.callback(None)
-        def fire_b():
-            self.assertThat(call_log, Equals(['setUp', 'a', 'test']))
-            b.callback(None)
-        def fire_c():
-            self.assertThat(
-                call_log, Equals(['setUp', 'a', 'test', 'b', 'tearDown']))
-            c.callback(None)
-        reactor.callLater(timeout * 0.25, fire_a)
-        reactor.callLater(timeout * 0.5, fire_b)
-        reactor.callLater(timeout * 0.75, fire_c)
-        runner.run(result)
-        self.assertThat(
-            call_log, Equals(['setUp', 'a', 'test', 'b', 'tearDown', 'c']))
-
-    def test_async_cleanups(self):
-        # Cleanups added with addCleanup can return
-        # Deferreds. AsynchronousDeferredRunTest will run each of them in
-        # turn.
-        class SomeCase(TestCase):
-            def test_whatever(self):
-                pass
-        test = SomeCase('test_whatever')
-        call_log = []
-        a = defer.Deferred().addCallback(lambda x: call_log.append('a'))
-        b = defer.Deferred().addCallback(lambda x: call_log.append('b'))
-        c = defer.Deferred().addCallback(lambda x: call_log.append('c'))
-        test.addCleanup(lambda: a)
-        test.addCleanup(lambda: b)
-        test.addCleanup(lambda: c)
-        def fire_a():
-            self.assertThat(call_log, Equals([]))
-            a.callback(None)
-        def fire_b():
-            self.assertThat(call_log, Equals(['a']))
-            b.callback(None)
-        def fire_c():
-            self.assertThat(call_log, Equals(['a', 'b']))
-            c.callback(None)
-        timeout = self.make_timeout()
-        reactor = self.make_reactor()
-        reactor.callLater(timeout * 0.25, fire_a)
-        reactor.callLater(timeout * 0.5, fire_b)
-        reactor.callLater(timeout * 0.75, fire_c)
-        runner = self.make_runner(test, timeout)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(call_log, Equals(['a', 'b', 'c']))
-
-    def test_clean_reactor(self):
-        # If there's cruft left over in the reactor, the test fails.
-        reactor = self.make_reactor()
-        timeout = self.make_timeout()
-        class SomeCase(TestCase):
-            def test_cruft(self):
-                reactor.callLater(timeout * 10.0, lambda: None)
-        test = SomeCase('test_cruft')
-        runner = self.make_runner(test, timeout)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events],
-            Equals(
-                [('startTest', test),
-                 ('addError', test),
-                 ('stopTest', test)]))
-        error = result._events[1][2]
-        self.assertThat(error, KeysEqual('traceback', 'twisted-log'))
-
-    def test_unhandled_error_from_deferred(self):
-        # If there's a Deferred with an unhandled error, the test fails.  Each
-        # unhandled error is reported with a separate traceback.
-        class SomeCase(TestCase):
-            def test_cruft(self):
-                # Note we aren't returning the Deferred so that the error will
-                # be unhandled.
-                defer.maybeDeferred(lambda: 1/0)
-                defer.maybeDeferred(lambda: 2/0)
-        test = SomeCase('test_cruft')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        error = result._events[1][2]
-        result._events[1] = ('addError', test, None)
-        self.assertThat(result._events, Equals(
-            [('startTest', test),
-             ('addError', test, None),
-             ('stopTest', test)]))
-        self.assertThat(
-            error, KeysEqual(
-                'twisted-log',
-                'unhandled-error-in-deferred',
-                'unhandled-error-in-deferred-1',
-                ))
-
-    def test_unhandled_error_from_deferred_combined_with_error(self):
-        # If there's a Deferred with an unhandled error, the test fails.  Each
-        # unhandled error is reported with a separate traceback, and the error
-        # is still reported.
-        class SomeCase(TestCase):
-            def test_cruft(self):
-                # Note we aren't returning the Deferred so that the error will
-                # be unhandled.
-                defer.maybeDeferred(lambda: 1/0)
-                2 / 0
-        test = SomeCase('test_cruft')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        error = result._events[1][2]
-        result._events[1] = ('addError', test, None)
-        self.assertThat(result._events, Equals(
-            [('startTest', test),
-             ('addError', test, None),
-             ('stopTest', test)]))
-        self.assertThat(
-            error, KeysEqual(
-                'traceback',
-                'twisted-log',
-                'unhandled-error-in-deferred',
-                ))
-
-    @skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
-    def test_keyboard_interrupt_stops_test_run(self):
-        # If we get a SIGINT during a test run, the test stops and no more
-        # tests run.
-        SIGINT = getattr(signal, 'SIGINT', None)
-        if not SIGINT:
-            raise self.skipTest("SIGINT unavailable")
-        class SomeCase(TestCase):
-            def test_pause(self):
-                return defer.Deferred()
-        test = SomeCase('test_pause')
-        reactor = self.make_reactor()
-        timeout = self.make_timeout()
-        runner = self.make_runner(test, timeout * 5)
-        result = self.make_result()
-        reactor.callLater(timeout, os.kill, os.getpid(), SIGINT)
-        self.assertThat(lambda:runner.run(result),
-            Raises(MatchesException(KeyboardInterrupt)))
-
-    @skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
-    def test_fast_keyboard_interrupt_stops_test_run(self):
-        # If we get a SIGINT during a test run, the test stops and no more
-        # tests run.
-        SIGINT = getattr(signal, 'SIGINT', None)
-        if not SIGINT:
-            raise self.skipTest("SIGINT unavailable")
-        class SomeCase(TestCase):
-            def test_pause(self):
-                return defer.Deferred()
-        test = SomeCase('test_pause')
-        reactor = self.make_reactor()
-        timeout = self.make_timeout()
-        runner = self.make_runner(test, timeout * 5)
-        result = self.make_result()
-        reactor.callWhenRunning(os.kill, os.getpid(), SIGINT)
-        self.assertThat(lambda:runner.run(result),
-            Raises(MatchesException(KeyboardInterrupt)))
-
-    def test_timeout_causes_test_error(self):
-        # If a test times out, it reports itself as having failed with a
-        # TimeoutError.
-        class SomeCase(TestCase):
-            def test_pause(self):
-                return defer.Deferred()
-        test = SomeCase('test_pause')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        error = result._events[1][2]
-        self.assertThat(
-            [event[:2] for event in result._events], Equals(
-            [('startTest', test),
-             ('addError', test),
-             ('stopTest', test)]))
-        self.assertIn('TimeoutError', str(error['traceback']))
-
-    def test_convenient_construction(self):
-        # As a convenience method, AsynchronousDeferredRunTest has a
-        # classmethod that returns an AsynchronousDeferredRunTest
-        # factory. This factory has the same API as the RunTest constructor.
-        reactor = object()
-        timeout = object()
-        handler = object()
-        factory = AsynchronousDeferredRunTest.make_factory(reactor, timeout)
-        runner = factory(self, [handler])
-        self.assertIs(reactor, runner._reactor)
-        self.assertIs(timeout, runner._timeout)
-        self.assertIs(self, runner.case)
-        self.assertEqual([handler], runner.handlers)
-
-    def test_use_convenient_factory(self):
-        # Make sure that the factory can actually be used.
-        factory = AsynchronousDeferredRunTest.make_factory()
-        class SomeCase(TestCase):
-            run_tests_with = factory
-            def test_something(self):
-                pass
-        case = SomeCase('test_something')
-        case.run()
-
-    def test_convenient_construction_default_reactor(self):
-        # As a convenience method, AsynchronousDeferredRunTest has a
-        # classmethod that returns an AsynchronousDeferredRunTest
-        # factory. This factory has the same API as the RunTest constructor.
-        reactor = object()
-        handler = object()
-        factory = AsynchronousDeferredRunTest.make_factory(reactor=reactor)
-        runner = factory(self, [handler])
-        self.assertIs(reactor, runner._reactor)
-        self.assertIs(self, runner.case)
-        self.assertEqual([handler], runner.handlers)
-
-    def test_convenient_construction_default_timeout(self):
-        # As a convenience method, AsynchronousDeferredRunTest has a
-        # classmethod that returns an AsynchronousDeferredRunTest
-        # factory. This factory has the same API as the RunTest constructor.
-        timeout = object()
-        handler = object()
-        factory = AsynchronousDeferredRunTest.make_factory(timeout=timeout)
-        runner = factory(self, [handler])
-        self.assertIs(timeout, runner._timeout)
-        self.assertIs(self, runner.case)
-        self.assertEqual([handler], runner.handlers)
-
-    def test_convenient_construction_default_debugging(self):
-        # As a convenience method, AsynchronousDeferredRunTest has a
-        # classmethod that returns an AsynchronousDeferredRunTest
-        # factory. This factory has the same API as the RunTest constructor.
-        handler = object()
-        factory = AsynchronousDeferredRunTest.make_factory(debug=True)
-        runner = factory(self, [handler])
-        self.assertIs(self, runner.case)
-        self.assertEqual([handler], runner.handlers)
-        self.assertEqual(True, runner._debug)
-
-    def test_deferred_error(self):
-        class SomeTest(TestCase):
-            def test_something(self):
-                return defer.maybeDeferred(lambda: 1/0)
-        test = SomeTest('test_something')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events],
-            Equals([
-                ('startTest', test),
-                ('addError', test),
-                ('stopTest', test)]))
-        error = result._events[1][2]
-        self.assertThat(error, KeysEqual('traceback', 'twisted-log'))
-
-    def test_only_addError_once(self):
-        # Even if the reactor is unclean and the test raises an error and the
-        # cleanups raise errors, addError is only called once per test.
-        reactor = self.make_reactor()
-        class WhenItRains(TestCase):
-            def it_pours(self):
-                # Add a dirty cleanup.
-                self.addCleanup(lambda: 3 / 0)
-                # Dirty the reactor.
-                from twisted.internet.protocol import ServerFactory
-                reactor.listenTCP(0, ServerFactory())
-                # Unhandled error.
-                defer.maybeDeferred(lambda: 2 / 0)
-                # Actual error.
-                raise RuntimeError("Excess precipitation")
-        test = WhenItRains('it_pours')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events],
-            Equals([
-                ('startTest', test),
-                ('addError', test),
-                ('stopTest', test)]))
-        error = result._events[1][2]
-        self.assertThat(
-            error, KeysEqual(
-                'traceback',
-                'traceback-1',
-                'traceback-2',
-                'twisted-log',
-                'unhandled-error-in-deferred',
-                ))
-
-    def test_log_err_is_error(self):
-        # An error logged during the test run is recorded as an error in the
-        # tests.
-        class LogAnError(TestCase):
-            def test_something(self):
-                try:
-                    1/0
-                except ZeroDivisionError:
-                    f = failure.Failure()
-                log.err(f)
-        test = LogAnError('test_something')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events],
-            Equals([
-                ('startTest', test),
-                ('addError', test),
-                ('stopTest', test)]))
-        error = result._events[1][2]
-        self.assertThat(error, KeysEqual('logged-error', 'twisted-log'))
-
-    def test_log_err_flushed_is_success(self):
-        # An error logged during the test run but then flushed with
-        # flush_logged_errors does not fail the test.
-        class LogAnError(TestCase):
-            def test_something(self):
-                try:
-                    1/0
-                except ZeroDivisionError:
-                    f = failure.Failure()
-                log.err(f)
-                flush_logged_errors(ZeroDivisionError)
-        test = LogAnError('test_something')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            result._events,
-            Equals([
-                ('startTest', test),
-                ('addSuccess', test, {'twisted-log': text_content('')}),
-                ('stopTest', test)]))
-
-    def test_log_in_details(self):
-        class LogAnError(TestCase):
-            def test_something(self):
-                log.msg("foo")
-                1/0
-        test = LogAnError('test_something')
-        runner = self.make_runner(test)
-        result = self.make_result()
-        runner.run(result)
-        self.assertThat(
-            [event[:2] for event in result._events],
-            Equals([
-                ('startTest', test),
-                ('addError', test),
-                ('stopTest', test)]))
-        error = result._events[1][2]
-        self.assertThat(error, KeysEqual('traceback', 'twisted-log'))
-
-    def test_debugging_unchanged_during_test_by_default(self):
-        debugging = [(defer.Deferred.debug, DelayedCall.debug)]
-        class SomeCase(TestCase):
-            def test_debugging_enabled(self):
-                debugging.append((defer.Deferred.debug, DelayedCall.debug))
-        test = SomeCase('test_debugging_enabled')
-        runner = AsynchronousDeferredRunTest(
-            test, handlers=test.exception_handlers,
-            reactor=self.make_reactor(), timeout=self.make_timeout())
-        runner.run(self.make_result())
-        self.assertEqual(debugging[0], debugging[1])
-
-    def test_debugging_enabled_during_test_with_debug_flag(self):
-        self.patch(defer.Deferred, 'debug', False)
-        self.patch(DelayedCall, 'debug', False)
-        debugging = []
-        class SomeCase(TestCase):
-            def test_debugging_enabled(self):
-                debugging.append((defer.Deferred.debug, DelayedCall.debug))
-        test = SomeCase('test_debugging_enabled')
-        runner = AsynchronousDeferredRunTest(
-            test, handlers=test.exception_handlers,
-            reactor=self.make_reactor(), timeout=self.make_timeout(),
-            debug=True)
-        runner.run(self.make_result())
-        self.assertEqual([(True, True)], debugging)
-        self.assertEqual(False, defer.Deferred.debug)
-        self.assertEqual(False, DelayedCall.debug)
-
-
-class TestAssertFailsWith(NeedsTwistedTestCase):
-    """Tests for `assert_fails_with`."""
-
-    if SynchronousDeferredRunTest is not None:
-        run_tests_with = SynchronousDeferredRunTest
-
-    def test_assert_fails_with_success(self):
-        # assert_fails_with fails the test if it's given a Deferred that
-        # succeeds.
-        marker = object()
-        d = assert_fails_with(defer.succeed(marker), RuntimeError)
-        def check_result(failure):
-            failure.trap(self.failureException)
-            self.assertThat(
-                str(failure.value),
-                Equals("RuntimeError not raised (%r returned)" % (marker,)))
-        d.addCallbacks(
-            lambda x: self.fail("Should not have succeeded"), check_result)
-        return d
-
-    def test_assert_fails_with_success_multiple_types(self):
-        # assert_fails_with fails the test if it's given a Deferred that
-        # succeeds, even when multiple exception types are expected.
-        marker = object()
-        d = assert_fails_with(
-            defer.succeed(marker), RuntimeError, ZeroDivisionError)
-        def check_result(failure):
-            failure.trap(self.failureException)
-            self.assertThat(
-                str(failure.value),
-                Equals("RuntimeError, ZeroDivisionError not raised "
-                       "(%r returned)" % (marker,)))
-        d.addCallbacks(
-            lambda x: self.fail("Should not have succeeded"), check_result)
-        return d
-
-    def test_assert_fails_with_wrong_exception(self):
-        # assert_fails_with fails the test if the Deferred fails with an
-        # exception type other than one of the expected types.
-        d = assert_fails_with(
-            defer.maybeDeferred(lambda: 1/0), RuntimeError, KeyboardInterrupt)
-        def check_result(failure):
-            failure.trap(self.failureException)
-            lines = str(failure.value).splitlines()
-            self.assertThat(
-                lines[:2],
-                Equals([
-                    ("ZeroDivisionError raised instead of RuntimeError, "
-                     "KeyboardInterrupt:"),
-                    " Traceback (most recent call last):",
-                    ]))
-        d.addCallbacks(
-            lambda x: self.fail("Should not have succeeded"), check_result)
-        return d
-
-    def test_assert_fails_with_expected_exception(self):
-        # assert_fails_with calls back with the value of the failure if it's
-        # one of the expected types of failures.
-        try:
-            1/0
-        except ZeroDivisionError:
-            f = failure.Failure()
-        d = assert_fails_with(defer.fail(f), ZeroDivisionError)
-        return d.addCallback(self.assertThat, Equals(f.value))
-
-    def test_custom_failure_exception(self):
-        # If assert_fails_with is passed a 'failureException' keyword
-        # argument, then it will raise that instead of `AssertionError`.
-        class CustomException(Exception):
-            pass
-        marker = object()
-        d = assert_fails_with(
-            defer.succeed(marker), RuntimeError,
-            failureException=CustomException)
-        def check_result(failure):
-            failure.trap(CustomException)
-            self.assertThat(
-                str(failure.value),
-                Equals("RuntimeError not raised (%r returned)" % (marker,)))
-        return d.addCallbacks(
-            lambda x: self.fail("Should not have succeeded"), check_result)
-
-
-def test_suite():
-    from unittest import TestLoader, TestSuite
-    return TestSuite(
-        [TestLoader().loadTestsFromName(__name__),
-         make_integration_tests()])

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_distutilscmd.py'
--- python-for-subunit2junitxml/testtools/tests/test_distutilscmd.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_distutilscmd.py	1970-01-01 00:00:00 +0000
@@ -1,90 +0,0 @@
-# Copyright (c) 2010-2011 Testtools authors. See LICENSE for details.
-
-"""Tests for the distutils test command logic."""
-
-from distutils.dist import Distribution
-
-from testtools.helpers import try_import, try_imports
-fixtures = try_import('fixtures')
-StringIO = try_imports(['StringIO.StringIO', 'io.StringIO'])
-
-import testtools
-from testtools import TestCase
-from testtools.distutilscmd import TestCommand
-
-
-if fixtures:
-    class SampleTestFixture(fixtures.Fixture):
-        """Creates testtools.runexample temporarily."""
-
-        def __init__(self):
-            self.package = fixtures.PythonPackage(
-                'runexample', [('__init__.py', """
-from testtools import TestCase
-
-class TestFoo(TestCase):
-    def test_bar(self):
-        pass
-    def test_quux(self):
-        pass
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)
-""")])
-
-        def setUp(self):
-            super(SampleTestFixture, self).setUp()
-            self.useFixture(self.package)
-            testtools.__path__.append(self.package.base)
-            self.addCleanup(testtools.__path__.remove, self.package.base)
-
-
-class TestCommandTest(TestCase):
-
-    def setUp(self):
-        super(TestCommandTest, self).setUp()
-        if fixtures is None:
-            self.skipTest("Need fixtures")
-
-    def test_test_module(self):
-        self.useFixture(SampleTestFixture())
-        stream = StringIO()
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        dist.script_args = ['test']
-        dist.cmdclass = {'test': TestCommand}
-        dist.command_options = {
-            'test': {'test_module': ('command line', 'testtools.runexample')}}
-        cmd = dist.reinitialize_command('test')
-        cmd.runner.stdout = stream
-        dist.run_command('test')
-        self.assertEqual("""Tests running...
-Ran 2 tests in 0.000s
-
-OK
-""", stream.getvalue())
-
-    def test_test_suite(self):
-        self.useFixture(SampleTestFixture())
-        stream = StringIO()
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        dist.script_args = ['test']
-        dist.cmdclass = {'test': TestCommand}
-        dist.command_options = {
-            'test': {
-                'test_suite': (
-                    'command line', 'testtools.runexample.test_suite')}}
-        cmd = dist.reinitialize_command('test')
-        cmd.runner.stdout = stream
-        dist.run_command('test')
-        self.assertEqual("""Tests running...
-Ran 2 tests in 0.000s
-
-OK
-""", stream.getvalue())
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_fixturesupport.py'
--- python-for-subunit2junitxml/testtools/tests/test_fixturesupport.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_fixturesupport.py	1970-01-01 00:00:00 +0000
@@ -1,79 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-import unittest
-
-from testtools import (
-    TestCase,
-    content,
-    content_type,
-    )
-from testtools.helpers import try_import
-from testtools.tests.helpers import (
-    ExtendedTestResult,
-    )
-
-fixtures = try_import('fixtures')
-LoggingFixture = try_import('fixtures.tests.helpers.LoggingFixture')
-
-
-class TestFixtureSupport(TestCase):
-
-    def setUp(self):
-        super(TestFixtureSupport, self).setUp()
-        if fixtures is None or LoggingFixture is None:
-            self.skipTest("Need fixtures")
-
-    def test_useFixture(self):
-        fixture = LoggingFixture()
-        class SimpleTest(TestCase):
-            def test_foo(self):
-                self.useFixture(fixture)
-        result = unittest.TestResult()
-        SimpleTest('test_foo').run(result)
-        self.assertTrue(result.wasSuccessful())
-        self.assertEqual(['setUp', 'cleanUp'], fixture.calls)
-
-    def test_useFixture_cleanups_raise_caught(self):
-        calls = []
-        def raiser(ignored):
-            calls.append('called')
-            raise Exception('foo')
-        fixture = fixtures.FunctionFixture(lambda:None, raiser)
-        class SimpleTest(TestCase):
-            def test_foo(self):
-                self.useFixture(fixture)
-        result = unittest.TestResult()
-        SimpleTest('test_foo').run(result)
-        self.assertFalse(result.wasSuccessful())
-        self.assertEqual(['called'], calls)
-
-    def test_useFixture_details_captured(self):
-        class DetailsFixture(fixtures.Fixture):
-            def setUp(self):
-                fixtures.Fixture.setUp(self)
-                self.addCleanup(delattr, self, 'content')
-                self.content = ['content available until cleanUp']
-                self.addDetail('content',
-                    content.Content(content_type.UTF8_TEXT, self.get_content))
-            def get_content(self):
-                return self.content
-        fixture = DetailsFixture()
-        class SimpleTest(TestCase):
-            def test_foo(self):
-                self.useFixture(fixture)
-                # Add a colliding detail (both should show up)
-                self.addDetail('content',
-                    content.Content(content_type.UTF8_TEXT, lambda:['foo']))
-        result = ExtendedTestResult()
-        SimpleTest('test_foo').run(result)
-        self.assertEqual('addSuccess', result._events[-2][0])
-        details = result._events[-2][2]
-        self.assertEqual(['content', 'content-1'], sorted(details.keys()))
-        self.assertEqual('foo', ''.join(details['content'].iter_text()))
-        self.assertEqual('content available until cleanUp',
-            ''.join(details['content-1'].iter_text()))
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_helpers.py'
--- python-for-subunit2junitxml/testtools/tests/test_helpers.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_helpers.py	1970-01-01 00:00:00 +0000
@@ -1,106 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-from testtools import TestCase
-from testtools.helpers import (
-    try_import,
-    try_imports,
-    )
-from testtools.matchers import (
-    Equals,
-    Is,
-    )
-
-
-class TestTryImport(TestCase):
-
-    def test_doesnt_exist(self):
-        # try_import('thing', foo) returns foo if 'thing' doesn't exist.
-        marker = object()
-        result = try_import('doesntexist', marker)
-        self.assertThat(result, Is(marker))
-
-    def test_None_is_default_alternative(self):
-        # try_import('thing') returns None if 'thing' doesn't exist.
-        result = try_import('doesntexist')
-        self.assertThat(result, Is(None))
-
-    def test_existing_module(self):
-        # try_import('thing', foo) imports 'thing' and returns it if it's a
-        # module that exists.
-        result = try_import('os', object())
-        import os
-        self.assertThat(result, Is(os))
-
-    def test_existing_submodule(self):
-        # try_import('thing.another', foo) imports 'thing' and returns it if
-        # it's a module that exists.
-        result = try_import('os.path', object())
-        import os
-        self.assertThat(result, Is(os.path))
-
-    def test_nonexistent_submodule(self):
-        # try_import('thing.another', foo) imports 'thing' and returns foo if
-        # 'another' doesn't exist.
-        marker = object()
-        result = try_import('os.doesntexist', marker)
-        self.assertThat(result, Is(marker))
-
-    def test_object_from_module(self):
-        # try_import('thing.object') imports 'thing' and returns
-        # 'thing.object' if 'thing' is a module and 'object' is not.
-        result = try_import('os.path.join')
-        import os
-        self.assertThat(result, Is(os.path.join))
-
-
-class TestTryImports(TestCase):
-
-    def test_doesnt_exist(self):
-        # try_imports('thing', foo) returns foo if 'thing' doesn't exist.
-        marker = object()
-        result = try_imports(['doesntexist'], marker)
-        self.assertThat(result, Is(marker))
-
-    def test_fallback(self):
-        result = try_imports(['doesntexist', 'os'])
-        import os
-        self.assertThat(result, Is(os))
-
-    def test_None_is_default_alternative(self):
-        # try_imports('thing') returns None if 'thing' doesn't exist.
-        e = self.assertRaises(
-            ImportError, try_imports, ['doesntexist', 'noreally'])
-        self.assertThat(
-            str(e),
-            Equals("Could not import any of: doesntexist, noreally"))
-
-    def test_existing_module(self):
-        # try_imports('thing', foo) imports 'thing' and returns it if it's a
-        # module that exists.
-        result = try_imports(['os'], object())
-        import os
-        self.assertThat(result, Is(os))
-
-    def test_existing_submodule(self):
-        # try_imports('thing.another', foo) imports 'thing' and returns it if
-        # it's a module that exists.
-        result = try_imports(['os.path'], object())
-        import os
-        self.assertThat(result, Is(os.path))
-
-    def test_nonexistent_submodule(self):
-        # try_imports('thing.another', foo) imports 'thing' and returns foo if
-        # 'another' doesn't exist.
-        marker = object()
-        result = try_imports(['os.doesntexist'], marker)
-        self.assertThat(result, Is(marker))
-
-    def test_fallback_submodule(self):
-        result = try_imports(['os.doesntexist', 'os.path'])
-        import os
-        self.assertThat(result, Is(os.path))
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_matchers.py'
--- python-for-subunit2junitxml/testtools/tests/test_matchers.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_matchers.py	1970-01-01 00:00:00 +0000
@@ -1,695 +0,0 @@
-# Copyright (c) 2008-2010 testtools developers. See LICENSE for details.
-
-"""Tests for matchers."""
-
-import doctest
-import re
-import StringIO
-import sys
-
-from testtools import (
-    Matcher, # check that Matcher is exposed at the top level for docs.
-    TestCase,
-    )
-from testtools.matchers import (
-    AfterPreproccessing,
-    Annotate,
-    AnnotatedMismatch,
-    Equals,
-    DocTestMatches,
-    DoesNotEndWith,
-    DoesNotStartWith,
-    EndsWith,
-    KeysEqual,
-    Is,
-    LessThan,
-    MatchesAny,
-    MatchesAll,
-    MatchesException,
-    MatchesListwise,
-    MatchesRegex,
-    MatchesSetwise,
-    MatchesStructure,
-    Mismatch,
-    MismatchDecorator,
-    Not,
-    NotEquals,
-    Raises,
-    raises,
-    StartsWith,
-    )
-
-# Silence pyflakes.
-Matcher
-
-
-class TestMismatch(TestCase):
-
-    def test_constructor_arguments(self):
-        mismatch = Mismatch("some description", {'detail': "things"})
-        self.assertEqual("some description", mismatch.describe())
-        self.assertEqual({'detail': "things"}, mismatch.get_details())
-
-    def test_constructor_no_arguments(self):
-        mismatch = Mismatch()
-        self.assertThat(mismatch.describe,
-            Raises(MatchesException(NotImplementedError)))
-        self.assertEqual({}, mismatch.get_details())
-
-
-class TestMatchersInterface(object):
-
-    def test_matches_match(self):
-        matcher = self.matches_matcher
-        matches = self.matches_matches
-        mismatches = self.matches_mismatches
-        for candidate in matches:
-            self.assertEqual(None, matcher.match(candidate))
-        for candidate in mismatches:
-            mismatch = matcher.match(candidate)
-            self.assertNotEqual(None, mismatch)
-            self.assertNotEqual(None, getattr(mismatch, 'describe', None))
-
-    def test__str__(self):
-        # [(expected, object to __str__)].
-        examples = self.str_examples
-        for expected, matcher in examples:
-            self.assertThat(matcher, DocTestMatches(expected))
-
-    def test_describe_difference(self):
-        # [(expected, matchee, matcher), ...]
-        examples = self.describe_examples
-        for difference, matchee, matcher in examples:
-            mismatch = matcher.match(matchee)
-            self.assertEqual(difference, mismatch.describe())
-
-    def test_mismatch_details(self):
-        # The mismatch object must provide get_details, which must return a
-        # dictionary mapping names to Content objects.
-        examples = self.describe_examples
-        for difference, matchee, matcher in examples:
-            mismatch = matcher.match(matchee)
-            details = mismatch.get_details()
-            self.assertEqual(dict(details), details)
-
-
-class TestDocTestMatchesInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = DocTestMatches("Ran 1 test in ...s", doctest.ELLIPSIS)
-    matches_matches = ["Ran 1 test in 0.000s", "Ran 1 test in 1.234s"]
-    matches_mismatches = ["Ran 1 tests in 0.000s", "Ran 2 test in 0.000s"]
-
-    str_examples = [("DocTestMatches('Ran 1 test in ...s\\n')",
-        DocTestMatches("Ran 1 test in ...s")),
-        ("DocTestMatches('foo\\n', flags=8)", DocTestMatches("foo", flags=8)),
-        ]
-
-    describe_examples = [('Expected:\n    Ran 1 tests in ...s\nGot:\n'
-        '    Ran 1 test in 0.123s\n', "Ran 1 test in 0.123s",
-        DocTestMatches("Ran 1 tests in ...s", doctest.ELLIPSIS))]
-
-
-class TestDocTestMatchesSpecific(TestCase):
-
-    def test___init__simple(self):
-        matcher = DocTestMatches("foo")
-        self.assertEqual("foo\n", matcher.want)
-
-    def test___init__flags(self):
-        matcher = DocTestMatches("bar\n", doctest.ELLIPSIS)
-        self.assertEqual("bar\n", matcher.want)
-        self.assertEqual(doctest.ELLIPSIS, matcher.flags)
-
-
-class TestEqualsInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = Equals(1)
-    matches_matches = [1]
-    matches_mismatches = [2]
-
-    str_examples = [("Equals(1)", Equals(1)), ("Equals('1')", Equals('1'))]
-
-    describe_examples = [("1 != 2", 2, Equals(1))]
-
-
-class TestNotEqualsInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = NotEquals(1)
-    matches_matches = [2]
-    matches_mismatches = [1]
-
-    str_examples = [
-        ("NotEquals(1)", NotEquals(1)), ("NotEquals('1')", NotEquals('1'))]
-
-    describe_examples = [("1 == 1", 1, NotEquals(1))]
-
-
-class TestIsInterface(TestCase, TestMatchersInterface):
-
-    foo = object()
-    bar = object()
-
-    matches_matcher = Is(foo)
-    matches_matches = [foo]
-    matches_mismatches = [bar, 1]
-
-    str_examples = [("Is(2)", Is(2))]
-
-    describe_examples = [("1 is not 2", 2, Is(1))]
-
-
-class TestLessThanInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = LessThan(4)
-    matches_matches = [-5, 3]
-    matches_mismatches = [4, 5, 5000]
-
-    str_examples = [
-        ("LessThan(12)", LessThan(12)),
-        ]
-
-    describe_examples = [('4 is >= 4', 4, LessThan(4))]
-
-
-def make_error(type, *args, **kwargs):
-    try:
-        raise type(*args, **kwargs)
-    except type:
-        return sys.exc_info()
-
-
-class TestMatchesExceptionInstanceInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = MatchesException(ValueError("foo"))
-    error_foo = make_error(ValueError, 'foo')
-    error_bar = make_error(ValueError, 'bar')
-    error_base_foo = make_error(Exception, 'foo')
-    matches_matches = [error_foo]
-    matches_mismatches = [error_bar, error_base_foo]
-
-    str_examples = [
-        ("MatchesException(Exception('foo',))",
-         MatchesException(Exception('foo')))
-        ]
-    describe_examples = [
-        ("%r is not a %r" % (Exception, ValueError),
-         error_base_foo,
-         MatchesException(ValueError("foo"))),
-        ("ValueError('bar',) has different arguments to ValueError('foo',).",
-         error_bar,
-         MatchesException(ValueError("foo"))),
-        ]
-
-
-class TestMatchesExceptionTypeInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = MatchesException(ValueError)
-    error_foo = make_error(ValueError, 'foo')
-    error_sub = make_error(UnicodeError, 'bar')
-    error_base_foo = make_error(Exception, 'foo')
-    matches_matches = [error_foo, error_sub]
-    matches_mismatches = [error_base_foo]
-
-    str_examples = [
-        ("MatchesException(%r)" % Exception,
-         MatchesException(Exception))
-        ]
-    describe_examples = [
-        ("%r is not a %r" % (Exception, ValueError),
-         error_base_foo,
-         MatchesException(ValueError)),
-        ]
-
-
-class TestMatchesExceptionTypeReInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = MatchesException(ValueError, 'fo.')
-    error_foo = make_error(ValueError, 'foo')
-    error_sub = make_error(UnicodeError, 'foo')
-    error_bar = make_error(ValueError, 'bar')
-    matches_matches = [error_foo, error_sub]
-    matches_mismatches = [error_bar]
-
-    str_examples = [
-        ("MatchesException(%r)" % Exception,
-         MatchesException(Exception))
-        ]
-    describe_examples = [
-        ('"bar" does not match "fo.".',
-         error_bar, MatchesException(ValueError, "fo.")),
-        ]
-
-
-class TestNotInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = Not(Equals(1))
-    matches_matches = [2]
-    matches_mismatches = [1]
-
-    str_examples = [
-        ("Not(Equals(1))", Not(Equals(1))),
-        ("Not(Equals('1'))", Not(Equals('1')))]
-
-    describe_examples = [('1 matches Equals(1)', 1, Not(Equals(1)))]
-
-
-class TestMatchersAnyInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = MatchesAny(DocTestMatches("1"), DocTestMatches("2"))
-    matches_matches = ["1", "2"]
-    matches_mismatches = ["3"]
-
-    str_examples = [(
-        "MatchesAny(DocTestMatches('1\\n'), DocTestMatches('2\\n'))",
-        MatchesAny(DocTestMatches("1"), DocTestMatches("2"))),
-        ]
-
-    describe_examples = [("""Differences: [
-Expected:
-    1
-Got:
-    3
-
-Expected:
-    2
-Got:
-    3
-
-]""",
-        "3", MatchesAny(DocTestMatches("1"), DocTestMatches("2")))]
-
-
-class TestMatchesAllInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = MatchesAll(NotEquals(1), NotEquals(2))
-    matches_matches = [3, 4]
-    matches_mismatches = [1, 2]
-
-    str_examples = [
-        ("MatchesAll(NotEquals(1), NotEquals(2))",
-         MatchesAll(NotEquals(1), NotEquals(2)))]
-
-    describe_examples = [("""Differences: [
-1 == 1
-]""",
-                          1, MatchesAll(NotEquals(1), NotEquals(2)))]
-
-
-class TestKeysEqual(TestCase, TestMatchersInterface):
-
-    matches_matcher = KeysEqual('foo', 'bar')
-    matches_matches = [
-        {'foo': 0, 'bar': 1},
-        ]
-    matches_mismatches = [
-        {},
-        {'foo': 0},
-        {'bar': 1},
-        {'foo': 0, 'bar': 1, 'baz': 2},
-        {'a': None, 'b': None, 'c': None},
-        ]
-
-    str_examples = [
-        ("KeysEqual('foo', 'bar')", KeysEqual('foo', 'bar')),
-        ]
-
-    describe_examples = [
-        ("['bar', 'foo'] does not match {'baz': 2, 'foo': 0, 'bar': 1}: "
-         "Keys not equal",
-         {'foo': 0, 'bar': 1, 'baz': 2}, KeysEqual('foo', 'bar')),
-        ]
-
-
-class TestAnnotate(TestCase, TestMatchersInterface):
-
-    matches_matcher = Annotate("foo", Equals(1))
-    matches_matches = [1]
-    matches_mismatches = [2]
-
-    str_examples = [
-        ("Annotate('foo', Equals(1))", Annotate("foo", Equals(1)))]
-
-    describe_examples = [("1 != 2: foo", 2, Annotate('foo', Equals(1)))]
-
-
-class TestAnnotatedMismatch(TestCase):
-
-    def test_forwards_details(self):
-        x = Mismatch('description', {'foo': 'bar'})
-        annotated = AnnotatedMismatch("annotation", x)
-        self.assertEqual(x.get_details(), annotated.get_details())
-
-
-class TestRaisesInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = Raises()
-    def boom():
-        raise Exception('foo')
-    matches_matches = [boom]
-    matches_mismatches = [lambda:None]
-
-    # Tricky to get function objects to render consistently, and the interfaces
-    # helper uses assertEqual rather than (for instance) DocTestMatches.
-    str_examples = []
-
-    describe_examples = []
-
-
-class TestRaisesExceptionMatcherInterface(TestCase, TestMatchersInterface):
-
-    matches_matcher = Raises(
-        exception_matcher=MatchesException(Exception('foo')))
-    def boom_bar():
-        raise Exception('bar')
-    def boom_foo():
-        raise Exception('foo')
-    matches_matches = [boom_foo]
-    matches_mismatches = [lambda:None, boom_bar]
-
-    # Tricky to get function objects to render consistently, and the interfaces
-    # helper uses assertEqual rather than (for instance) DocTestMatches.
-    str_examples = []
-
-    describe_examples = []
-
-
-class TestRaisesBaseTypes(TestCase):
-
-    def raiser(self):
-        raise KeyboardInterrupt('foo')
-
-    def test_KeyboardInterrupt_matched(self):
-        # When KeyboardInterrupt is matched, it is swallowed.
-        matcher = Raises(MatchesException(KeyboardInterrupt))
-        self.assertThat(self.raiser, matcher)
-
-    def test_KeyboardInterrupt_propogates(self):
-        # The default 'it raised' propagates KeyboardInterrupt.
-        match_keyb = Raises(MatchesException(KeyboardInterrupt))
-        def raise_keyb_from_match():
-            matcher = Raises()
-            matcher.match(self.raiser)
-        self.assertThat(raise_keyb_from_match, match_keyb)
-
-    def test_KeyboardInterrupt_match_Exception_propogates(self):
-        # If the raised exception isn't matched, and it is not a subclass of
-        # Exception, it is propagated.
-        match_keyb = Raises(MatchesException(KeyboardInterrupt))
-        def raise_keyb_from_match():
-            if sys.version_info > (2, 5):
-                matcher = Raises(MatchesException(Exception))
-            else:
-                # On Python 2.4 KeyboardInterrupt is a StandardError subclass
-                # but should propagate from less generic exception matchers
-                matcher = Raises(MatchesException(EnvironmentError))
-            matcher.match(self.raiser)
-        self.assertThat(raise_keyb_from_match, match_keyb)
-
-
-class TestRaisesConvenience(TestCase):
-
-    def test_exc_type(self):
-        self.assertThat(lambda: 1/0, raises(ZeroDivisionError))
-
-    def test_exc_value(self):
-        e = RuntimeError("You lose!")
-        def raiser():
-            raise e
-        self.assertThat(raiser, raises(e))
-
-
-class DoesNotStartWithTests(TestCase):
-
-    def test_describe(self):
-        mismatch = DoesNotStartWith("fo", "bo")
-        self.assertEqual("'fo' does not start with 'bo'.", mismatch.describe())
-
-
-class StartsWithTests(TestCase):
-
-    def test_str(self):
-        matcher = StartsWith("bar")
-        self.assertEqual("Starts with 'bar'.", str(matcher))
-
-    def test_match(self):
-        matcher = StartsWith("bar")
-        self.assertIs(None, matcher.match("barf"))
-
-    def test_mismatch_returns_does_not_start_with(self):
-        matcher = StartsWith("bar")
-        self.assertIsInstance(matcher.match("foo"), DoesNotStartWith)
-
-    def test_mismatch_sets_matchee(self):
-        matcher = StartsWith("bar")
-        mismatch = matcher.match("foo")
-        self.assertEqual("foo", mismatch.matchee)
-
-    def test_mismatch_sets_expected(self):
-        matcher = StartsWith("bar")
-        mismatch = matcher.match("foo")
-        self.assertEqual("bar", mismatch.expected)
-
-
-class DoesNotEndWithTests(TestCase):
-
-    def test_describe(self):
-        mismatch = DoesNotEndWith("fo", "bo")
-        self.assertEqual("'fo' does not end with 'bo'.", mismatch.describe())
-
-
-class EndsWithTests(TestCase):
-
-    def test_str(self):
-        matcher = EndsWith("bar")
-        self.assertEqual("Ends with 'bar'.", str(matcher))
-
-    def test_match(self):
-        matcher = EndsWith("arf")
-        self.assertIs(None, matcher.match("barf"))
-
-    def test_mismatch_returns_does_not_end_with(self):
-        matcher = EndsWith("bar")
-        self.assertIsInstance(matcher.match("foo"), DoesNotEndWith)
-
-    def test_mismatch_sets_matchee(self):
-        matcher = EndsWith("bar")
-        mismatch = matcher.match("foo")
-        self.assertEqual("foo", mismatch.matchee)
-
-    def test_mismatch_sets_expected(self):
-        matcher = EndsWith("bar")
-        mismatch = matcher.match("foo")
-        self.assertEqual("bar", mismatch.expected)
-
-
-def run_doctest(obj, name):
-    p = doctest.DocTestParser()
-    t = p.get_doctest(
-        obj.__doc__, sys.modules[obj.__module__].__dict__, name, '', 0)
-    r = doctest.DocTestRunner()
-    output = StringIO.StringIO()
-    r.run(t, out=output.write)
-    return r.failures, output.getvalue()
-
-
-class TestMatchesListwise(TestCase):
-
-    def test_docstring(self):
-        failure_count, output = run_doctest(
-            MatchesListwise, "MatchesListwise")
-        if failure_count:
-            self.fail("Doctest failed with %s" % output)
-
-
-class TestMatchesStructure(TestCase, TestMatchersInterface):
-
-    class SimpleClass:
-        def __init__(self, x, y):
-            self.x = x
-            self.y = y
-
-    matches_matcher = MatchesStructure(x=Equals(1), y=Equals(2))
-    matches_matches = [SimpleClass(1, 2)]
-    matches_mismatches = [
-        SimpleClass(2, 2),
-        SimpleClass(1, 1),
-        SimpleClass(3, 3),
-        ]
-
-    str_examples = [
-        ("MatchesStructure(x=Equals(1))", MatchesStructure(x=Equals(1))),
-        ("MatchesStructure(y=Equals(2))", MatchesStructure(y=Equals(2))),
-        ("MatchesStructure(x=Equals(1), y=Equals(2))",
-         MatchesStructure(x=Equals(1), y=Equals(2))),
-        ]
-
-    describe_examples = [
-        ("""\
-Differences: [
-3 != 1: x
-]""", SimpleClass(1, 2), MatchesStructure(x=Equals(3), y=Equals(2))),
-        ("""\
-Differences: [
-3 != 2: y
-]""", SimpleClass(1, 2), MatchesStructure(x=Equals(1), y=Equals(3))),
-        ("""\
-Differences: [
-0 != 1: x
-0 != 2: y
-]""", SimpleClass(1, 2), MatchesStructure(x=Equals(0), y=Equals(0))),
-        ]
-
-    def test_fromExample(self):
-        self.assertThat(
-            self.SimpleClass(1, 2),
-            MatchesStructure.fromExample(self.SimpleClass(1, 3), 'x'))
-
-    def test_update(self):
-        self.assertThat(
-            self.SimpleClass(1, 2),
-            MatchesStructure(x=NotEquals(1)).update(x=Equals(1)))
-
-    def test_update_none(self):
-        self.assertThat(
-            self.SimpleClass(1, 2),
-            MatchesStructure(x=Equals(1), z=NotEquals(42)).update(
-                z=None))
-
-
-class TestMatchesRegex(TestCase, TestMatchersInterface):
-
-    matches_matcher = MatchesRegex('a|b')
-    matches_matches = ['a', 'b']
-    matches_mismatches = ['c']
-
-    str_examples = [
-        ("MatchesRegex('a|b')", MatchesRegex('a|b')),
-        ("MatchesRegex('a|b', re.M)", MatchesRegex('a|b', re.M)),
-        ("MatchesRegex('a|b', re.I|re.M)", MatchesRegex('a|b', re.I|re.M)),
-        ]
-
-    describe_examples = [
-        ("'a|b' did not match 'c'", 'c', MatchesRegex('a|b')),
-        ]
-
-
-class TestMatchesSetwise(TestCase):
-
-    def assertMismatchWithDescriptionMatching(self, value, matcher,
-                                              description_matcher):
-        mismatch = matcher.match(value)
-        if mismatch is None:
-            self.fail("%s matched %s" % (matcher, value))
-        actual_description = mismatch.describe()
-        self.assertThat(
-            actual_description,
-            Annotate(
-                "%s matching %s" % (matcher, value),
-                description_matcher))
-
-    def test_matches(self):
-        self.assertIs(
-            None, MatchesSetwise(Equals(1), Equals(2)).match([2, 1]))
-
-    def test_mismatches(self):
-        self.assertMismatchWithDescriptionMatching(
-            [2, 3], MatchesSetwise(Equals(1), Equals(2)),
-            MatchesRegex('.*There was 1 mismatch$', re.S))
-
-    def test_too_many_matchers(self):
-        self.assertMismatchWithDescriptionMatching(
-            [2, 3], MatchesSetwise(Equals(1), Equals(2), Equals(3)),
-            Equals('There was 1 matcher left over: Equals(1)'))
-
-    def test_too_many_values(self):
-        self.assertMismatchWithDescriptionMatching(
-            [1, 2, 3], MatchesSetwise(Equals(1), Equals(2)),
-            Equals('There was 1 value left over: [3]'))
-
-    def test_two_too_many_matchers(self):
-        self.assertMismatchWithDescriptionMatching(
-            [3], MatchesSetwise(Equals(1), Equals(2), Equals(3)),
-            MatchesRegex(
-                'There were 2 matchers left over: Equals\([12]\), '
-                'Equals\([12]\)'))
-
-    def test_two_too_many_values(self):
-        self.assertMismatchWithDescriptionMatching(
-            [1, 2, 3, 4], MatchesSetwise(Equals(1), Equals(2)),
-            MatchesRegex(
-                'There were 2 values left over: \[[34], [34]\]'))
-
-    def test_mismatch_and_too_many_matchers(self):
-        self.assertMismatchWithDescriptionMatching(
-            [2, 3], MatchesSetwise(Equals(0), Equals(1), Equals(2)),
-            MatchesRegex(
-                '.*There was 1 mismatch and 1 extra matcher: Equals\([01]\)',
-                re.S))
-
-    def test_mismatch_and_too_many_values(self):
-        self.assertMismatchWithDescriptionMatching(
-            [2, 3, 4], MatchesSetwise(Equals(1), Equals(2)),
-            MatchesRegex(
-                '.*There was 1 mismatch and 1 extra value: \[[34]\]',
-                re.S))
-
-    def test_mismatch_and_two_too_many_matchers(self):
-        self.assertMismatchWithDescriptionMatching(
-            [3, 4], MatchesSetwise(
-                Equals(0), Equals(1), Equals(2), Equals(3)),
-            MatchesRegex(
-                '.*There was 1 mismatch and 2 extra matchers: '
-                'Equals\([012]\), Equals\([012]\)', re.S))
-
-    def test_mismatch_and_two_too_many_values(self):
-        self.assertMismatchWithDescriptionMatching(
-            [2, 3, 4, 5], MatchesSetwise(Equals(1), Equals(2)),
-            MatchesRegex(
-                '.*There was 1 mismatch and 2 extra values: \[[145], [145]\]',
-                re.S))
-
-
-class TestAfterPreproccessing(TestCase, TestMatchersInterface):
-
-    def parity(x):
-        return x % 2
-
-    matches_matcher = AfterPreproccessing(parity, Equals(1))
-    matches_matches = [3, 5]
-    matches_mismatches = [2]
-
-    str_examples = [
-        ("AfterPreproccessing(<function parity>, Equals(1))",
-         AfterPreproccessing(parity, Equals(1))),
-        ]
-
-    describe_examples = [
-        ("1 != 0: after <function parity>",
-         2,
-         AfterPreproccessing(parity, Equals(1))),
-        ]
-
-
-class TestMismatchDecorator(TestCase):
-
-    def test_forwards_description(self):
-        x = Mismatch("description", {'foo': 'bar'})
-        decorated = MismatchDecorator(x)
-        self.assertEqual(x.describe(), decorated.describe())
-
-    def test_forwards_details(self):
-        x = Mismatch("description", {'foo': 'bar'})
-        decorated = MismatchDecorator(x)
-        self.assertEqual(x.get_details(), decorated.get_details())
-
-    def test_repr(self):
-        x = Mismatch("description", {'foo': 'bar'})
-        decorated = MismatchDecorator(x)
-        self.assertEqual(
-            '<testtools.matchers.MismatchDecorator(%r)>' % (x,),
-            repr(decorated))
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_monkey.py'
--- python-for-subunit2junitxml/testtools/tests/test_monkey.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_monkey.py	1970-01-01 00:00:00 +0000
@@ -1,167 +0,0 @@
-# Copyright (c) 2010 Twisted Matrix Laboratories.
-# See LICENSE for details.
-
-"""Tests for testtools.monkey."""
-
-from testtools import TestCase
-from testtools.matchers import MatchesException, Raises
-from testtools.monkey import MonkeyPatcher, patch
-
-
-class TestObj:
-
-    def __init__(self):
-        self.foo = 'foo value'
-        self.bar = 'bar value'
-        self.baz = 'baz value'
-
-
-class MonkeyPatcherTest(TestCase):
-    """
-    Tests for the 'MonkeyPatcher' monkey-patching class.
-    """
-
-    def setUp(self):
-        super(MonkeyPatcherTest, self).setUp()
-        self.test_object = TestObj()
-        self.original_object = TestObj()
-        self.monkey_patcher = MonkeyPatcher()
-
-    def test_empty(self):
-        # A monkey patcher without patches doesn't change a thing.
-        self.monkey_patcher.patch()
-
-        # We can't assert that all state is unchanged, but at least we can
-        # check our test object.
-        self.assertEquals(self.original_object.foo, self.test_object.foo)
-        self.assertEquals(self.original_object.bar, self.test_object.bar)
-        self.assertEquals(self.original_object.baz, self.test_object.baz)
-
-    def test_construct_with_patches(self):
-        # Constructing a 'MonkeyPatcher' with patches adds all of the given
-        # patches to the patch list.
-        patcher = MonkeyPatcher((self.test_object, 'foo', 'haha'),
-                                (self.test_object, 'bar', 'hehe'))
-        patcher.patch()
-        self.assertEquals('haha', self.test_object.foo)
-        self.assertEquals('hehe', self.test_object.bar)
-        self.assertEquals(self.original_object.baz, self.test_object.baz)
-
-    def test_patch_existing(self):
-        # Patching an attribute that exists sets it to the value defined in the
-        # patch.
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
-        self.monkey_patcher.patch()
-        self.assertEquals(self.test_object.foo, 'haha')
-
-    def test_patch_non_existing(self):
-        # Patching a non-existing attribute sets it to the value defined in
-        # the patch.
-        self.monkey_patcher.add_patch(self.test_object, 'doesntexist', 'value')
-        self.monkey_patcher.patch()
-        self.assertEquals(self.test_object.doesntexist, 'value')
-
-    def test_restore_non_existing(self):
-        # Restoring a value that didn't exist before the patch deletes the
-        # value.
-        self.monkey_patcher.add_patch(self.test_object, 'doesntexist', 'value')
-        self.monkey_patcher.patch()
-        self.monkey_patcher.restore()
-        marker = object()
-        self.assertIs(marker, getattr(self.test_object, 'doesntexist', marker))
-
-    def test_patch_already_patched(self):
-        # Adding a patch for an object and attribute that already have a patch
-        # overrides the existing patch.
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'blah')
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'BLAH')
-        self.monkey_patcher.patch()
-        self.assertEquals(self.test_object.foo, 'BLAH')
-        self.monkey_patcher.restore()
-        self.assertEquals(self.test_object.foo, self.original_object.foo)
-
-    def test_restore_twice_is_a_no_op(self):
-        # Restoring an already-restored monkey patch is a no-op.
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'blah')
-        self.monkey_patcher.patch()
-        self.monkey_patcher.restore()
-        self.assertEquals(self.test_object.foo, self.original_object.foo)
-        self.monkey_patcher.restore()
-        self.assertEquals(self.test_object.foo, self.original_object.foo)
-
-    def test_run_with_patches_decoration(self):
-        # run_with_patches runs the given callable, passing in all arguments
-        # and keyword arguments, and returns the return value of the callable.
-        log = []
-
-        def f(a, b, c=None):
-            log.append((a, b, c))
-            return 'foo'
-
-        result = self.monkey_patcher.run_with_patches(f, 1, 2, c=10)
-        self.assertEquals('foo', result)
-        self.assertEquals([(1, 2, 10)], log)
-
-    def test_repeated_run_with_patches(self):
-        # We can call the same function with run_with_patches more than
-        # once. All patches apply for each call.
-        def f():
-            return (self.test_object.foo, self.test_object.bar,
-                    self.test_object.baz)
-
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
-        result = self.monkey_patcher.run_with_patches(f)
-        self.assertEquals(
-            ('haha', self.original_object.bar, self.original_object.baz),
-            result)
-        result = self.monkey_patcher.run_with_patches(f)
-        self.assertEquals(
-            ('haha', self.original_object.bar, self.original_object.baz),
-            result)
-
-    def test_run_with_patches_restores(self):
-        # run_with_patches restores the original values after the function has
-        # executed.
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
-        self.assertEquals(self.original_object.foo, self.test_object.foo)
-        self.monkey_patcher.run_with_patches(lambda: None)
-        self.assertEquals(self.original_object.foo, self.test_object.foo)
-
-    def test_run_with_patches_restores_on_exception(self):
-        # run_with_patches restores the original values even when the function
-        # raises an exception.
-        def _():
-            self.assertEquals(self.test_object.foo, 'haha')
-            self.assertEquals(self.test_object.bar, 'blahblah')
-            raise RuntimeError("Something went wrong!")
-
-        self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
-        self.monkey_patcher.add_patch(self.test_object, 'bar', 'blahblah')
-
-        self.assertThat(lambda:self.monkey_patcher.run_with_patches(_),
-            Raises(MatchesException(RuntimeError("Something went wrong!"))))
-        self.assertEquals(self.test_object.foo, self.original_object.foo)
-        self.assertEquals(self.test_object.bar, self.original_object.bar)
-
-
-class TestPatchHelper(TestCase):
-
-    def test_patch_patches(self):
-        # patch(obj, name, value) sets obj.name to value.
-        test_object = TestObj()
-        patch(test_object, 'foo', 42)
-        self.assertEqual(42, test_object.foo)
-
-    def test_patch_returns_cleanup(self):
-        # patch(obj, name, value) returns a nullary callable that restores obj
-        # to its original state when run.
-        test_object = TestObj()
-        original = test_object.foo
-        cleanup = patch(test_object, 'foo', 42)
-        cleanup()
-        self.assertEqual(original, test_object.foo)
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_run.py'
--- python-for-subunit2junitxml/testtools/tests/test_run.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_run.py	1970-01-01 00:00:00 +0000
@@ -1,77 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Tests for the test runner logic."""
-
-from testtools.compat import StringIO
-from testtools.helpers import try_import
-fixtures = try_import('fixtures')
-
-import testtools
-from testtools import TestCase, run
-
-
-if fixtures:
-    class SampleTestFixture(fixtures.Fixture):
-        """Creates testtools.runexample temporarily."""
-
-        def __init__(self):
-            self.package = fixtures.PythonPackage(
-            'runexample', [('__init__.py', """
-from testtools import TestCase
-
-class TestFoo(TestCase):
-    def test_bar(self):
-        pass
-    def test_quux(self):
-        pass
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)
-""")])
-
-        def setUp(self):
-            super(SampleTestFixture, self).setUp()
-            self.useFixture(self.package)
-            testtools.__path__.append(self.package.base)
-            self.addCleanup(testtools.__path__.remove, self.package.base)
-
-
-class TestRun(TestCase):
-
-    def test_run_list(self):
-        if fixtures is None:
-            self.skipTest("Need fixtures")
-        self.useFixture(SampleTestFixture())
-        out = StringIO()
-        run.main(['prog', '-l', 'testtools.runexample.test_suite'], out)
-        self.assertEqual("""testtools.runexample.TestFoo.test_bar
-testtools.runexample.TestFoo.test_quux
-""", out.getvalue())
-
-    def test_run_load_list(self):
-        if fixtures is None:
-            self.skipTest("Need fixtures")
-        self.useFixture(SampleTestFixture())
-        out = StringIO()
-        # We load a list naming two tests - one that exists and one that
-        # doesn't. We should get only the existing named test: neither the
-        # missing one nor the unlisted one that does exist.
-        tempdir = self.useFixture(fixtures.TempDir())
-        tempname = tempdir.path + '/tests.list'
-        f = open(tempname, 'wb')
-        try:
-            f.write("""
-testtools.runexample.TestFoo.test_bar
-testtools.runexample.missingtest
-""")
-        finally:
-            f.close()
-        run.main(['prog', '-l', '--load-list', tempname,
-            'testtools.runexample.test_suite'], out)
-        self.assertEqual("""testtools.runexample.TestFoo.test_bar
-""", out.getvalue())
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_runtest.py'
--- python-for-subunit2junitxml/testtools/tests/test_runtest.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_runtest.py	1970-01-01 00:00:00 +0000
@@ -1,300 +0,0 @@
-# Copyright (c) 2009-2010 testtools developers. See LICENSE for details.
-
-"""Tests for the RunTest single test execution logic."""
-
-from testtools import (
-    ExtendedToOriginalDecorator,
-    run_test_with,
-    RunTest,
-    TestCase,
-    TestResult,
-    )
-from testtools.matchers import MatchesException, Is, Raises
-from testtools.tests.helpers import ExtendedTestResult
-
-
-class TestRunTest(TestCase):
-
-    def make_case(self):
-        class Case(TestCase):
-            def test(self):
-                pass
-        return Case('test')
-
-    def test___init___short(self):
-        run = RunTest("bar")
-        self.assertEqual("bar", run.case)
-        self.assertEqual([], run.handlers)
-
-    def test__init____handlers(self):
-        handlers = [("quux", "baz")]
-        run = RunTest("bar", handlers)
-        self.assertEqual(handlers, run.handlers)
-
-    def test_run_with_result(self):
-        # test.run passes result down to _run_test_method.
-        log = []
-        class Case(TestCase):
-            def _run_test_method(self, result):
-                log.append(result)
-        case = Case('_run_test_method')
-        run = RunTest(case, lambda x: log.append(x))
-        result = TestResult()
-        run.run(result)
-        self.assertEqual(1, len(log))
-        self.assertEqual(result, log[0].decorated)
-
-    def test_run_no_result_manages_new_result(self):
-        log = []
-        run = RunTest(self.make_case(), lambda x: log.append(x) or x)
-        result = run.run()
-        self.assertIsInstance(result.decorated, TestResult)
-
-    def test__run_core_called(self):
-        case = self.make_case()
-        log = []
-        run = RunTest(case, lambda x: x)
-        run._run_core = lambda: log.append('foo')
-        run.run()
-        self.assertEqual(['foo'], log)
-
-    def test__run_user_does_not_catch_keyboard(self):
-        case = self.make_case()
-        def raises():
-            raise KeyboardInterrupt("yo")
-        run = RunTest(case, None)
-        run.result = ExtendedTestResult()
-        self.assertThat(lambda: run._run_user(raises),
-            Raises(MatchesException(KeyboardInterrupt)))
-        self.assertEqual([], run.result._events)
-
-    def test__run_user_calls_onException(self):
-        case = self.make_case()
-        log = []
-        def handler(exc_info):
-            log.append("got it")
-            self.assertEqual(3, len(exc_info))
-            self.assertIsInstance(exc_info[1], KeyError)
-            self.assertIs(KeyError, exc_info[0])
-        case.addOnException(handler)
-        e = KeyError('Yo')
-        def raises():
-            raise e
-        run = RunTest(case, [(KeyError, None)])
-        run.result = ExtendedTestResult()
-        status = run._run_user(raises)
-        self.assertEqual(run.exception_caught, status)
-        self.assertEqual([], run.result._events)
-        self.assertEqual(["got it"], log)
-
-    def test__run_user_can_catch_Exception(self):
-        case = self.make_case()
-        e = Exception('Yo')
-        def raises():
-            raise e
-        log = []
-        run = RunTest(case, [(Exception, None)])
-        run.result = ExtendedTestResult()
-        status = run._run_user(raises)
-        self.assertEqual(run.exception_caught, status)
-        self.assertEqual([], run.result._events)
-        self.assertEqual([], log)
-
-    def test__run_user_uncaught_Exception_raised(self):
-        case = self.make_case()
-        e = KeyError('Yo')
-        def raises():
-            raise e
-        log = []
-        def log_exc(self, result, err):
-            log.append((result, err))
-        run = RunTest(case, [(ValueError, log_exc)])
-        run.result = ExtendedTestResult()
-        self.assertThat(lambda: run._run_user(raises),
-            Raises(MatchesException(KeyError)))
-        self.assertEqual([], run.result._events)
-        self.assertEqual([], log)
-
-    def test__run_user_uncaught_Exception_from_exception_handler_raised(self):
-        case = self.make_case()
-        def broken_handler(exc_info):
-            # ValueError because that's what we know how to catch - and must
-            # not.
-            raise ValueError('boo')
-        case.addOnException(broken_handler)
-        e = KeyError('Yo')
-        def raises():
-            raise e
-        log = []
-        def log_exc(self, result, err):
-            log.append((result, err))
-        run = RunTest(case, [(ValueError, log_exc)])
-        run.result = ExtendedTestResult()
-        self.assertThat(lambda: run._run_user(raises),
-            Raises(MatchesException(ValueError)))
-        self.assertEqual([], run.result._events)
-        self.assertEqual([], log)
-
-    def test__run_user_returns_result(self):
-        case = self.make_case()
-        def returns():
-            return 1
-        run = RunTest(case)
-        run.result = ExtendedTestResult()
-        self.assertEqual(1, run._run_user(returns))
-        self.assertEqual([], run.result._events)
-
-    def test__run_one_decorates_result(self):
-        log = []
-        class Run(RunTest):
-            def _run_prepared_result(self, result):
-                log.append(result)
-                return result
-        run = Run(self.make_case(), lambda x: x)
-        result = run._run_one('foo')
-        self.assertEqual([result], log)
-        self.assertIsInstance(log[0], ExtendedToOriginalDecorator)
-        self.assertEqual('foo', result.decorated)
-
-    def test__run_prepared_result_calls_start_and_stop_test(self):
-        result = ExtendedTestResult()
-        case = self.make_case()
-        run = RunTest(case, lambda x: x)
-        run.run(result)
-        self.assertEqual([
-            ('startTest', case),
-            ('addSuccess', case),
-            ('stopTest', case),
-            ], result._events)
-
-    def test__run_prepared_result_calls_stop_test_always(self):
-        result = ExtendedTestResult()
-        case = self.make_case()
-        def inner():
-            raise Exception("foo")
-        run = RunTest(case, lambda x: x)
-        run._run_core = inner
-        self.assertThat(lambda: run.run(result),
-            Raises(MatchesException(Exception("foo"))))
-        self.assertEqual([
-            ('startTest', case),
-            ('stopTest', case),
-            ], result._events)
-
-
-class CustomRunTest(RunTest):
-
-    marker = object()
-
-    def run(self, result=None):
-        return self.marker
-
-
-class TestTestCaseSupportForRunTest(TestCase):
-
-    def test_pass_custom_run_test(self):
-        class SomeCase(TestCase):
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo', runTest=CustomRunTest)
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(CustomRunTest.marker))
-
-    def test_default_is_runTest_class_variable(self):
-        class SomeCase(TestCase):
-            run_tests_with = CustomRunTest
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo')
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(CustomRunTest.marker))
-
-    def test_constructor_argument_overrides_class_variable(self):
-        # If a 'runTest' argument is passed to the test's constructor, that
-        # overrides the class variable.
-        marker = object()
-        class DifferentRunTest(RunTest):
-            def run(self, result=None):
-                return marker
-        class SomeCase(TestCase):
-            run_tests_with = CustomRunTest
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo', runTest=DifferentRunTest)
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(marker))
-
-    def test_decorator_for_run_test(self):
-        # Individual test methods can be marked as needing a special runner.
-        class SomeCase(TestCase):
-            @run_test_with(CustomRunTest)
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo')
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(CustomRunTest.marker))
-
-    def test_extended_decorator_for_run_test(self):
-        # Individual test methods can be marked as needing a special runner.
-        # Extra arguments can be passed to the decorator which will then be
-        # passed on to the RunTest object.
-        marker = object()
-        class FooRunTest(RunTest):
-            def __init__(self, case, handlers=None, bar=None):
-                super(FooRunTest, self).__init__(case, handlers)
-                self.bar = bar
-            def run(self, result=None):
-                return self.bar
-        class SomeCase(TestCase):
-            @run_test_with(FooRunTest, bar=marker)
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo')
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(marker))
-
-    def test_works_as_inner_decorator(self):
-        # Even if run_test_with is the innermost decorator, it will be
-        # respected.
-        def wrapped(function):
-            """Silly, trivial decorator."""
-            def decorated(*args, **kwargs):
-                return function(*args, **kwargs)
-            decorated.__name__ = function.__name__
-            decorated.__dict__.update(function.__dict__)
-            return decorated
-        class SomeCase(TestCase):
-            @wrapped
-            @run_test_with(CustomRunTest)
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo')
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(CustomRunTest.marker))
-
-    def test_constructor_overrides_decorator(self):
-        # If a 'runTest' argument is passed to the test's constructor, that
-        # overrides the decorator.
-        marker = object()
-        class DifferentRunTest(RunTest):
-            def run(self, result=None):
-                return marker
-        class SomeCase(TestCase):
-            @run_test_with(CustomRunTest)
-            def test_foo(self):
-                pass
-        result = TestResult()
-        case = SomeCase('test_foo', runTest=DifferentRunTest)
-        from_run_test = case.run(result)
-        self.assertThat(from_run_test, Is(marker))
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_spinner.py'
--- python-for-subunit2junitxml/testtools/tests/test_spinner.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_spinner.py	1970-01-01 00:00:00 +0000
@@ -1,332 +0,0 @@
-# Copyright (c) 2010 testtools developers. See LICENSE for details.
-
-"""Tests for the evil Twisted reactor-spinning we do."""
-
-import os
-import signal
-
-from testtools import (
-    skipIf,
-    TestCase,
-    )
-from testtools.helpers import try_import
-from testtools.matchers import (
-    Equals,
-    Is,
-    MatchesException,
-    Raises,
-    )
-
-_spinner = try_import('testtools._spinner')
-
-defer = try_import('twisted.internet.defer')
-Failure = try_import('twisted.python.failure.Failure')
-
-
-class NeedsTwistedTestCase(TestCase):
-
-    def setUp(self):
-        super(NeedsTwistedTestCase, self).setUp()
-        if defer is None or Failure is None:
-            self.skipTest("Need Twisted to run")
-
-
-class TestNotReentrant(NeedsTwistedTestCase):
-
-    def test_not_reentrant(self):
-        # A function decorated as not being re-entrant will raise a
-        # _spinner.ReentryError if it is called while it is running.
-        calls = []
-        @_spinner.not_reentrant
-        def log_something():
-            calls.append(None)
-            if len(calls) < 5:
-                log_something()
-        self.assertThat(
-            log_something, Raises(MatchesException(_spinner.ReentryError)))
-        self.assertEqual(1, len(calls))
-
-    def test_deeper_stack(self):
-        calls = []
-        @_spinner.not_reentrant
-        def g():
-            calls.append(None)
-            if len(calls) < 5:
-                f()
-        @_spinner.not_reentrant
-        def f():
-            calls.append(None)
-            if len(calls) < 5:
-                g()
-        self.assertThat(f, Raises(MatchesException(_spinner.ReentryError)))
-        self.assertEqual(2, len(calls))
-
-
-class TestExtractResult(NeedsTwistedTestCase):
-
-    def test_not_fired(self):
-        # _spinner.extract_result raises _spinner.DeferredNotFired if it's
-        # given a Deferred that has not fired.
-        self.assertThat(lambda:_spinner.extract_result(defer.Deferred()),
-            Raises(MatchesException(_spinner.DeferredNotFired)))
-
-    def test_success(self):
-        # _spinner.extract_result returns the value of the Deferred if it has
-        # fired successfully.
-        marker = object()
-        d = defer.succeed(marker)
-        self.assertThat(_spinner.extract_result(d), Equals(marker))
-
-    def test_failure(self):
-        # _spinner.extract_result raises the failure's exception if it's given
-        # a Deferred that is failing.
-        try:
-            1/0
-        except ZeroDivisionError:
-            f = Failure()
-        d = defer.fail(f)
-        self.assertThat(lambda:_spinner.extract_result(d),
-            Raises(MatchesException(ZeroDivisionError)))
-
-
-class TestTrapUnhandledErrors(NeedsTwistedTestCase):
-
-    def test_no_deferreds(self):
-        marker = object()
-        result, errors = _spinner.trap_unhandled_errors(lambda: marker)
-        self.assertEqual([], errors)
-        self.assertIs(marker, result)
-
-    def test_unhandled_error(self):
-        failures = []
-        def make_deferred_but_dont_handle():
-            try:
-                1/0
-            except ZeroDivisionError:
-                f = Failure()
-                failures.append(f)
-                defer.fail(f)
-        result, errors = _spinner.trap_unhandled_errors(
-            make_deferred_but_dont_handle)
-        self.assertIs(None, result)
-        self.assertEqual(failures, [error.failResult for error in errors])
-
-
-class TestRunInReactor(NeedsTwistedTestCase):
-
-    def make_reactor(self):
-        from twisted.internet import reactor
-        return reactor
-
-    def make_spinner(self, reactor=None):
-        if reactor is None:
-            reactor = self.make_reactor()
-        return _spinner.Spinner(reactor)
-
-    def make_timeout(self):
-        return 0.01
-
-    def test_function_called(self):
-        # run_in_reactor actually calls the function given to it.
-        calls = []
-        marker = object()
-        self.make_spinner().run(self.make_timeout(), calls.append, marker)
-        self.assertThat(calls, Equals([marker]))
-
-    def test_return_value_returned(self):
-        # run_in_reactor returns the value returned by the function given to
-        # it.
-        marker = object()
-        result = self.make_spinner().run(self.make_timeout(), lambda: marker)
-        self.assertThat(result, Is(marker))
-
-    def test_exception_reraised(self):
-        # If the given function raises an error, run_in_reactor re-raises that
-        # error.
-        self.assertThat(
-            lambda:self.make_spinner().run(self.make_timeout(), lambda: 1/0),
-            Raises(MatchesException(ZeroDivisionError)))
-
-    def test_keyword_arguments(self):
-        # run_in_reactor passes keyword arguments on.
-        calls = []
-        function = lambda *a, **kw: calls.extend([a, kw])
-        self.make_spinner().run(self.make_timeout(), function, foo=42)
-        self.assertThat(calls, Equals([(), {'foo': 42}]))
-
-    def test_not_reentrant(self):
-        # run_in_reactor raises an error if it is called inside another call
-        # to run_in_reactor.
-        spinner = self.make_spinner()
-        self.assertThat(lambda: spinner.run(
-            self.make_timeout(), spinner.run, self.make_timeout(),
-            lambda: None), Raises(MatchesException(_spinner.ReentryError)))
-
-    def test_deferred_value_returned(self):
-        # If the given function returns a Deferred, run_in_reactor returns the
-        # value in the Deferred at the end of the callback chain.
-        marker = object()
-        result = self.make_spinner().run(
-            self.make_timeout(), lambda: defer.succeed(marker))
-        self.assertThat(result, Is(marker))
-
-    def test_preserve_signal_handler(self):
-        signals = ['SIGINT', 'SIGTERM', 'SIGCHLD']
-        signals = filter(
-            None, (getattr(signal, name, None) for name in signals))
-        for sig in signals:
-            self.addCleanup(signal.signal, sig, signal.getsignal(sig))
-        new_hdlrs = list(lambda *a: None for _ in signals)
-        for sig, hdlr in zip(signals, new_hdlrs):
-            signal.signal(sig, hdlr)
-        spinner = self.make_spinner()
-        spinner.run(self.make_timeout(), lambda: None)
-        self.assertEqual(new_hdlrs, map(signal.getsignal, signals))
-
-    def test_timeout(self):
-        # If the function takes too long to run, we raise a
-        # _spinner.TimeoutError.
-        timeout = self.make_timeout()
-        self.assertThat(
-            lambda:self.make_spinner().run(timeout, lambda: defer.Deferred()),
-            Raises(MatchesException(_spinner.TimeoutError)))
-
-    def test_no_junk_by_default(self):
-        # If the reactor hasn't spun yet, then there cannot be any junk.
-        spinner = self.make_spinner()
-        self.assertThat(spinner.get_junk(), Equals([]))
-
-    def test_clean_do_nothing(self):
-        # If there's nothing going on in the reactor, then clean does nothing
-        # and returns an empty list.
-        spinner = self.make_spinner()
-        result = spinner._clean()
-        self.assertThat(result, Equals([]))
-
-    def test_clean_delayed_call(self):
-        # If there's a delayed call in the reactor, then clean cancels it and
-        # returns an empty list.
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        call = reactor.callLater(10, lambda: None)
-        results = spinner._clean()
-        self.assertThat(results, Equals([call]))
-        self.assertThat(call.active(), Equals(False))
-
-    def test_clean_delayed_call_cancelled(self):
-        # If there's a delayed call that's just been cancelled, then it's no
-        # longer there.
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        call = reactor.callLater(10, lambda: None)
-        call.cancel()
-        results = spinner._clean()
-        self.assertThat(results, Equals([]))
-
-    def test_clean_selectables(self):
-        # If there's still a selectable (e.g. a listening socket), then
-        # clean() removes it from the reactor's registry.
-        #
-        # Note that the socket is left open. This emulates a bug in trial.
-        from twisted.internet.protocol import ServerFactory
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        port = reactor.listenTCP(0, ServerFactory())
-        spinner.run(self.make_timeout(), lambda: None)
-        results = spinner.get_junk()
-        self.assertThat(results, Equals([port]))
-
-    def test_clean_running_threads(self):
-        import threading
-        import time
-        current_threads = list(threading.enumerate())
-        reactor = self.make_reactor()
-        timeout = self.make_timeout()
-        spinner = self.make_spinner(reactor)
-        spinner.run(timeout, reactor.callInThread, time.sleep, timeout / 2.0)
-        # Python before 2.5 has a race condition with thread handling where
-        # join() does not remove threads from enumerate before returning - the
-        # thread being joined does the removal. This was fixed in Python 2.5
-        # but we still support 2.4, so we have to work around the issue.
-        # http://bugs.python.org/issue1703448.
-        self.assertThat(
-            [thread for thread in threading.enumerate() if thread.isAlive()],
-            Equals(current_threads))
-
-    def test_leftover_junk_available(self):
-        # If 'run' is given a function that leaves the reactor dirty in some
-        # way, 'run' will clean up the reactor and then store information
-        # about the junk. This information can be got using get_junk.
-        from twisted.internet.protocol import ServerFactory
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        port = spinner.run(
-            self.make_timeout(), reactor.listenTCP, 0, ServerFactory())
-        self.assertThat(spinner.get_junk(), Equals([port]))
-
-    def test_will_not_run_with_previous_junk(self):
-        # If 'run' is called and there's still junk in the spinner's junk
-        # list, then the spinner will refuse to run.
-        from twisted.internet.protocol import ServerFactory
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        timeout = self.make_timeout()
-        spinner.run(timeout, reactor.listenTCP, 0, ServerFactory())
-        self.assertThat(lambda: spinner.run(timeout, lambda: None),
-            Raises(MatchesException(_spinner.StaleJunkError)))
-
-    def test_clear_junk_clears_previous_junk(self):
-        # clear_junk returns the junk accumulated by the previous run and
-        # empties the junk list, so get_junk subsequently returns nothing.
-        from twisted.internet.protocol import ServerFactory
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        timeout = self.make_timeout()
-        port = spinner.run(timeout, reactor.listenTCP, 0, ServerFactory())
-        junk = spinner.clear_junk()
-        self.assertThat(junk, Equals([port]))
-        self.assertThat(spinner.get_junk(), Equals([]))
-
-    @skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
-    def test_sigint_raises_no_result_error(self):
-        # If we get a SIGINT during a run, we raise _spinner.NoResultError.
-        SIGINT = getattr(signal, 'SIGINT', None)
-        if not SIGINT:
-            self.skipTest("SIGINT not available")
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        timeout = self.make_timeout()
-        reactor.callLater(timeout, os.kill, os.getpid(), SIGINT)
-        self.assertThat(lambda:spinner.run(timeout * 5, defer.Deferred),
-            Raises(MatchesException(_spinner.NoResultError)))
-        self.assertEqual([], spinner._clean())
-
-    @skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
-    def test_sigint_raises_no_result_error_second_time(self):
-        # If we get a SIGINT during a run, we raise _spinner.NoResultError.
-        # This test is exactly the same as test_sigint_raises_no_result_error,
-        # and exists to make sure we haven't futzed with state.
-        self.test_sigint_raises_no_result_error()
-
-    @skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
-    def test_fast_sigint_raises_no_result_error(self):
-        # If we get a SIGINT during a run, we raise _spinner.NoResultError.
-        SIGINT = getattr(signal, 'SIGINT', None)
-        if not SIGINT:
-            self.skipTest("SIGINT not available")
-        reactor = self.make_reactor()
-        spinner = self.make_spinner(reactor)
-        timeout = self.make_timeout()
-        reactor.callWhenRunning(os.kill, os.getpid(), SIGINT)
-        self.assertThat(lambda:spinner.run(timeout * 5, defer.Deferred),
-            Raises(MatchesException(_spinner.NoResultError)))
-        self.assertEqual([], spinner._clean())
-
-    @skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
-    def test_fast_sigint_raises_no_result_error_second_time(self):
-        self.test_fast_sigint_raises_no_result_error()
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_testresult.py'
--- python-for-subunit2junitxml/testtools/tests/test_testresult.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_testresult.py	1970-01-01 00:00:00 +0000
@@ -1,1372 +0,0 @@
-# Copyright (c) 2008 testtools developers. See LICENSE for details.
-
-"""Test TestResults and related things."""
-
-__metaclass__ = type
-
-import codecs
-import datetime
-import doctest
-import os
-import shutil
-import sys
-import tempfile
-import threading
-import warnings
-
-from testtools import (
-    ExtendedToOriginalDecorator,
-    MultiTestResult,
-    TestCase,
-    TestResult,
-    TextTestResult,
-    ThreadsafeForwardingResult,
-    testresult,
-    )
-from testtools.compat import (
-    _b,
-    _get_exception_encoding,
-    _r,
-    _u,
-    str_is_unicode,
-    StringIO,
-    )
-from testtools.content import Content
-from testtools.content_type import ContentType, UTF8_TEXT
-from testtools.matchers import (
-    DocTestMatches,
-    MatchesException,
-    Raises,
-    )
-from testtools.tests.helpers import (
-    LoggingResult,
-    Python26TestResult,
-    Python27TestResult,
-    ExtendedTestResult,
-    an_exc_info
-    )
-from testtools.testresult.real import utc
-
-
-class Python26Contract(object):
-
-    def test_fresh_result_is_successful(self):
-        # A result is considered successful before any tests are run.
-        result = self.makeResult()
-        self.assertTrue(result.wasSuccessful())
-
-    def test_addError_is_failure(self):
-        # addError fails the test run.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addError(self, an_exc_info)
-        result.stopTest(self)
-        self.assertFalse(result.wasSuccessful())
-
-    def test_addFailure_is_failure(self):
-        # addFailure fails the test run.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addFailure(self, an_exc_info)
-        result.stopTest(self)
-        self.assertFalse(result.wasSuccessful())
-
-    def test_addSuccess_is_success(self):
-        # addSuccess does not fail the test run.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addSuccess(self)
-        result.stopTest(self)
-        self.assertTrue(result.wasSuccessful())
-
-
-class Python27Contract(Python26Contract):
-
-    def test_addExpectedFailure(self):
-        # Calling addExpectedFailure(test, exc_info) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addExpectedFailure(self, an_exc_info)
-
-    def test_addExpectedFailure_is_success(self):
-        # addExpectedFailure does not fail the test run.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addExpectedFailure(self, an_exc_info)
-        result.stopTest(self)
-        self.assertTrue(result.wasSuccessful())
-
-    def test_addSkipped(self):
-        # Calling addSkip(test, reason) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addSkip(self, _u("Skipped for some reason"))
-
-    def test_addSkip_is_success(self):
-        # addSkip does not fail the test run.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addSkip(self, _u("Skipped for some reason"))
-        result.stopTest(self)
-        self.assertTrue(result.wasSuccessful())
-
-    def test_addUnexpectedSuccess(self):
-        # Calling addUnexpectedSuccess(test) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addUnexpectedSuccess(self)
-
-    def test_addUnexpectedSuccess_was_successful(self):
-        # addUnexpectedSuccess does not fail the test run in Python 2.7.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addUnexpectedSuccess(self)
-        result.stopTest(self)
-        self.assertTrue(result.wasSuccessful())
-
-    def test_startStopTestRun(self):
-        # Calling startTestRun completes ok.
-        result = self.makeResult()
-        result.startTestRun()
-        result.stopTestRun()
-
-
-class DetailsContract(Python27Contract):
-    """Tests for the contract of TestResults."""
-
-    def test_addExpectedFailure_details(self):
-        # Calling addExpectedFailure(test, details=xxx) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addExpectedFailure(self, details={})
-
-    def test_addError_details(self):
-        # Calling addError(test, details=xxx) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addError(self, details={})
-
-    def test_addFailure_details(self):
-        # Calling addFailure(test, details=xxx) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addFailure(self, details={})
-
-    def test_addSkipped_details(self):
-        # Calling addSkip(test, reason) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addSkip(self, details={})
-
-    def test_addUnexpectedSuccess_details(self):
-        # Calling addUnexpectedSuccess(test) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addUnexpectedSuccess(self, details={})
-
-    def test_addSuccess_details(self):
-        # Calling addSuccess(test) completes ok.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addSuccess(self, details={})
-
-
-class FallbackContract(DetailsContract):
-    """When we fallback we take our policy choice to map calls.
-
-    For instance, we map unexpectedSuccess to an error code, not to success.
-    """
-
-    def test_addUnexpectedSuccess_was_successful(self):
-        # addUnexpectedSuccess fails test run in testtools.
-        result = self.makeResult()
-        result.startTest(self)
-        result.addUnexpectedSuccess(self)
-        result.stopTest(self)
-        self.assertFalse(result.wasSuccessful())
-
-
-class StartTestRunContract(FallbackContract):
-    """Defines the contract for testtools policy choices.
-    
-    That is, things which are not simply extensions to unittest but choices we
-    have made differently.
-    """
-
-    def test_startTestRun_resets_unexpected_success(self):
-        result = self.makeResult()
-        result.startTest(self)
-        result.addUnexpectedSuccess(self)
-        result.stopTest(self)
-        result.startTestRun()
-        self.assertTrue(result.wasSuccessful())
-
-    def test_startTestRun_resets_failure(self):
-        result = self.makeResult()
-        result.startTest(self)
-        result.addFailure(self, an_exc_info)
-        result.stopTest(self)
-        result.startTestRun()
-        self.assertTrue(result.wasSuccessful())
-
-    def test_startTestRun_resets_errors(self):
-        result = self.makeResult()
-        result.startTest(self)
-        result.addError(self, an_exc_info)
-        result.stopTest(self)
-        result.startTestRun()
-        self.assertTrue(result.wasSuccessful())
-
-
-class TestTestResultContract(TestCase, StartTestRunContract):
-
-    def makeResult(self):
-        return TestResult()
-
-
-class TestMultiTestResultContract(TestCase, StartTestRunContract):
-
-    def makeResult(self):
-        return MultiTestResult(TestResult(), TestResult())
-
-
-class TestTextTestResultContract(TestCase, StartTestRunContract):
-
-    def makeResult(self):
-        return TextTestResult(StringIO())
-
-
-class TestThreadSafeForwardingResultContract(TestCase, StartTestRunContract):
-
-    def makeResult(self):
-        result_semaphore = threading.Semaphore(1)
-        target = TestResult()
-        return ThreadsafeForwardingResult(target, result_semaphore)
-
-
-class TestExtendedTestResultContract(TestCase, StartTestRunContract):
-
-    def makeResult(self):
-        return ExtendedTestResult()
-
-
-class TestPython26TestResultContract(TestCase, Python26Contract):
-
-    def makeResult(self):
-        return Python26TestResult()
-
-
-class TestAdaptedPython26TestResultContract(TestCase, FallbackContract):
-
-    def makeResult(self):
-        return ExtendedToOriginalDecorator(Python26TestResult())
-
-
-class TestPython27TestResultContract(TestCase, Python27Contract):
-
-    def makeResult(self):
-        return Python27TestResult()
-
-
-class TestAdaptedPython27TestResultContract(TestCase, DetailsContract):
-
-    def makeResult(self):
-        return ExtendedToOriginalDecorator(Python27TestResult())
-
-
-class TestTestResult(TestCase):
-    """Tests for 'TestResult'."""
-
-    def makeResult(self):
-        """Make an arbitrary result for testing."""
-        return TestResult()
-
-    def test_addSkipped(self):
-        # Calling addSkip on a TestResult records the test that was skipped in
-        # its skip_reasons dict.
-        result = self.makeResult()
-        result.addSkip(self, _u("Skipped for some reason"))
-        self.assertEqual({_u("Skipped for some reason"):[self]},
-            result.skip_reasons)
-        result.addSkip(self, _u("Skipped for some reason"))
-        self.assertEqual({_u("Skipped for some reason"):[self, self]},
-            result.skip_reasons)
-        result.addSkip(self, _u("Skipped for another reason"))
-        self.assertEqual({_u("Skipped for some reason"):[self, self],
-            _u("Skipped for another reason"):[self]},
-            result.skip_reasons)
-
-    def test_now_datetime_now(self):
-        result = self.makeResult()
-        olddatetime = testresult.real.datetime
-        def restore():
-            testresult.real.datetime = olddatetime
-        self.addCleanup(restore)
-        class Module:
-            pass
-        now = datetime.datetime.now(utc)
-        stubdatetime = Module()
-        stubdatetime.datetime = Module()
-        stubdatetime.datetime.now = lambda tz: now
-        testresult.real.datetime = stubdatetime
-        # Calling _now() looks up the time.
-        self.assertEqual(now, result._now())
-        then = now + datetime.timedelta(0, 1)
-        # Set an explicit datetime, which gets returned from then on.
-        result.time(then)
-        self.assertNotEqual(now, result._now())
-        self.assertEqual(then, result._now())
-        # go back to looking it up.
-        result.time(None)
-        self.assertEqual(now, result._now())
-
-    def test_now_datetime_time(self):
-        result = self.makeResult()
-        now = datetime.datetime.now(utc)
-        result.time(now)
-        self.assertEqual(now, result._now())
-
-
-class TestWithFakeExceptions(TestCase):
-
-    def makeExceptionInfo(self, exceptionFactory, *args, **kwargs):
-        try:
-            raise exceptionFactory(*args, **kwargs)
-        except:
-            return sys.exc_info()
-
-
-class TestMultiTestResult(TestWithFakeExceptions):
-    """Tests for 'MultiTestResult'."""
-
-    def setUp(self):
-        TestWithFakeExceptions.setUp(self)
-        self.result1 = LoggingResult([])
-        self.result2 = LoggingResult([])
-        self.multiResult = MultiTestResult(self.result1, self.result2)
-
-    def assertResultLogsEqual(self, expectedEvents):
-        """Assert that our test results have received the expected events."""
-        self.assertEqual(expectedEvents, self.result1._events)
-        self.assertEqual(expectedEvents, self.result2._events)
-
-    def test_empty(self):
-        # Initializing a `MultiTestResult` doesn't do anything to its
-        # `TestResult`s.
-        self.assertResultLogsEqual([])
-
-    def test_startTest(self):
-        # Calling `startTest` on a `MultiTestResult` calls `startTest` on all
-        # its `TestResult`s.
-        self.multiResult.startTest(self)
-        self.assertResultLogsEqual([('startTest', self)])
-
-    def test_stopTest(self):
-        # Calling `stopTest` on a `MultiTestResult` calls `stopTest` on all
-        # its `TestResult`s.
-        self.multiResult.stopTest(self)
-        self.assertResultLogsEqual([('stopTest', self)])
-
-    def test_addSkipped(self):
-        # Calling `addSkip` on a `MultiTestResult` calls addSkip on its
-        # results.
-        reason = _u("Skipped for some reason")
-        self.multiResult.addSkip(self, reason)
-        self.assertResultLogsEqual([('addSkip', self, reason)])
-
-    def test_addSuccess(self):
-        # Calling `addSuccess` on a `MultiTestResult` calls `addSuccess` on
-        # all its `TestResult`s.
-        self.multiResult.addSuccess(self)
-        self.assertResultLogsEqual([('addSuccess', self)])
-
-    def test_done(self):
-        # Calling `done` on a `MultiTestResult` calls `done` on all its
-        # `TestResult`s.
-        self.multiResult.done()
-        self.assertResultLogsEqual([('done')])
-
-    def test_addFailure(self):
-        # Calling `addFailure` on a `MultiTestResult` calls `addFailure` on
-        # all its `TestResult`s.
-        exc_info = self.makeExceptionInfo(AssertionError, 'failure')
-        self.multiResult.addFailure(self, exc_info)
-        self.assertResultLogsEqual([('addFailure', self, exc_info)])
-
-    def test_addError(self):
-        # Calling `addError` on a `MultiTestResult` calls `addError` on all
-        # its `TestResult`s.
-        exc_info = self.makeExceptionInfo(RuntimeError, 'error')
-        self.multiResult.addError(self, exc_info)
-        self.assertResultLogsEqual([('addError', self, exc_info)])
-
-    def test_startTestRun(self):
-        # Calling `startTestRun` on a `MultiTestResult` forwards to all its
-        # `TestResult`s.
-        self.multiResult.startTestRun()
-        self.assertResultLogsEqual([('startTestRun')])
-
-    def test_stopTestRun(self):
-        # Calling `stopTestRun` on a `MultiTestResult` forwards to all its
-        # `TestResult`s.
-        self.multiResult.stopTestRun()
-        self.assertResultLogsEqual([('stopTestRun')])
-
-    def test_stopTestRun_returns_results(self):
-        # `MultiTestResult.stopTestRun` returns a tuple of all of the return
-        # values of the `stopTestRun`s that it forwards to.
-        class Result(LoggingResult):
-            def stopTestRun(self):
-                super(Result, self).stopTestRun()
-                return 'foo'
-        multi_result = MultiTestResult(Result([]), Result([]))
-        result = multi_result.stopTestRun()
-        self.assertEqual(('foo', 'foo'), result)
-
-    def test_time(self):
-        # The time call is dispatched, not eaten by the base class.
-        self.multiResult.time('foo')
-        self.assertResultLogsEqual([('time', 'foo')])
-
-
-class TestTextTestResult(TestCase):
-    """Tests for 'TextTestResult'."""
-
-    def setUp(self):
-        super(TestTextTestResult, self).setUp()
-        self.result = TextTestResult(StringIO())
-
-    def make_erroring_test(self):
-        class Test(TestCase):
-            def error(self):
-                1/0
-        return Test("error")
-
-    def make_failing_test(self):
-        class Test(TestCase):
-            def failed(self):
-                self.fail("yo!")
-        return Test("failed")
-
-    def make_unexpectedly_successful_test(self):
-        class Test(TestCase):
-            def succeeded(self):
-                self.expectFailure("yo!", lambda: None)
-        return Test("succeeded")
-
-    def make_test(self):
-        class Test(TestCase):
-            def test(self):
-                pass
-        return Test("test")
-
-    def getvalue(self):
-        return self.result.stream.getvalue()
-
-    def test__init_sets_stream(self):
-        result = TextTestResult("fp")
-        self.assertEqual("fp", result.stream)
-
-    def reset_output(self):
-        self.result.stream = StringIO()
-
-    def test_startTestRun(self):
-        self.result.startTestRun()
-        self.assertEqual("Tests running...\n", self.getvalue())
-
-    def test_stopTestRun_count_many(self):
-        test = self.make_test()
-        self.result.startTestRun()
-        self.result.startTest(test)
-        self.result.stopTest(test)
-        self.result.startTest(test)
-        self.result.stopTest(test)
-        self.result.stream = StringIO()
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("Ran 2 tests in ...s\n...", doctest.ELLIPSIS))
-
-    def test_stopTestRun_count_single(self):
-        test = self.make_test()
-        self.result.startTestRun()
-        self.result.startTest(test)
-        self.result.stopTest(test)
-        self.reset_output()
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("Ran 1 test in ...s\n\nOK\n", doctest.ELLIPSIS))
-
-    def test_stopTestRun_count_zero(self):
-        self.result.startTestRun()
-        self.reset_output()
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("Ran 0 tests in ...s\n\nOK\n", doctest.ELLIPSIS))
-
-    def test_stopTestRun_current_time(self):
-        test = self.make_test()
-        now = datetime.datetime.now(utc)
-        self.result.time(now)
-        self.result.startTestRun()
-        self.result.startTest(test)
-        now = now + datetime.timedelta(0, 0, 0, 1)
-        self.result.time(now)
-        self.result.stopTest(test)
-        self.reset_output()
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("... in 0.001s\n...", doctest.ELLIPSIS))
-
-    def test_stopTestRun_successful(self):
-        self.result.startTestRun()
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("...\n\nOK\n", doctest.ELLIPSIS))
-
-    def test_stopTestRun_not_successful_failure(self):
-        test = self.make_failing_test()
-        self.result.startTestRun()
-        test.run(self.result)
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("...\n\nFAILED (failures=1)\n", doctest.ELLIPSIS))
-
-    def test_stopTestRun_not_successful_error(self):
-        test = self.make_erroring_test()
-        self.result.startTestRun()
-        test.run(self.result)
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("...\n\nFAILED (failures=1)\n", doctest.ELLIPSIS))
-
-    def test_stopTestRun_not_successful_unexpected_success(self):
-        test = self.make_unexpectedly_successful_test()
-        self.result.startTestRun()
-        test.run(self.result)
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("...\n\nFAILED (failures=1)\n", doctest.ELLIPSIS))
-
-    def test_stopTestRun_shows_details(self):
-        self.result.startTestRun()
-        self.make_erroring_test().run(self.result)
-        self.make_unexpectedly_successful_test().run(self.result)
-        self.make_failing_test().run(self.result)
-        self.reset_output()
-        self.result.stopTestRun()
-        self.assertThat(self.getvalue(),
-            DocTestMatches("""...======================================================================
-ERROR: testtools.tests.test_testresult.Test.error
-----------------------------------------------------------------------
-Text attachment: traceback
-------------
-Traceback (most recent call last):
-  File "...testtools...runtest.py", line ..., in _run_user...
-    return fn(*args, **kwargs)
-  File "...testtools...testcase.py", line ..., in _run_test_method
-    return self._get_test_method()()
-  File "...testtools...tests...test_testresult.py", line ..., in error
-    1/0
-ZeroDivisionError:... divi... by zero...
-------------
-======================================================================
-FAIL: testtools.tests.test_testresult.Test.failed
-----------------------------------------------------------------------
-Text attachment: traceback
-------------
-Traceback (most recent call last):
-  File "...testtools...runtest.py", line ..., in _run_user...
-    return fn(*args, **kwargs)
-  File "...testtools...testcase.py", line ..., in _run_test_method
-    return self._get_test_method()()
-  File "...testtools...tests...test_testresult.py", line ..., in failed
-    self.fail("yo!")
-AssertionError: yo!
-------------
-======================================================================
-UNEXPECTED SUCCESS: testtools.tests.test_testresult.Test.succeeded
-----------------------------------------------------------------------
-...""", doctest.ELLIPSIS | doctest.REPORT_NDIFF))
-
-
-class TestThreadSafeForwardingResult(TestWithFakeExceptions):
-    """Tests for `TestThreadSafeForwardingResult`."""
-
-    def setUp(self):
-        TestWithFakeExceptions.setUp(self)
-        self.result_semaphore = threading.Semaphore(1)
-        self.target = LoggingResult([])
-        self.result1 = ThreadsafeForwardingResult(self.target,
-            self.result_semaphore)
-
-    def test_nonforwarding_methods(self):
-        # startTest and stopTest are not forwarded because they need to be
-        # batched.
-        self.result1.startTest(self)
-        self.result1.stopTest(self)
-        self.assertEqual([], self.target._events)
-
-    def test_startTestRun(self):
-        self.result1.startTestRun()
-        self.result2 = ThreadsafeForwardingResult(self.target,
-            self.result_semaphore)
-        self.result2.startTestRun()
-        self.assertEqual(["startTestRun", "startTestRun"], self.target._events)
-
-    def test_stopTestRun(self):
-        self.result1.stopTestRun()
-        self.result2 = ThreadsafeForwardingResult(self.target,
-            self.result_semaphore)
-        self.result2.stopTestRun()
-        self.assertEqual(["stopTestRun", "stopTestRun"], self.target._events)
-
-    def test_forwarding_methods(self):
-        # error, failure, skip and success are forwarded in batches.
-        exc_info1 = self.makeExceptionInfo(RuntimeError, 'error')
-        starttime1 = datetime.datetime.utcfromtimestamp(1.489)
-        endtime1 = datetime.datetime.utcfromtimestamp(51.476)
-        self.result1.time(starttime1)
-        self.result1.startTest(self)
-        self.result1.time(endtime1)
-        self.result1.addError(self, exc_info1)
-        exc_info2 = self.makeExceptionInfo(AssertionError, 'failure')
-        starttime2 = datetime.datetime.utcfromtimestamp(2.489)
-        endtime2 = datetime.datetime.utcfromtimestamp(3.476)
-        self.result1.time(starttime2)
-        self.result1.startTest(self)
-        self.result1.time(endtime2)
-        self.result1.addFailure(self, exc_info2)
-        reason = _u("Skipped for some reason")
-        starttime3 = datetime.datetime.utcfromtimestamp(4.489)
-        endtime3 = datetime.datetime.utcfromtimestamp(5.476)
-        self.result1.time(starttime3)
-        self.result1.startTest(self)
-        self.result1.time(endtime3)
-        self.result1.addSkip(self, reason)
-        starttime4 = datetime.datetime.utcfromtimestamp(6.489)
-        endtime4 = datetime.datetime.utcfromtimestamp(7.476)
-        self.result1.time(starttime4)
-        self.result1.startTest(self)
-        self.result1.time(endtime4)
-        self.result1.addSuccess(self)
-        self.assertEqual([
-            ('time', starttime1),
-            ('startTest', self),
-            ('time', endtime1),
-            ('addError', self, exc_info1),
-            ('stopTest', self),
-            ('time', starttime2),
-            ('startTest', self),
-            ('time', endtime2),
-            ('addFailure', self, exc_info2),
-            ('stopTest', self),
-            ('time', starttime3),
-            ('startTest', self),
-            ('time', endtime3),
-            ('addSkip', self, reason),
-            ('stopTest', self),
-            ('time', starttime4),
-            ('startTest', self),
-            ('time', endtime4),
-            ('addSuccess', self),
-            ('stopTest', self),
-            ], self.target._events)
-
-
-class TestExtendedToOriginalResultDecoratorBase(TestCase):
-
-    def make_26_result(self):
-        self.result = Python26TestResult()
-        self.make_converter()
-
-    def make_27_result(self):
-        self.result = Python27TestResult()
-        self.make_converter()
-
-    def make_converter(self):
-        self.converter = ExtendedToOriginalDecorator(self.result)
-
-    def make_extended_result(self):
-        self.result = ExtendedTestResult()
-        self.make_converter()
-
-    def check_outcome_details(self, outcome):
-        """Call an outcome with a details dict to be passed through."""
-        # This dict is /not/ convertible - that's deliberate, as it should
-        # not hit the conversion code path.
-        details = {'foo': 'bar'}
-        getattr(self.converter, outcome)(self, details=details)
-        self.assertEqual([(outcome, self, details)], self.result._events)
-
-    def get_details_and_string(self):
-        """Get a details dict and expected string."""
-        text1 = lambda: [_b("1\n2\n")]
-        text2 = lambda: [_b("3\n4\n")]
-        bin1 = lambda: [_b("5\n")]
-        details = {'text 1': Content(ContentType('text', 'plain'), text1),
-            'text 2': Content(ContentType('text', 'strange'), text2),
-            'bin 1': Content(ContentType('application', 'binary'), bin1)}
-        return (details, "Binary content: bin 1\n"
-            "Text attachment: text 1\n------------\n1\n2\n"
-            "------------\nText attachment: text 2\n------------\n"
-            "3\n4\n------------\n")
-
-    def check_outcome_details_to_exec_info(self, outcome, expected=None):
-        """Call an outcome with a details dict to be made into exc_info."""
-        # The conversion is done using RemoteError and the string contents
-        # of the text types in the details dict.
-        if not expected:
-            expected = outcome
-        details, err_str = self.get_details_and_string()
-        getattr(self.converter, outcome)(self, details=details)
-        err = self.converter._details_to_exc_info(details)
-        self.assertEqual([(expected, self, err)], self.result._events)
-
-    def check_outcome_details_to_nothing(self, outcome, expected=None):
-        """Call an outcome with a details dict to be swallowed."""
-        if not expected:
-            expected = outcome
-        details = {'foo': 'bar'}
-        getattr(self.converter, outcome)(self, details=details)
-        self.assertEqual([(expected, self)], self.result._events)
-
-    def check_outcome_details_to_string(self, outcome):
-        """Call an outcome with a details dict to be stringified."""
-        details, err_str = self.get_details_and_string()
-        getattr(self.converter, outcome)(self, details=details)
-        self.assertEqual([(outcome, self, err_str)], self.result._events)
-
-    def check_outcome_details_to_arg(self, outcome, arg, extra_detail=None):
-        """Call an outcome with a details dict to have an arg extracted."""
-        details, _ = self.get_details_and_string()
-        if extra_detail:
-            details.update(extra_detail)
-        getattr(self.converter, outcome)(self, details=details)
-        self.assertEqual([(outcome, self, arg)], self.result._events)
-
-    def check_outcome_exc_info(self, outcome, expected=None):
-        """Check that calling a legacy outcome still works."""
-        # calling some outcome with the legacy exc_info style api (no keyword
-        # parameters) gets passed through.
-        if not expected:
-            expected = outcome
-        err = sys.exc_info()
-        getattr(self.converter, outcome)(self, err)
-        self.assertEqual([(expected, self, err)], self.result._events)
-
-    def check_outcome_exc_info_to_nothing(self, outcome, expected=None):
-        """Check that calling a legacy outcome on a fallback works."""
-        # calling some outcome with the legacy exc_info style api (no keyword
-        # parameters) gets passed through.
-        if not expected:
-            expected = outcome
-        err = sys.exc_info()
-        getattr(self.converter, outcome)(self, err)
-        self.assertEqual([(expected, self)], self.result._events)
-
-    def check_outcome_nothing(self, outcome, expected=None):
-        """Check that calling a legacy outcome still works."""
-        if not expected:
-            expected = outcome
-        getattr(self.converter, outcome)(self)
-        self.assertEqual([(expected, self)], self.result._events)
-
-    def check_outcome_string_nothing(self, outcome, expected):
-        """Check that calling outcome with a string calls expected."""
-        getattr(self.converter, outcome)(self, "foo")
-        self.assertEqual([(expected, self)], self.result._events)
-
-    def check_outcome_string(self, outcome):
-        """Check that calling outcome with a string works."""
-        getattr(self.converter, outcome)(self, "foo")
-        self.assertEqual([(outcome, self, "foo")], self.result._events)
-
-
-class TestExtendedToOriginalResultDecorator(
-    TestExtendedToOriginalResultDecoratorBase):
-
-    def test_progress_py26(self):
-        self.make_26_result()
-        self.converter.progress(1, 2)
-
-    def test_progress_py27(self):
-        self.make_27_result()
-        self.converter.progress(1, 2)
-
-    def test_progress_pyextended(self):
-        self.make_extended_result()
-        self.converter.progress(1, 2)
-        self.assertEqual([('progress', 1, 2)], self.result._events)
-
-    def test_shouldStop(self):
-        self.make_26_result()
-        self.assertEqual(False, self.converter.shouldStop)
-        self.converter.decorated.stop()
-        self.assertEqual(True, self.converter.shouldStop)
-
-    def test_startTest_py26(self):
-        self.make_26_result()
-        self.converter.startTest(self)
-        self.assertEqual([('startTest', self)], self.result._events)
-
-    def test_startTest_py27(self):
-        self.make_27_result()
-        self.converter.startTest(self)
-        self.assertEqual([('startTest', self)], self.result._events)
-
-    def test_startTest_pyextended(self):
-        self.make_extended_result()
-        self.converter.startTest(self)
-        self.assertEqual([('startTest', self)], self.result._events)
-
-    def test_startTestRun_py26(self):
-        self.make_26_result()
-        self.converter.startTestRun()
-        self.assertEqual([], self.result._events)
-
-    def test_startTestRun_py27(self):
-        self.make_27_result()
-        self.converter.startTestRun()
-        self.assertEqual([('startTestRun',)], self.result._events)
-
-    def test_startTestRun_pyextended(self):
-        self.make_extended_result()
-        self.converter.startTestRun()
-        self.assertEqual([('startTestRun',)], self.result._events)
-
-    def test_stopTest_py26(self):
-        self.make_26_result()
-        self.converter.stopTest(self)
-        self.assertEqual([('stopTest', self)], self.result._events)
-
-    def test_stopTest_py27(self):
-        self.make_27_result()
-        self.converter.stopTest(self)
-        self.assertEqual([('stopTest', self)], self.result._events)
-
-    def test_stopTest_pyextended(self):
-        self.make_extended_result()
-        self.converter.stopTest(self)
-        self.assertEqual([('stopTest', self)], self.result._events)
-
-    def test_stopTestRun_py26(self):
-        self.make_26_result()
-        self.converter.stopTestRun()
-        self.assertEqual([], self.result._events)
-
-    def test_stopTestRun_py27(self):
-        self.make_27_result()
-        self.converter.stopTestRun()
-        self.assertEqual([('stopTestRun',)], self.result._events)
-
-    def test_stopTestRun_pyextended(self):
-        self.make_extended_result()
-        self.converter.stopTestRun()
-        self.assertEqual([('stopTestRun',)], self.result._events)
-
-    def test_tags_py26(self):
-        self.make_26_result()
-        self.converter.tags(1, 2)
-
-    def test_tags_py27(self):
-        self.make_27_result()
-        self.converter.tags(1, 2)
-
-    def test_tags_pyextended(self):
-        self.make_extended_result()
-        self.converter.tags(1, 2)
-        self.assertEqual([('tags', 1, 2)], self.result._events)
-
-    def test_time_py26(self):
-        self.make_26_result()
-        self.converter.time(1)
-
-    def test_time_py27(self):
-        self.make_27_result()
-        self.converter.time(1)
-
-    def test_time_pyextended(self):
-        self.make_extended_result()
-        self.converter.time(1)
-        self.assertEqual([('time', 1)], self.result._events)
-
-
-class TestExtendedToOriginalAddError(TestExtendedToOriginalResultDecoratorBase):
-
-    outcome = 'addError'
-
-    def test_outcome_Original_py26(self):
-        self.make_26_result()
-        self.check_outcome_exc_info(self.outcome)
-
-    def test_outcome_Original_py27(self):
-        self.make_27_result()
-        self.check_outcome_exc_info(self.outcome)
-
-    def test_outcome_Original_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_exc_info(self.outcome)
-
-    def test_outcome_Extended_py26(self):
-        self.make_26_result()
-        self.check_outcome_details_to_exec_info(self.outcome)
-
-    def test_outcome_Extended_py27(self):
-        self.make_27_result()
-        self.check_outcome_details_to_exec_info(self.outcome)
-
-    def test_outcome_Extended_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_details(self.outcome)
-
-    def test_outcome__no_details(self):
-        self.make_extended_result()
-        self.assertThat(
-            lambda: getattr(self.converter, self.outcome)(self),
-            Raises(MatchesException(ValueError)))
-
-
-class TestExtendedToOriginalAddFailure(
-    TestExtendedToOriginalAddError):
-
-    outcome = 'addFailure'
-
-
-class TestExtendedToOriginalAddExpectedFailure(
-    TestExtendedToOriginalAddError):
-
-    outcome = 'addExpectedFailure'
-
-    def test_outcome_Original_py26(self):
-        self.make_26_result()
-        self.check_outcome_exc_info_to_nothing(self.outcome, 'addSuccess')
-
-    def test_outcome_Extended_py26(self):
-        self.make_26_result()
-        self.check_outcome_details_to_nothing(self.outcome, 'addSuccess')
-
-
-
-class TestExtendedToOriginalAddSkip(
-    TestExtendedToOriginalResultDecoratorBase):
-
-    outcome = 'addSkip'
-
-    def test_outcome_Original_py26(self):
-        self.make_26_result()
-        self.check_outcome_string_nothing(self.outcome, 'addSuccess')
-
-    def test_outcome_Original_py27(self):
-        self.make_27_result()
-        self.check_outcome_string(self.outcome)
-
-    def test_outcome_Original_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_string(self.outcome)
-
-    def test_outcome_Extended_py26(self):
-        self.make_26_result()
-        self.check_outcome_string_nothing(self.outcome, 'addSuccess')
-
-    def test_outcome_Extended_py27_no_reason(self):
-        self.make_27_result()
-        self.check_outcome_details_to_string(self.outcome)
-
-    def test_outcome_Extended_py27_reason(self):
-        self.make_27_result()
-        self.check_outcome_details_to_arg(self.outcome, 'foo',
-            {'reason': Content(UTF8_TEXT, lambda:[_b('foo')])})
-
-    def test_outcome_Extended_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_details(self.outcome)
-
-    def test_outcome__no_details(self):
-        self.make_extended_result()
-        self.assertThat(
-            lambda: getattr(self.converter, self.outcome)(self),
-            Raises(MatchesException(ValueError)))
-
-
-class TestExtendedToOriginalAddSuccess(
-    TestExtendedToOriginalResultDecoratorBase):
-
-    outcome = 'addSuccess'
-    expected = 'addSuccess'
-
-    def test_outcome_Original_py26(self):
-        self.make_26_result()
-        self.check_outcome_nothing(self.outcome, self.expected)
-
-    def test_outcome_Original_py27(self):
-        self.make_27_result()
-        self.check_outcome_nothing(self.outcome)
-
-    def test_outcome_Original_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_nothing(self.outcome)
-
-    def test_outcome_Extended_py26(self):
-        self.make_26_result()
-        self.check_outcome_details_to_nothing(self.outcome, self.expected)
-
-    def test_outcome_Extended_py27(self):
-        self.make_27_result()
-        self.check_outcome_details_to_nothing(self.outcome)
-
-    def test_outcome_Extended_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_details(self.outcome)
-
-
-class TestExtendedToOriginalAddUnexpectedSuccess(
-    TestExtendedToOriginalResultDecoratorBase):
-
-    outcome = 'addUnexpectedSuccess'
-    expected = 'addFailure'
-
-    def test_outcome_Original_py26(self):
-        self.make_26_result()
-        getattr(self.converter, self.outcome)(self)
-        [event] = self.result._events
-        self.assertEqual((self.expected, self), event[:2])
-
-    def test_outcome_Original_py27(self):
-        self.make_27_result()
-        self.check_outcome_nothing(self.outcome)
-
-    def test_outcome_Original_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_nothing(self.outcome)
-
-    def test_outcome_Extended_py26(self):
-        self.make_26_result()
-        getattr(self.converter, self.outcome)(self)
-        [event] = self.result._events
-        self.assertEqual((self.expected, self), event[:2])
-
-    def test_outcome_Extended_py27(self):
-        self.make_27_result()
-        self.check_outcome_details_to_nothing(self.outcome)
-
-    def test_outcome_Extended_pyextended(self):
-        self.make_extended_result()
-        self.check_outcome_details(self.outcome)
-
-
-class TestExtendedToOriginalResultOtherAttributes(
-    TestExtendedToOriginalResultDecoratorBase):
-
-    def test_other_attribute(self):
-        class OtherExtendedResult:
-            def foo(self):
-                return 2
-            bar = 1
-        self.result = OtherExtendedResult()
-        self.make_converter()
-        self.assertEqual(1, self.converter.bar)
-        self.assertEqual(2, self.converter.foo())
-
-
-class TestNonAsciiResults(TestCase):
-    """Test all kinds of tracebacks are cleanly interpreted as unicode
-
-    Currently only uses weak "contains" assertions; it would be good to be much
-    stricter about the expected output. This would add a few failures for the
-    current release of IronPython for instance, which gets some traceback
-    lines muddled.
-    """
-
-    _sample_texts = (
-        _u("pa\u026a\u03b8\u0259n"), # Unicode encodings only
-        _u("\u5357\u7121"), # In ISO 2022 encodings
-        _u("\xa7\xa7\xa7"), # In ISO 8859 encodings
-        )
-    # Everything but Jython shows syntax errors on the current character
-    _error_on_character = os.name != "java"
-
-    def _run(self, stream, test):
-        """Run the test, the same as in testtools.run but not to stdout"""
-        result = TextTestResult(stream)
-        result.startTestRun()
-        try:
-            return test.run(result)
-        finally:
-            result.stopTestRun()
-
-    def _write_module(self, name, encoding, contents):
-        """Create Python module on disk with contents in given encoding"""
-        try:
-            # Need to pre-check that the coding is valid or codecs.open drops
-            # the file without closing it which breaks non-refcounted pythons
-            codecs.lookup(encoding)
-        except LookupError:
-            self.skip("Encoding unsupported by implementation: %r" % encoding)
-        f = codecs.open(os.path.join(self.dir, name + ".py"), "w", encoding)
-        try:
-            f.write(contents)
-        finally:
-            f.close()
-
-    def _test_external_case(self, testline, coding="ascii", modulelevel="",
-            suffix=""):
-        """Create and run a test case in a seperate module"""
-        self._setup_external_case(testline, coding, modulelevel, suffix)
-        return self._run_external_case()
-
-    def _setup_external_case(self, testline, coding="ascii", modulelevel="",
-            suffix=""):
-        """Create a test case in a seperate module"""
-        _, prefix, self.modname = self.id().rsplit(".", 2)
-        self.dir = tempfile.mkdtemp(prefix=prefix, suffix=suffix)
-        self.addCleanup(shutil.rmtree, self.dir)
-        self._write_module(self.modname, coding,
-            # Older Python 2 versions don't see a coding declaration in a
-            # docstring so it has to be in a comment, but then we can't
-            # work around bug: <http://ironpython.codeplex.com/workitem/26940>
-            "# coding: %s\n"
-            "import testtools\n"
-            "%s\n"
-            "class Test(testtools.TestCase):\n"
-            "    def runTest(self):\n"
-            "        %s\n" % (coding, modulelevel, testline))
-
-    def _run_external_case(self):
-        """Run the prepared test case in a seperate module"""
-        sys.path.insert(0, self.dir)
-        self.addCleanup(sys.path.remove, self.dir)
-        module = __import__(self.modname)
-        self.addCleanup(sys.modules.pop, self.modname)
-        stream = StringIO()
-        self._run(stream, module.Test())
-        return stream.getvalue()
-
-    def _silence_deprecation_warnings(self):
-        """Shut up DeprecationWarning for this test only"""
-        warnings.simplefilter("ignore", DeprecationWarning)
-        self.addCleanup(warnings.filters.remove, warnings.filters[0])
-
-    def _get_sample_text(self, encoding="unicode_internal"):
-        if encoding is None and str_is_unicode:
-           encoding = "unicode_internal"
-        for u in self._sample_texts:
-            try:
-                b = u.encode(encoding)
-                if u == b.decode(encoding):
-                   if str_is_unicode:
-                       return u, u
-                   return u, b
-            except (LookupError, UnicodeError):
-                pass
-        self.skip("Could not find a sample text for encoding: %r" % encoding)
-
-    def _as_output(self, text):
-        return text
-
-    def test_non_ascii_failure_string(self):
-        """Assertion contents can be non-ascii and should get decoded"""
-        text, raw = self._get_sample_text(_get_exception_encoding())
-        textoutput = self._test_external_case("self.fail(%s)" % _r(raw))
-        self.assertIn(self._as_output(text), textoutput)
-
-    def test_non_ascii_failure_string_via_exec(self):
-        """Assertion via exec can be non-ascii and still gets decoded"""
-        text, raw = self._get_sample_text(_get_exception_encoding())
-        textoutput = self._test_external_case(
-            testline='exec ("self.fail(%s)")' % _r(raw))
-        self.assertIn(self._as_output(text), textoutput)
-
-    def test_control_characters_in_failure_string(self):
-        """Control characters in assertions should be escaped"""
-        textoutput = self._test_external_case("self.fail('\\a\\a\\a')")
-        self.expectFailure("Defense against the beeping horror unimplemented",
-            self.assertNotIn, self._as_output("\a\a\a"), textoutput)
-        self.assertIn(self._as_output(_u("\uFFFD\uFFFD\uFFFD")), textoutput)
-
-    def test_os_error(self):
-        """Locale error messages from the OS shouldn't break anything"""
-        textoutput = self._test_external_case(
-            modulelevel="import os",
-            testline="os.mkdir('/')")
-        if os.name != "nt" or sys.version_info < (2, 5):
-            self.assertIn(self._as_output("OSError: "), textoutput)
-        else:
-            self.assertIn(self._as_output("WindowsError: "), textoutput)
-
-    def test_assertion_text_shift_jis(self):
-        """A terminal raw backslash in an encoded string is weird but fine"""
-        example_text = _u("\u5341")
-        textoutput = self._test_external_case(
-            coding="shift_jis",
-            testline="self.fail('%s')" % example_text)
-        if str_is_unicode:
-            output_text = example_text
-        else:
-            output_text = example_text.encode("shift_jis").decode(
-                _get_exception_encoding(), "replace")
-        self.assertIn(self._as_output("AssertionError: %s" % output_text),
-            textoutput)
-
-    def test_file_comment_iso2022_jp(self):
-        """Control character escapes must be preserved if valid encoding"""
-        example_text, _ = self._get_sample_text("iso2022_jp")
-        textoutput = self._test_external_case(
-            coding="iso2022_jp",
-            testline="self.fail('Simple') # %s" % example_text)
-        self.assertIn(self._as_output(example_text), textoutput)
-
-    def test_unicode_exception(self):
-        """Exceptions that can be formated losslessly as unicode should be"""
-        example_text, _ = self._get_sample_text()
-        exception_class = (
-            "class FancyError(Exception):\n"
-            # A __unicode__ method does nothing on py3k but the default works
-            "    def __unicode__(self):\n"
-            "        return self.args[0]\n")
-        textoutput = self._test_external_case(
-            modulelevel=exception_class,
-            testline="raise FancyError(%s)" % _r(example_text))
-        self.assertIn(self._as_output(example_text), textoutput)
-
-    def test_unprintable_exception(self):
-        """A totally useless exception instance still prints something"""
-        exception_class = (
-            "class UnprintableError(Exception):\n"
-            "    def __str__(self):\n"
-            "        raise RuntimeError\n"
-            "    def __unicode__(self):\n"
-            "        raise RuntimeError\n"
-            "    def __repr__(self):\n"
-            "        raise RuntimeError\n")
-        textoutput = self._test_external_case(
-            modulelevel=exception_class,
-            testline="raise UnprintableError")
-        self.assertIn(self._as_output(
-            "UnprintableError: <unprintable UnprintableError object>\n"),
-            textoutput)
-
-    def test_string_exception(self):
-        """Raise a string rather than an exception instance if supported"""
-        if sys.version_info > (2, 6):
-            self.skip("No string exceptions in Python 2.6 or later")
-        elif sys.version_info > (2, 5):
-            self._silence_deprecation_warnings()
-        textoutput = self._test_external_case(testline="raise 'plain str'")
-        self.assertIn(self._as_output("\nplain str\n"), textoutput)
-
-    def test_non_ascii_dirname(self):
-        """Script paths in the traceback can be non-ascii"""
-        text, raw = self._get_sample_text(sys.getfilesystemencoding())
-        textoutput = self._test_external_case(
-            # Avoid bug in Python 3 by giving a unicode source encoding rather
-            # than just ascii which raises a SyntaxError with no other details
-            coding="utf-8",
-            testline="self.fail('Simple')",
-            suffix=raw)
-        self.assertIn(self._as_output(text), textoutput)
-
-    def test_syntax_error(self):
-        """Syntax errors should still have fancy special-case formatting"""
-        textoutput = self._test_external_case("exec ('f(a, b c)')")
-        self.assertIn(self._as_output(
-            '  File "<string>", line 1\n'
-            '    f(a, b c)\n'
-            + ' ' * self._error_on_character +
-            '          ^\n'
-            'SyntaxError: '
-            ), textoutput)
-
-    def test_syntax_error_malformed(self):
-        """Syntax errors with bogus parameters should break anything"""
-        textoutput = self._test_external_case("raise SyntaxError(3, 2, 1)")
-        self.assertIn(self._as_output("\nSyntaxError: "), textoutput)
-
-    def test_syntax_error_import_binary(self):
-        """Importing a binary file shouldn't break SyntaxError formatting"""
-        if sys.version_info < (2, 5):
-            # Python 2.4 assumes the file is latin-1 and tells you off
-            self._silence_deprecation_warnings()
-        self._setup_external_case("import bad")
-        f = open(os.path.join(self.dir, "bad.py"), "wb")
-        try:
-            f.write(_b("x\x9c\xcb*\xcd\xcb\x06\x00\x04R\x01\xb9"))
-        finally:
-            f.close()
-        textoutput = self._run_external_case()
-        self.assertIn(self._as_output("\nSyntaxError: "), textoutput)
-
-    def test_syntax_error_line_iso_8859_1(self):
-        """Syntax error on a latin-1 line shows the line decoded"""
-        text, raw = self._get_sample_text("iso-8859-1")
-        textoutput = self._setup_external_case("import bad")
-        self._write_module("bad", "iso-8859-1",
-            "# coding: iso-8859-1\n! = 0 # %s\n" % text)
-        textoutput = self._run_external_case()
-        self.assertIn(self._as_output(_u(
-            #'bad.py", line 2\n'
-            '    ! = 0 # %s\n'
-            '    ^\n'
-            'SyntaxError: ') %
-            (text,)), textoutput)
-
-    def test_syntax_error_line_iso_8859_5(self):
-        """Syntax error on a iso-8859-5 line shows the line decoded"""
-        text, raw = self._get_sample_text("iso-8859-5")
-        textoutput = self._setup_external_case("import bad")
-        self._write_module("bad", "iso-8859-5",
-            "# coding: iso-8859-5\n%% = 0 # %s\n" % text)
-        textoutput = self._run_external_case()
-        self.assertIn(self._as_output(_u(
-            #'bad.py", line 2\n'
-            '    %% = 0 # %s\n'
-            + ' ' * self._error_on_character +
-            '   ^\n'
-            'SyntaxError: ') %
-            (text,)), textoutput)
-
-    def test_syntax_error_line_euc_jp(self):
-        """Syntax error on a euc_jp line shows the line decoded"""
-        text, raw = self._get_sample_text("euc_jp")
-        textoutput = self._setup_external_case("import bad")
-        self._write_module("bad", "euc_jp",
-            "# coding: euc_jp\n$ = 0 # %s\n" % text)
-        textoutput = self._run_external_case()
-        self.assertIn(self._as_output(_u(
-            #'bad.py", line 2\n'
-            '    $ = 0 # %s\n'
-            + ' ' * self._error_on_character +
-            '   ^\n'
-            'SyntaxError: ') %
-            (text,)), textoutput)
-
-    def test_syntax_error_line_utf_8(self):
-        """Syntax error on a utf-8 line shows the line decoded"""
-        text, raw = self._get_sample_text("utf-8")
-        textoutput = self._setup_external_case("import bad")
-        self._write_module("bad", "utf-8", _u("\ufeff^ = 0 # %s\n") % text)
-        textoutput = self._run_external_case()
-        self.assertIn(self._as_output(_u(
-            'bad.py", line 1\n'
-            '    ^ = 0 # %s\n'
-            + ' ' * self._error_on_character +
-            '   ^\n'
-            'SyntaxError: ') %
-            text), textoutput)
-
-
-class TestNonAsciiResultsWithUnittest(TestNonAsciiResults):
-    """Test that running under unittest produces clean ascii strings"""
-
-    def _run(self, stream, test):
-        from unittest import TextTestRunner as _Runner
-        return _Runner(stream).run(test)
-
-    def _as_output(self, text):
-        if str_is_unicode:
-            return text
-        return text.encode("utf-8")
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_testsuite.py'
--- python-for-subunit2junitxml/testtools/tests/test_testsuite.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_testsuite.py	1970-01-01 00:00:00 +0000
@@ -1,53 +0,0 @@
-# Copyright (c) 2009 testtools developers. See LICENSE for details.
-
-"""Test ConcurrentTestSuite and related things."""
-
-__metaclass__ = type
-
-import datetime
-import unittest
-
-from testtools import (
-    ConcurrentTestSuite,
-    iterate_tests,
-    TestCase,
-    )
-from testtools.matchers import (
-    Equals,
-    )
-from testtools.tests.helpers import LoggingResult
-
-
-class TestConcurrentTestSuiteRun(TestCase):
-
-    def test_trivial(self):
-        log = []
-        result = LoggingResult(log)
-        class Sample(TestCase):
-            def __hash__(self):
-                return id(self)
-
-            def test_method1(self):
-                pass
-            def test_method2(self):
-                pass
-        test1 = Sample('test_method1')
-        test2 = Sample('test_method2')
-        original_suite = unittest.TestSuite([test1, test2])
-        suite = ConcurrentTestSuite(original_suite, self.split_suite)
-        suite.run(result)
-        # 0 is the timestamp for the first test starting.
-        test1 = log[1][1]
-        test2 = log[-1][1]
-        self.assertIsInstance(test1, Sample)
-        self.assertIsInstance(test2, Sample)
-        self.assertNotEqual(test1.id(), test2.id())
-
-    def split_suite(self, suite):
-        tests = list(iterate_tests(suite))
-        return tests[0], tests[1]
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_testtools.py'
--- python-for-subunit2junitxml/testtools/tests/test_testtools.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_testtools.py	1970-01-01 00:00:00 +0000
@@ -1,1143 +0,0 @@
-# Copyright (c) 2008-2010 testtools developers. See LICENSE for details.
-
-"""Tests for extensions to the base test library."""
-
-from pprint import pformat
-import os
-import sys
-import tempfile
-import unittest
-
-from testtools import (
-    ErrorHolder,
-    MultipleExceptions,
-    PlaceHolder,
-    TestCase,
-    clone_test_with_new_id,
-    content,
-    skip,
-    skipIf,
-    skipUnless,
-    testcase,
-    )
-from testtools.matchers import (
-    Equals,
-    MatchesException,
-    Raises,
-    )
-from testtools.tests.helpers import (
-    an_exc_info,
-    LoggingResult,
-    Python26TestResult,
-    Python27TestResult,
-    ExtendedTestResult,
-    )
-try:
-    exec('from __future__ import with_statement')
-except SyntaxError:
-    pass
-else:
-    from test_with_with import *
-
-
-class TestPlaceHolder(TestCase):
-
-    def makePlaceHolder(self, test_id="foo", short_description=None):
-        return PlaceHolder(test_id, short_description)
-
-    def test_id_comes_from_constructor(self):
-        # The id() of a PlaceHolder is whatever you pass into the constructor.
-        test = PlaceHolder("test id")
-        self.assertEqual("test id", test.id())
-
-    def test_shortDescription_is_id(self):
-        # The shortDescription() of a PlaceHolder is the id, by default.
-        test = PlaceHolder("test id")
-        self.assertEqual(test.id(), test.shortDescription())
-
-    def test_shortDescription_specified(self):
-        # If a shortDescription is provided to the constructor, then
-        # shortDescription() returns that instead.
-        test = PlaceHolder("test id", "description")
-        self.assertEqual("description", test.shortDescription())
-
-    def test_repr_just_id(self):
-        # repr(placeholder) shows you how the object was constructed.
-        test = PlaceHolder("test id")
-        self.assertEqual(
-            "<testtools.testcase.PlaceHolder(%s)>" % repr(test.id()),
-            repr(test))
-
-    def test_repr_with_description(self):
-        # repr(placeholder) shows you how the object was constructed.
-        test = PlaceHolder("test id", "description")
-        self.assertEqual(
-            "<testtools.testcase.PlaceHolder(%r, %r)>" % (
-                test.id(), test.shortDescription()),
-            repr(test))
-
-    def test_counts_as_one_test(self):
-        # A placeholder test counts as one test.
-        test = self.makePlaceHolder()
-        self.assertEqual(1, test.countTestCases())
-
-    def test_str_is_id(self):
-        # str(placeholder) is always the id(). We are not barbarians.
-        test = self.makePlaceHolder()
-        self.assertEqual(test.id(), str(test))
-
-    def test_runs_as_success(self):
-        # When run, a PlaceHolder test records a success.
-        test = self.makePlaceHolder()
-        log = []
-        test.run(LoggingResult(log))
-        self.assertEqual(
-            [('startTest', test), ('addSuccess', test), ('stopTest', test)],
-            log)
-
-    def test_call_is_run(self):
-        # A PlaceHolder can be called, in which case it behaves like run.
-        test = self.makePlaceHolder()
-        run_log = []
-        test.run(LoggingResult(run_log))
-        call_log = []
-        test(LoggingResult(call_log))
-        self.assertEqual(run_log, call_log)
-
-    def test_runs_without_result(self):
-        # A PlaceHolder can be run without a result, in which case there's no
-        # way to actually get at the result.
-        self.makePlaceHolder().run()
-
-    def test_debug(self):
-        # A PlaceHolder can be debugged.
-        self.makePlaceHolder().debug()
-
-
-class TestErrorHolder(TestCase):
-
-    def makeException(self):
-        try:
-            raise RuntimeError("danger danger")
-        except:
-            return sys.exc_info()
-
-    def makePlaceHolder(self, test_id="foo", error=None,
-                        short_description=None):
-        if error is None:
-            error = self.makeException()
-        return ErrorHolder(test_id, error, short_description)
-
-    def test_id_comes_from_constructor(self):
-        # The id() of a PlaceHolder is whatever you pass into the constructor.
-        test = ErrorHolder("test id", self.makeException())
-        self.assertEqual("test id", test.id())
-
-    def test_shortDescription_is_id(self):
-        # The shortDescription() of a PlaceHolder is the id, by default.
-        test = ErrorHolder("test id", self.makeException())
-        self.assertEqual(test.id(), test.shortDescription())
-
-    def test_shortDescription_specified(self):
-        # If a shortDescription is provided to the constructor, then
-        # shortDescription() returns that instead.
-        test = ErrorHolder("test id", self.makeException(), "description")
-        self.assertEqual("description", test.shortDescription())
-
-    def test_repr_just_id(self):
-        # repr(placeholder) shows you how the object was constructed.
-        error = self.makeException()
-        test = ErrorHolder("test id", error)
-        self.assertEqual(
-            "<testtools.testcase.ErrorHolder(%r, %r)>" % (test.id(), error),
-            repr(test))
-
-    def test_repr_with_description(self):
-        # repr(placeholder) shows you how the object was constructed.
-        error = self.makeException()
-        test = ErrorHolder("test id", error, "description")
-        self.assertEqual(
-            "<testtools.testcase.ErrorHolder(%r, %r, %r)>" % (
-                test.id(), error, test.shortDescription()),
-            repr(test))
-
-    def test_counts_as_one_test(self):
-        # A placeholder test counts as one test.
-        test = self.makePlaceHolder()
-        self.assertEqual(1, test.countTestCases())
-
-    def test_str_is_id(self):
-        # str(placeholder) is always the id(). We are not barbarians.
-        test = self.makePlaceHolder()
-        self.assertEqual(test.id(), str(test))
-
-    def test_runs_as_error(self):
-        # When run, an ErrorHolder test records an error.
-        error = self.makeException()
-        test = self.makePlaceHolder(error=error)
-        log = []
-        test.run(LoggingResult(log))
-        self.assertEqual(
-            [('startTest', test),
-             ('addError', test, error),
-             ('stopTest', test)], log)
-
-    def test_call_is_run(self):
-        # A PlaceHolder can be called, in which case it behaves like run.
-        test = self.makePlaceHolder()
-        run_log = []
-        test.run(LoggingResult(run_log))
-        call_log = []
-        test(LoggingResult(call_log))
-        self.assertEqual(run_log, call_log)
-
-    def test_runs_without_result(self):
-        # A PlaceHolder can be run without a result, in which case there's no
-        # way to actually get at the result.
-        self.makePlaceHolder().run()
-
-    def test_debug(self):
-        # A PlaceHolder can be debugged.
-        self.makePlaceHolder().debug()
-
-
-class TestEquality(TestCase):
-    """Test ``TestCase``'s equality implementation."""
-
-    def test_identicalIsEqual(self):
-        # TestCases are equal if they are identical.
-        self.assertEqual(self, self)
-
-    def test_nonIdenticalInUnequal(self):
-        # TestCases are not equal if they are not identical.
-        self.assertNotEqual(TestCase(methodName='run'),
-            TestCase(methodName='skip'))
-
-
-class TestAssertions(TestCase):
-    """Test assertions in TestCase."""
-
-    def raiseError(self, exceptionFactory, *args, **kwargs):
-        raise exceptionFactory(*args, **kwargs)
-
-    def test_formatTypes_single(self):
-        # Given a single class, _formatTypes returns the name.
-        class Foo(object):
-            pass
-        self.assertEqual('Foo', self._formatTypes(Foo))
-
-    def test_formatTypes_multiple(self):
-        # Given multiple types, _formatTypes returns the names joined by
-        # commas.
-        class Foo(object):
-            pass
-        class Bar(object):
-            pass
-        self.assertEqual('Foo, Bar', self._formatTypes([Foo, Bar]))
-
-    def test_assertRaises(self):
-        # assertRaises asserts that a callable raises a particular exception.
-        self.assertRaises(RuntimeError, self.raiseError, RuntimeError)
-
-    def test_assertRaises_fails_when_no_error_raised(self):
-        # assertRaises raises self.failureException when it's passed a
-        # callable that raises no error.
-        ret = ('orange', 42)
-        try:
-            self.assertRaises(RuntimeError, lambda: ret)
-        except self.failureException:
-            # We expected assertRaises to raise this exception.
-            e = sys.exc_info()[1]
-            self.assertEqual(
-                '%s not raised, %r returned instead.'
-                % (self._formatTypes(RuntimeError), ret), str(e))
-        else:
-            self.fail('Expected assertRaises to fail, but it did not.')
-
-    def test_assertRaises_fails_when_different_error_raised(self):
-        # assertRaises re-raises an exception that it didn't expect.
-        self.assertThat(lambda: self.assertRaises(RuntimeError,
-            self.raiseError, ZeroDivisionError),
-            Raises(MatchesException(ZeroDivisionError)))
-
-    def test_assertRaises_returns_the_raised_exception(self):
-        # assertRaises returns the exception object that was raised. This is
-        # useful for testing that exceptions have the right message.
-
-        # This contraption stores the raised exception, so we can compare it
-        # to the return value of assertRaises.
-        raisedExceptions = []
-        def raiseError():
-            try:
-                raise RuntimeError('Deliberate error')
-            except RuntimeError:
-                raisedExceptions.append(sys.exc_info()[1])
-                raise
-
-        exception = self.assertRaises(RuntimeError, raiseError)
-        self.assertEqual(1, len(raisedExceptions))
-        self.assertTrue(
-            exception is raisedExceptions[0],
-            "%r is not %r" % (exception, raisedExceptions[0]))
-
-    def test_assertRaises_with_multiple_exceptions(self):
-        # assertRaises((ExceptionOne, ExceptionTwo), function) asserts that
-        # function raises one of ExceptionTwo or ExceptionOne.
-        expectedExceptions = (RuntimeError, ZeroDivisionError)
-        self.assertRaises(
-            expectedExceptions, self.raiseError, expectedExceptions[0])
-        self.assertRaises(
-            expectedExceptions, self.raiseError, expectedExceptions[1])
-
-    def test_assertRaises_with_multiple_exceptions_failure_mode(self):
-        # If assertRaises is called expecting one of a group of exceptions and
-        # a callable that doesn't raise an exception, then fail with an
-        # appropriate error message.
-        expectedExceptions = (RuntimeError, ZeroDivisionError)
-        failure = self.assertRaises(
-            self.failureException,
-            self.assertRaises, expectedExceptions, lambda: None)
-        self.assertEqual(
-            '%s not raised, None returned instead.'
-            % self._formatTypes(expectedExceptions), str(failure))
-
-    def assertFails(self, message, function, *args, **kwargs):
-        """Assert that function raises a failure with the given message."""
-        failure = self.assertRaises(
-            self.failureException, function, *args, **kwargs)
-        self.assertEqual(message, str(failure))
-
-    def test_assertIn_success(self):
-        # assertIn(needle, haystack) asserts that 'needle' is in 'haystack'.
-        self.assertIn(3, range(10))
-        self.assertIn('foo', 'foo bar baz')
-        self.assertIn('foo', 'foo bar baz'.split())
-
-    def test_assertIn_failure(self):
-        # assertIn(needle, haystack) fails the test when 'needle' is not in
-        # 'haystack'.
-        self.assertFails('3 not in [0, 1, 2]', self.assertIn, 3, [0, 1, 2])
-        self.assertFails(
-            '%r not in %r' % ('qux', 'foo bar baz'),
-            self.assertIn, 'qux', 'foo bar baz')
-
-    def test_assertNotIn_success(self):
-        # assertNotIn(needle, haystack) asserts that 'needle' is not in
-        # 'haystack'.
-        self.assertNotIn(3, [0, 1, 2])
-        self.assertNotIn('qux', 'foo bar baz')
-
-    def test_assertNotIn_failure(self):
-        # assertNotIn(needle, haystack) fails the test when 'needle' is in
-        # 'haystack'.
-        self.assertFails('3 in [1, 2, 3]', self.assertNotIn, 3, [1, 2, 3])
-        self.assertFails(
-            '%r in %r' % ('foo', 'foo bar baz'),
-            self.assertNotIn, 'foo', 'foo bar baz')
-
-    def test_assertIsInstance(self):
-        # assertIsInstance asserts that an object is an instance of a class.
-
-        class Foo(object):
-            """Simple class for testing assertIsInstance."""
-
-        foo = Foo()
-        self.assertIsInstance(foo, Foo)
-
-    def test_assertIsInstance_multiple_classes(self):
-        # assertIsInstance asserts that an object is an instance of one of a
-        # group of classes.
-
-        class Foo(object):
-            """Simple class for testing assertIsInstance."""
-
-        class Bar(object):
-            """Another simple class for testing assertIsInstance."""
-
-        foo = Foo()
-        self.assertIsInstance(foo, (Foo, Bar))
-        self.assertIsInstance(Bar(), (Foo, Bar))
-
-    def test_assertIsInstance_failure(self):
-        # assertIsInstance(obj, klass) fails the test when obj is not an
-        # instance of klass.
-
-        class Foo(object):
-            """Simple class for testing assertIsInstance."""
-
-        self.assertFails(
-            '42 is not an instance of %s' % self._formatTypes(Foo),
-            self.assertIsInstance, 42, Foo)
-
-    def test_assertIsInstance_failure_multiple_classes(self):
-        # assertIsInstance(obj, (klass1, klass2)) fails the test when obj is
-        # not an instance of klass1 or klass2.
-
-        class Foo(object):
-            """Simple class for testing assertIsInstance."""
-
-        class Bar(object):
-            """Another simple class for testing assertIsInstance."""
-
-        self.assertFails(
-            '42 is not an instance of %s' % self._formatTypes([Foo, Bar]),
-            self.assertIsInstance, 42, (Foo, Bar))
-
-    def test_assertIsInstance_overridden_message(self):
-        # assertIsInstance(obj, klass, msg) permits a custom message.
-        self.assertFails("foo", self.assertIsInstance, 42, str, "foo")
-
-    def test_assertIs(self):
-        # assertIs asserts that an object is identical to another object.
-        self.assertIs(None, None)
-        some_list = [42]
-        self.assertIs(some_list, some_list)
-        some_object = object()
-        self.assertIs(some_object, some_object)
-
-    def test_assertIs_fails(self):
-        # assertIs raises assertion errors if one object is not identical to
-        # another.
-        self.assertFails('None is not 42', self.assertIs, None, 42)
-        self.assertFails('[42] is not [42]', self.assertIs, [42], [42])
-
-    def test_assertIs_fails_with_message(self):
-        # assertIs raises assertion errors if one object is not identical to
-        # another, and includes a user-supplied message, if it's provided.
-        self.assertFails(
-            'None is not 42: foo bar', self.assertIs, None, 42, 'foo bar')
-
-    def test_assertIsNot(self):
-        # assertIsNot asserts that an object is not identical to another
-        # object.
-        self.assertIsNot(None, 42)
-        self.assertIsNot([42], [42])
-        self.assertIsNot(object(), object())
-
-    def test_assertIsNot_fails(self):
-        # assertIsNot raises assertion errors if one object is identical to
-        # another.
-        self.assertFails('None is None', self.assertIsNot, None, None)
-        some_list = [42]
-        self.assertFails(
-            '[42] is [42]', self.assertIsNot, some_list, some_list)
-
-    def test_assertIsNot_fails_with_message(self):
-        # assertIsNot raises assertion errors if one object is identical to
-        # another, and includes a user-supplied message if it's provided.
-        self.assertFails(
-            'None is None: foo bar', self.assertIsNot, None, None, "foo bar")
-
-    def test_assertThat_matches_clean(self):
-        class Matcher(object):
-            def match(self, foo):
-                return None
-        self.assertThat("foo", Matcher())
-
-    def test_assertThat_mismatch_raises_description(self):
-        calls = []
-        class Mismatch(object):
-            def __init__(self, thing):
-                self.thing = thing
-            def describe(self):
-                calls.append(('describe_diff', self.thing))
-                return "object is not a thing"
-            def get_details(self):
-                return {}
-        class Matcher(object):
-            def match(self, thing):
-                calls.append(('match', thing))
-                return Mismatch(thing)
-            def __str__(self):
-                calls.append(('__str__',))
-                return "a description"
-        class Test(TestCase):
-            def test(self):
-                self.assertThat("foo", Matcher())
-        result = Test("test").run()
-        self.assertEqual([
-            ('match', "foo"),
-            ('describe_diff', "foo"),
-            ('__str__',),
-            ], calls)
-        self.assertFalse(result.wasSuccessful())
-
-    def test_assertEqual_nice_formatting(self):
-        message = "These things ought not be equal."
-        a = ['apple', 'banana', 'cherry']
-        b = {'Thatcher': 'One who mends roofs of straw',
-             'Major': 'A military officer, ranked below colonel',
-             'Blair': 'To shout loudly',
-             'Brown': 'The colour of healthy human faeces'}
-        expected_error = '\n'.join(
-            [message,
-             'not equal:',
-             'a = %s' % pformat(a),
-             'b = %s' % pformat(b),
-             ''])
-        expected_error = '\n'.join([
-            'Match failed. Matchee: "%r"' % b,
-            'Matcher: Annotate(%r, Equals(%r))' % (message, a),
-            'Difference: !=:',
-            'reference = %s' % pformat(a),
-            'actual = %s' % pformat(b),
-            ': ' + message,
-            ''
-            ])
-        self.assertFails(expected_error, self.assertEqual, a, b, message)
-        self.assertFails(expected_error, self.assertEquals, a, b, message)
-        self.assertFails(expected_error, self.failUnlessEqual, a, b, message)
-
-    def test_assertEqual_formatting_no_message(self):
-        a = "cat"
-        b = "dog"
-        expected_error = '\n'.join([
-            'Match failed. Matchee: "dog"',
-            'Matcher: Equals(\'cat\')',
-            'Difference: \'cat\' != \'dog\'',
-            ''
-            ])
-        self.assertFails(expected_error, self.assertEqual, a, b)
-        self.assertFails(expected_error, self.assertEquals, a, b)
-        self.assertFails(expected_error, self.failUnlessEqual, a, b)
-
-
-class TestAddCleanup(TestCase):
-    """Tests for TestCase.addCleanup."""
-
-    class LoggingTest(TestCase):
-        """A test that logs calls to setUp, runTest and tearDown."""
-
-        def setUp(self):
-            TestCase.setUp(self)
-            self._calls = ['setUp']
-
-        def brokenSetUp(self):
-            # A setUp that deliberately fails.
-            self._calls = ['brokenSetUp']
-            raise RuntimeError('Deliberate Failure')
-
-        def runTest(self):
-            self._calls.append('runTest')
-
-        def brokenTest(self):
-            raise RuntimeError('Deliberate broken test')
-
-        def tearDown(self):
-            self._calls.append('tearDown')
-            TestCase.tearDown(self)
-
-    def setUp(self):
-        TestCase.setUp(self)
-        self._result_calls = []
-        self.test = TestAddCleanup.LoggingTest('runTest')
-        self.logging_result = LoggingResult(self._result_calls)
-
-    def assertErrorLogEqual(self, messages):
-        self.assertEqual(messages, [call[0] for call in self._result_calls])
-
-    def assertTestLogEqual(self, messages):
-        """Assert that the call log equals 'messages'."""
-        case = self._result_calls[0][1]
-        self.assertEqual(messages, case._calls)
-
-    def logAppender(self, message):
-        """A cleanup that appends 'message' to the tests log.
-
-        Cleanups are callables that are added to a test by addCleanup. To
-        verify that our cleanups run in the right order, we add strings to a
-        list that acts as a log. This method is registered with addCleanup and
-        appends the given message to that log when the cleanup runs.
-        """
-        self.test._calls.append(message)
-
-    def test_fixture(self):
-        # A normal run of self.test logs 'setUp', 'runTest' and 'tearDown'.
-        # This test doesn't test addCleanup itself; it just sanity-checks the
-        # fixture.
-        self.test.run(self.logging_result)
-        self.assertTestLogEqual(['setUp', 'runTest', 'tearDown'])
-
-    def test_cleanup_run_before_tearDown(self):
-        # Cleanup functions added with 'addCleanup' are called before tearDown
-        # runs.
-        self.test.addCleanup(self.logAppender, 'cleanup')
-        self.test.run(self.logging_result)
-        self.assertTestLogEqual(['setUp', 'runTest', 'tearDown', 'cleanup'])
-
-    def test_add_cleanup_called_if_setUp_fails(self):
-        # Cleanup functions added with 'addCleanup' are called even if setUp
-        # fails. Note that tearDown has a different behavior: it is only
-        # called when setUp succeeds.
-        self.test.setUp = self.test.brokenSetUp
-        self.test.addCleanup(self.logAppender, 'cleanup')
-        self.test.run(self.logging_result)
-        self.assertTestLogEqual(['brokenSetUp', 'cleanup'])
-
-    def test_addCleanup_called_in_reverse_order(self):
-        # Cleanup functions added with 'addCleanup' are called in reverse
-        # order.
-        #
-        # One of the main uses of addCleanup is to dynamically create
-        # resources that need some sort of explicit tearDown. Often one
-        # resource will be created in terms of another, e.g.,
-        #     self.first = self.makeFirst()
-        #     self.second = self.makeSecond(self.first)
-        #
-        # When this happens, we generally want to clean up the second resource
-        # before the first one, since the second depends on the first.
-        self.test.addCleanup(self.logAppender, 'first')
-        self.test.addCleanup(self.logAppender, 'second')
-        self.test.run(self.logging_result)
-        self.assertTestLogEqual(
-            ['setUp', 'runTest', 'tearDown', 'second', 'first'])
-
-    def test_tearDown_runs_after_cleanup_failure(self):
-        # tearDown runs even if a cleanup function fails.
-        self.test.addCleanup(lambda: 1/0)
-        self.test.run(self.logging_result)
-        self.assertTestLogEqual(['setUp', 'runTest', 'tearDown'])
-
-    def test_cleanups_continue_running_after_error(self):
-        # All cleanups are always run, even if one or two of them fail.
-        self.test.addCleanup(self.logAppender, 'first')
-        self.test.addCleanup(lambda: 1/0)
-        self.test.addCleanup(self.logAppender, 'second')
-        self.test.run(self.logging_result)
-        self.assertTestLogEqual(
-            ['setUp', 'runTest', 'tearDown', 'second', 'first'])
-
-    def test_error_in_cleanups_are_captured(self):
-        # If a cleanup raises an error, we want to record it and fail the
-        # test, even though we go on to run other cleanups.
-        self.test.addCleanup(lambda: 1/0)
-        self.test.run(self.logging_result)
-        self.assertErrorLogEqual(['startTest', 'addError', 'stopTest'])
-
-    def test_keyboard_interrupt_not_caught(self):
-        # If a cleanup raises KeyboardInterrupt, it gets reraised.
-        def raiseKeyboardInterrupt():
-            raise KeyboardInterrupt()
-        self.test.addCleanup(raiseKeyboardInterrupt)
-        self.assertThat(lambda:self.test.run(self.logging_result),
-            Raises(MatchesException(KeyboardInterrupt)))
-
-    def test_all_errors_from_MultipleExceptions_reported(self):
-        # When a MultipleExceptions exception is caught, all the errors are
-        # reported.
-        def raiseMany():
-            try:
-                1/0
-            except Exception:
-                exc_info1 = sys.exc_info()
-            try:
-                1/0
-            except Exception:
-                exc_info2 = sys.exc_info()
-            raise MultipleExceptions(exc_info1, exc_info2)
-        self.test.addCleanup(raiseMany)
-        self.logging_result = ExtendedTestResult()
-        self.test.run(self.logging_result)
-        self.assertEqual(['startTest', 'addError', 'stopTest'],
-            [event[0] for event in self.logging_result._events])
-        self.assertEqual(set(['traceback', 'traceback-1']),
-            set(self.logging_result._events[1][2].keys()))
-
-    def test_multipleCleanupErrorsReported(self):
-        # Errors from all failing cleanups are reported as separate backtraces.
-        self.test.addCleanup(lambda: 1/0)
-        self.test.addCleanup(lambda: 1/0)
-        self.logging_result = ExtendedTestResult()
-        self.test.run(self.logging_result)
-        self.assertEqual(['startTest', 'addError', 'stopTest'],
-            [event[0] for event in self.logging_result._events])
-        self.assertEqual(set(['traceback', 'traceback-1']),
-            set(self.logging_result._events[1][2].keys()))
-
-    def test_multipleErrorsCoreAndCleanupReported(self):
-        # Errors from all failing cleanups are reported, with stopTest,
-        # startTest inserted.
-        self.test = TestAddCleanup.LoggingTest('brokenTest')
-        self.test.addCleanup(lambda: 1/0)
-        self.test.addCleanup(lambda: 1/0)
-        self.logging_result = ExtendedTestResult()
-        self.test.run(self.logging_result)
-        self.assertEqual(['startTest', 'addError', 'stopTest'],
-            [event[0] for event in self.logging_result._events])
-        self.assertEqual(set(['traceback', 'traceback-1', 'traceback-2']),
-            set(self.logging_result._events[1][2].keys()))
-
-
-class TestWithDetails(TestCase):
-
-    def assertDetailsProvided(self, case, expected_outcome, expected_keys):
-        """Assert that when case is run, details are provided to the result.
-
-        :param case: A TestCase to run.
-        :param expected_outcome: The call that should be made.
-        :param expected_keys: The keys to look for.
-        """
-        result = ExtendedTestResult()
-        case.run(result)
-        case = result._events[0][1]
-        expected = [
-            ('startTest', case),
-            (expected_outcome, case),
-            ('stopTest', case),
-            ]
-        self.assertEqual(3, len(result._events))
-        self.assertEqual(expected[0], result._events[0])
-        self.assertEqual(expected[1], result._events[1][0:2])
-        # Checking the TB is right is rather tricky. doctest line matching
-        # would help, but 'meh'.
-        self.assertEqual(sorted(expected_keys),
-            sorted(result._events[1][2].keys()))
-        self.assertEqual(expected[-1], result._events[-1])
-
-    def get_content(self):
-        return content.Content(
-            content.ContentType("text", "foo"), lambda: ['foo'])
-
-
-class TestExpectedFailure(TestWithDetails):
-    """Tests for expected failures and unexpected successess."""
-
-    def make_unexpected_case(self):
-        class Case(TestCase):
-            def test(self):
-                raise testcase._UnexpectedSuccess
-        case = Case('test')
-        return case
-
-    def test_raising__UnexpectedSuccess_py27(self):
-        case = self.make_unexpected_case()
-        result = Python27TestResult()
-        case.run(result)
-        case = result._events[0][1]
-        self.assertEqual([
-            ('startTest', case),
-            ('addUnexpectedSuccess', case),
-            ('stopTest', case),
-            ], result._events)
-
-    def test_raising__UnexpectedSuccess_extended(self):
-        case = self.make_unexpected_case()
-        result = ExtendedTestResult()
-        case.run(result)
-        case = result._events[0][1]
-        self.assertEqual([
-            ('startTest', case),
-            ('addUnexpectedSuccess', case, {}),
-            ('stopTest', case),
-            ], result._events)
-
-    def make_xfail_case_xfails(self):
-        content = self.get_content()
-        class Case(TestCase):
-            def test(self):
-                self.addDetail("foo", content)
-                self.expectFailure("we are sad", self.assertEqual,
-                    1, 0)
-        case = Case('test')
-        return case
-
-    def make_xfail_case_succeeds(self):
-        content = self.get_content()
-        class Case(TestCase):
-            def test(self):
-                self.addDetail("foo", content)
-                self.expectFailure("we are sad", self.assertEqual,
-                    1, 1)
-        case = Case('test')
-        return case
-
-    def test_expectFailure_KnownFailure_extended(self):
-        case = self.make_xfail_case_xfails()
-        self.assertDetailsProvided(case, "addExpectedFailure",
-            ["foo", "traceback", "reason"])
-
-    def test_expectFailure_KnownFailure_unexpected_success(self):
-        case = self.make_xfail_case_succeeds()
-        self.assertDetailsProvided(case, "addUnexpectedSuccess",
-            ["foo", "reason"])
-
-
-class TestUniqueFactories(TestCase):
-    """Tests for getUniqueString and getUniqueInteger."""
-
-    def test_getUniqueInteger(self):
-        # getUniqueInteger returns an integer that increments each time you
-        # call it.
-        one = self.getUniqueInteger()
-        self.assertEqual(1, one)
-        two = self.getUniqueInteger()
-        self.assertEqual(2, two)
-
-    def test_getUniqueString(self):
-        # getUniqueString returns the current test id followed by a unique
-        # integer.
-        name_one = self.getUniqueString()
-        self.assertEqual('%s-%d' % (self.id(), 1), name_one)
-        name_two = self.getUniqueString()
-        self.assertEqual('%s-%d' % (self.id(), 2), name_two)
-
-    def test_getUniqueString_prefix(self):
-        # If getUniqueString is given an argument, it uses that argument as
-        # the prefix of the unique string, rather than the test id.
-        name_one = self.getUniqueString('foo')
-        self.assertThat(name_one, Equals('foo-1'))
-        name_two = self.getUniqueString('bar')
-        self.assertThat(name_two, Equals('bar-2'))
-
-
-class TestCloneTestWithNewId(TestCase):
-    """Tests for clone_test_with_new_id."""
-
-    def test_clone_test_with_new_id(self):
-        class FooTestCase(TestCase):
-            def test_foo(self):
-                pass
-        test = FooTestCase('test_foo')
-        oldName = test.id()
-        newName = self.getUniqueString()
-        newTest = clone_test_with_new_id(test, newName)
-        self.assertEqual(newName, newTest.id())
-        self.assertEqual(oldName, test.id(),
-            "the original test instance should be unchanged.")
-
-    def test_cloned_testcase_does_not_share_details(self):
-        """A cloned TestCase does not share the details dict."""
-        class Test(TestCase):
-            def test_foo(self):
-                self.addDetail(
-                    'foo', content.Content('text/plain', lambda: 'foo'))
-        orig_test = Test('test_foo')
-        cloned_test = clone_test_with_new_id(orig_test, self.getUniqueString())
-        orig_test.run(unittest.TestResult())
-        self.assertEqual('foo', orig_test.getDetails()['foo'].iter_bytes())
-        self.assertEqual(None, cloned_test.getDetails().get('foo'))
-
-
-class TestDetailsProvided(TestWithDetails):
-
-    def test_addDetail(self):
-        mycontent = self.get_content()
-        self.addDetail("foo", mycontent)
-        details = self.getDetails()
-        self.assertEqual({"foo": mycontent}, details)
-
-    def test_addError(self):
-        class Case(TestCase):
-            def test(this):
-                this.addDetail("foo", self.get_content())
-                1/0
-        self.assertDetailsProvided(Case("test"), "addError",
-            ["foo", "traceback"])
-
-    def test_addFailure(self):
-        class Case(TestCase):
-            def test(this):
-                this.addDetail("foo", self.get_content())
-                self.fail('yo')
-        self.assertDetailsProvided(Case("test"), "addFailure",
-            ["foo", "traceback"])
-
-    def test_addSkip(self):
-        class Case(TestCase):
-            def test(this):
-                this.addDetail("foo", self.get_content())
-                self.skip('yo')
-        self.assertDetailsProvided(Case("test"), "addSkip",
-            ["foo", "reason"])
-
-    def test_addSuccess(self):
-        class Case(TestCase):
-            def test(this):
-                this.addDetail("foo", self.get_content())
-        self.assertDetailsProvided(Case("test"), "addSuccess",
-            ["foo"])
-
-    def test_addUnexpectedSuccess(self):
-        class Case(TestCase):
-            def test(this):
-                this.addDetail("foo", self.get_content())
-                raise testcase._UnexpectedSuccess()
-        self.assertDetailsProvided(Case("test"), "addUnexpectedSuccess",
-            ["foo"])
-
-    def test_addDetails_from_Mismatch(self):
-        content = self.get_content()
-        class Mismatch(object):
-            def describe(self):
-                return "Mismatch"
-            def get_details(self):
-                return {"foo": content}
-        class Matcher(object):
-            def match(self, thing):
-                return Mismatch()
-            def __str__(self):
-                return "a description"
-        class Case(TestCase):
-            def test(self):
-                self.assertThat("foo", Matcher())
-        self.assertDetailsProvided(Case("test"), "addFailure",
-            ["foo", "traceback"])
-
-    def test_multiple_addDetails_from_Mismatch(self):
-        content = self.get_content()
-        class Mismatch(object):
-            def describe(self):
-                return "Mismatch"
-            def get_details(self):
-                return {"foo": content, "bar": content}
-        class Matcher(object):
-            def match(self, thing):
-                return Mismatch()
-            def __str__(self):
-                return "a description"
-        class Case(TestCase):
-            def test(self):
-                self.assertThat("foo", Matcher())
-        self.assertDetailsProvided(Case("test"), "addFailure",
-            ["bar", "foo", "traceback"])
-
-    def test_addDetails_with_same_name_as_key_from_get_details(self):
-        content = self.get_content()
-        class Mismatch(object):
-            def describe(self):
-                return "Mismatch"
-            def get_details(self):
-                return {"foo": content}
-        class Matcher(object):
-            def match(self, thing):
-                return Mismatch()
-            def __str__(self):
-                return "a description"
-        class Case(TestCase):
-            def test(self):
-                self.addDetail("foo", content)
-                self.assertThat("foo", Matcher())
-        self.assertDetailsProvided(Case("test"), "addFailure",
-            ["foo", "foo-1", "traceback"])
-
-
-class TestSetupTearDown(TestCase):
-
-    def test_setUpNotCalled(self):
-        class DoesnotcallsetUp(TestCase):
-            def setUp(self):
-                pass
-            def test_method(self):
-                pass
-        result = unittest.TestResult()
-        DoesnotcallsetUp('test_method').run(result)
-        self.assertEqual(1, len(result.errors))
-
-    def test_tearDownNotCalled(self):
-        class DoesnotcalltearDown(TestCase):
-            def test_method(self):
-                pass
-            def tearDown(self):
-                pass
-        result = unittest.TestResult()
-        DoesnotcalltearDown('test_method').run(result)
-        self.assertEqual(1, len(result.errors))
-
-
-class TestSkipping(TestCase):
-    """Tests for skipping of tests functionality."""
-
-    def test_skip_causes_skipException(self):
-        self.assertThat(lambda:self.skip("Skip this test"),
-            Raises(MatchesException(self.skipException)))
-
-    def test_can_use_skipTest(self):
-        self.assertThat(lambda:self.skipTest("Skip this test"),
-            Raises(MatchesException(self.skipException)))
-
-    def test_skip_without_reason_works(self):
-        class Test(TestCase):
-            def test(self):
-                raise self.skipException()
-        case = Test("test")
-        result = ExtendedTestResult()
-        case.run(result)
-        self.assertEqual('addSkip', result._events[1][0])
-        self.assertEqual('no reason given.',
-            ''.join(result._events[1][2]['reason'].iter_text()))
-
-    def test_skipException_in_setup_calls_result_addSkip(self):
-        class TestThatRaisesInSetUp(TestCase):
-            def setUp(self):
-                TestCase.setUp(self)
-                self.skip("skipping this test")
-            def test_that_passes(self):
-                pass
-        calls = []
-        result = LoggingResult(calls)
-        test = TestThatRaisesInSetUp("test_that_passes")
-        test.run(result)
-        case = result._events[0][1]
-        self.assertEqual([('startTest', case),
-            ('addSkip', case, "skipping this test"), ('stopTest', case)],
-            calls)
-
-    def test_skipException_in_test_method_calls_result_addSkip(self):
-        class SkippingTest(TestCase):
-            def test_that_raises_skipException(self):
-                self.skip("skipping this test")
-        result = Python27TestResult()
-        test = SkippingTest("test_that_raises_skipException")
-        test.run(result)
-        case = result._events[0][1]
-        self.assertEqual([('startTest', case),
-            ('addSkip', case, "skipping this test"), ('stopTest', case)],
-            result._events)
-
-    def test_skip__in_setup_with_old_result_object_calls_addSuccess(self):
-        class SkippingTest(TestCase):
-            def setUp(self):
-                TestCase.setUp(self)
-                raise self.skipException("skipping this test")
-            def test_that_raises_skipException(self):
-                pass
-        result = Python26TestResult()
-        test = SkippingTest("test_that_raises_skipException")
-        test.run(result)
-        self.assertEqual('addSuccess', result._events[1][0])
-
-    def test_skip_with_old_result_object_calls_addError(self):
-        class SkippingTest(TestCase):
-            def test_that_raises_skipException(self):
-                raise self.skipException("skipping this test")
-        result = Python26TestResult()
-        test = SkippingTest("test_that_raises_skipException")
-        test.run(result)
-        self.assertEqual('addSuccess', result._events[1][0])
-
-    def test_skip_decorator(self):
-        class SkippingTest(TestCase):
-            @skip("skipping this test")
-            def test_that_is_decorated_with_skip(self):
-                self.fail()
-        result = Python26TestResult()
-        test = SkippingTest("test_that_is_decorated_with_skip")
-        test.run(result)
-        self.assertEqual('addSuccess', result._events[1][0])
-
-    def test_skipIf_decorator(self):
-        class SkippingTest(TestCase):
-            @skipIf(True, "skipping this test")
-            def test_that_is_decorated_with_skipIf(self):
-                self.fail()
-        result = Python26TestResult()
-        test = SkippingTest("test_that_is_decorated_with_skipIf")
-        test.run(result)
-        self.assertEqual('addSuccess', result._events[1][0])
-
-    def test_skipUnless_decorator(self):
-        class SkippingTest(TestCase):
-            @skipUnless(False, "skipping this test")
-            def test_that_is_decorated_with_skipUnless(self):
-                self.fail()
-        result = Python26TestResult()
-        test = SkippingTest("test_that_is_decorated_with_skipUnless")
-        test.run(result)
-        self.assertEqual('addSuccess', result._events[1][0])
-
-
-class TestOnException(TestCase):
-
-    def test_default_works(self):
-        events = []
-        class Case(TestCase):
-            def method(self):
-                self.onException(an_exc_info)
-                events.append(True)
-        case = Case("method")
-        case.run()
-        self.assertThat(events, Equals([True]))
-
-    def test_added_handler_works(self):
-        events = []
-        class Case(TestCase):
-            def method(self):
-                self.addOnException(events.append)
-                self.onException(an_exc_info)
-        case = Case("method")
-        case.run()
-        self.assertThat(events, Equals([an_exc_info]))
-
-    def test_handler_that_raises_is_not_caught(self):
-        events = []
-        class Case(TestCase):
-            def method(self):
-                self.addOnException(events.index)
-                self.assertThat(lambda: self.onException(an_exc_info),
-                    Raises(MatchesException(ValueError)))
-        case = Case("method")
-        case.run()
-        self.assertThat(events, Equals([]))
-
-
-class TestPatchSupport(TestCase):
-
-    class Case(TestCase):
-        def test(self):
-            pass
-
-    def test_patch(self):
-        # TestCase.patch masks obj.attribute with the new value.
-        self.foo = 'original'
-        test = self.Case('test')
-        test.patch(self, 'foo', 'patched')
-        self.assertEqual('patched', self.foo)
-
-    def test_patch_restored_after_run(self):
-        # TestCase.patch masks obj.attribute with the new value, but restores
-        # the original value after the test is finished.
-        self.foo = 'original'
-        test = self.Case('test')
-        test.patch(self, 'foo', 'patched')
-        test.run()
-        self.assertEqual('original', self.foo)
-
-    def test_successive_patches_apply(self):
-        # TestCase.patch can be called multiple times per test. Each time you
-        # call it, it overrides the original value.
-        self.foo = 'original'
-        test = self.Case('test')
-        test.patch(self, 'foo', 'patched')
-        test.patch(self, 'foo', 'second')
-        self.assertEqual('second', self.foo)
-
-    def test_successive_patches_restored_after_run(self):
-        # TestCase.patch restores the original value, no matter how many times
-        # it was called.
-        self.foo = 'original'
-        test = self.Case('test')
-        test.patch(self, 'foo', 'patched')
-        test.patch(self, 'foo', 'second')
-        test.run()
-        self.assertEqual('original', self.foo)
-
-    def test_patch_nonexistent_attribute(self):
-        # TestCase.patch can be used to patch a non-existent attribute.
-        test = self.Case('test')
-        test.patch(self, 'doesntexist', 'patched')
-        self.assertEqual('patched', self.doesntexist)
-
-    def test_restore_nonexistent_attribute(self):
-        # TestCase.patch can be used to patch a non-existent attribute; after
-        # the test run, the attribute is removed from the object.
-        test = self.Case('test')
-        test.patch(self, 'doesntexist', 'patched')
-        test.run()
-        marker = object()
-        value = getattr(self, 'doesntexist', marker)
-        self.assertIs(marker, value)
-
-
-def test_suite():
-    from unittest import TestLoader
-    return TestLoader().loadTestsFromName(__name__)

=== removed file 'python-for-subunit2junitxml/testtools/tests/test_with_with.py'
--- python-for-subunit2junitxml/testtools/tests/test_with_with.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/tests/test_with_with.py	1970-01-01 00:00:00 +0000
@@ -1,42 +0,0 @@
-# Copyright (c) 2011 testtools developers. See LICENSE for details.
-
-from __future__ import with_statement
-
-from testtools import (
-    ExpectedException,
-    TestCase,
-    )
-
-class TestExpectedException(TestCase):
-    """Test the ExpectedException context manager."""
-
-    def test_pass_on_raise(self):
-        with ExpectedException(ValueError, 'tes.'):
-            raise ValueError('test')
-
-    def test_raise_on_text_mismatch(self):
-        try:
-            with ExpectedException(ValueError, 'tes.'):
-                raise ValueError('mismatch')
-        except AssertionError, e:
-            self.assertEqual('"mismatch" does not match "tes.".', str(e))
-        else:
-            self.fail('AssertionError not raised.')
-
-    def test_raise_on_error_mismatch(self):
-        try:
-            with ExpectedException(TypeError, 'tes.'):
-                raise ValueError('mismatch')
-        except ValueError, e:
-            self.assertEqual('mismatch', str(e))
-        else:
-            self.fail('ValueError not raised.')
-
-    def test_raise_if_no_exception(self):
-        try:
-            with ExpectedException(TypeError, 'tes.'):
-                pass
-        except AssertionError, e:
-            self.assertEqual('TypeError not raised.', str(e))
-        else:
-            self.fail('AssertionError not raised.')

=== removed file 'python-for-subunit2junitxml/testtools/testsuite.py'
--- python-for-subunit2junitxml/testtools/testsuite.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/testsuite.py	1970-01-01 00:00:00 +0000
@@ -1,87 +0,0 @@
-# Copyright (c) 2009 testtools developers. See LICENSE for details.
-
-"""Test suites and related things."""
-
-__metaclass__ = type
-__all__ = [
-  'ConcurrentTestSuite',
-  'iterate_tests',
-  ]
-
-try:
-    from Queue import Queue
-except ImportError:
-    from queue import Queue
-import threading
-import unittest
-
-import testtools
-
-
-def iterate_tests(test_suite_or_case):
-    """Iterate through all of the test cases in 'test_suite_or_case'."""
-    try:
-        suite = iter(test_suite_or_case)
-    except TypeError:
-        yield test_suite_or_case
-    else:
-        for test in suite:
-            for subtest in iterate_tests(test):
-                yield subtest
-
-
-class ConcurrentTestSuite(unittest.TestSuite):
-    """A TestSuite whose run() calls out to a concurrency strategy."""
-
-    def __init__(self, suite, make_tests):
-        """Create a ConcurrentTestSuite to execute suite.
-
-        :param suite: A suite to run concurrently.
-        :param make_tests: A helper function to split the tests in the
-            ConcurrentTestSuite into some number of concurrently executing
-            sub-suites. make_tests must take a suite, and return an iterable
-            of TestCase-like objects, each of which must have a run(result)
-            method.
-        """
-        super(ConcurrentTestSuite, self).__init__([suite])
-        self.make_tests = make_tests
-
-    def run(self, result):
-        """Run the tests concurrently.
-
-        This calls out to the provided make_tests helper, and then serialises
-        the results so that result only sees activity from one TestCase at
-        a time.
-
-        ConcurrentTestSuite provides no special mechanism to stop the tests
-        returned by make_tests; it is up to those tests to honour the
-        shouldStop attribute on the result object they are run with, which will
-        be set if an exception is raised in the thread in which
-        ConcurrentTestSuite.run is called.
-        """
-        tests = self.make_tests(self)
-        try:
-            threads = {}
-            queue = Queue()
-            result_semaphore = threading.Semaphore(1)
-            for test in tests:
-                process_result = testtools.ThreadsafeForwardingResult(result,
-                    result_semaphore)
-                reader_thread = threading.Thread(
-                    target=self._run_test, args=(test, process_result, queue))
-                threads[test] = reader_thread, process_result
-                reader_thread.start()
-            while threads:
-                finished_test = queue.get()
-                threads[finished_test][0].join()
-                del threads[finished_test]
-        except:
-            for thread, process_result in threads.values():
-                process_result.stop()
-            raise
-
-    def _run_test(self, test, process_result, queue):
-        try:
-            test.run(process_result)
-        finally:
-            queue.put(test)

=== removed file 'python-for-subunit2junitxml/testtools/utils.py'
--- python-for-subunit2junitxml/testtools/utils.py	2013-05-27 12:16:36 +0000
+++ python-for-subunit2junitxml/testtools/utils.py	1970-01-01 00:00:00 +0000
@@ -1,13 +0,0 @@
-# Copyright (c) 2008-2010 testtools developers. See LICENSE for details.
-
-"""Utilities for dealing with stuff in unittest.
-
-Legacy - deprecated - use testtools.testsuite.iterate_tests
-"""
-
-import warnings
-warnings.warn("Please import iterate_tests from testtools.testsuite - "
-    "testtools.utils is deprecated.", DeprecationWarning, stacklevel=2)
-
-from testtools.testsuite import iterate_tests
-

