kafka-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jun...@apache.org
Subject [1/3] kafka git commit: kafka-1809; Refactor brokers to allow listening on multiple ports and IPs; patched by Gwen Shapira; reviewed by Joel Koshy and Jun Rao
Date Mon, 06 Apr 2015 00:21:48 GMT
Repository: kafka
Updated Branches:
  refs/heads/trunk 07598ad8a -> 53f31432a


http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala
index 150c311..62d1832 100644
--- a/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala
+++ b/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala
@@ -18,9 +18,10 @@ package unit.kafka.server
 
 import java.util.Properties
 
+import kafka.api.ApiVersion
 import kafka.message._
 import kafka.server.{Defaults, KafkaConfig}
-import org.apache.kafka.common.config.ConfigException
+import org.apache.kafka.common.protocol.SecurityProtocol
 import org.junit.{Assert, Test}
 import org.scalatest.junit.JUnit3Suite
 
@@ -172,8 +173,10 @@ class KafkaConfigConfigDefTest extends JUnit3Suite {
 
         case KafkaConfig.PortProp => expected.setProperty(name, "1234")
         case KafkaConfig.HostNameProp => expected.setProperty(name, nextString(10))
+        case KafkaConfig.ListenersProp => expected.setProperty(name, "PLAINTEXT://:9092")
         case KafkaConfig.AdvertisedHostNameProp => expected.setProperty(name, nextString(10))
         case KafkaConfig.AdvertisedPortProp => expected.setProperty(name, "4321")
+        case KafkaConfig.AdvertisedListenersProp => expected.setProperty(name, "PLAINTEXT://:2909")
         case KafkaConfig.SocketRequestMaxBytesProp => expected.setProperty(name, atLeastOneIntProp)
         case KafkaConfig.MaxConnectionsPerIpProp => expected.setProperty(name, atLeastOneIntProp)
         case KafkaConfig.MaxConnectionsPerIpOverridesProp => expected.setProperty(name, "127.0.0.1:2, 127.0.0.2:3")
@@ -204,6 +207,9 @@ class KafkaConfigConfigDefTest extends JUnit3Suite {
         case KafkaConfig.MinInSyncReplicasProp => expected.setProperty(name, atLeastOneIntProp)
         case KafkaConfig.AutoLeaderRebalanceEnableProp => expected.setProperty(name, randFrom("true", "false"))
         case KafkaConfig.UncleanLeaderElectionEnableProp => expected.setProperty(name, randFrom("true", "false"))
+        case KafkaConfig.InterBrokerSecurityProtocolProp => expected.setProperty(name, SecurityProtocol.PLAINTEXT.toString)
+        case KafkaConfig.InterBrokerProtocolVersionProp => expected.setProperty(name, ApiVersion.latestVersion.toString)
+
         case KafkaConfig.ControlledShutdownEnableProp => expected.setProperty(name, randFrom("true", "false"))
         case KafkaConfig.OffsetsLoadBufferSizeProp => expected.setProperty(name, atLeastOneIntProp)
         case KafkaConfig.OffsetsTopicPartitionsProp => expected.setProperty(name, atLeastOneIntProp)

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala
index 852fa3b..ca46ba9 100644
--- a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala
+++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala
@@ -17,13 +17,15 @@
 
 package kafka.server
 
+import java.util.Properties
+
+import junit.framework.Assert._
+import kafka.api.{ApiVersion, KAFKA_082}
+import kafka.utils.{TestUtils, Utils}
 import org.apache.kafka.common.config.ConfigException
+import org.apache.kafka.common.protocol.SecurityProtocol
 import org.junit.Test
-import junit.framework.Assert._
 import org.scalatest.junit.JUnit3Suite
-import kafka.utils.TestUtils
-import kafka.message.GZIPCompressionCodec
-import kafka.message.NoCompressionCodec
 
 class KafkaConfigTest extends JUnit3Suite {
 
@@ -34,7 +36,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals(60L * 60L * 1000L, cfg.logRetentionTimeMillis)
-
   }
   
   @Test
@@ -44,7 +45,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals(30 * 60L * 1000L, cfg.logRetentionTimeMillis)
-
   }
   
   @Test
@@ -54,7 +54,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals(30 * 60L * 1000L, cfg.logRetentionTimeMillis)
-
   }
   
   @Test
@@ -63,7 +62,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals(24 * 7 * 60L * 60L * 1000L, cfg.logRetentionTimeMillis)
-
   }
   
   @Test
@@ -74,7 +72,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals( 30 * 60L * 1000L, cfg.logRetentionTimeMillis)
-
   }
   
   @Test
@@ -85,37 +82,129 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals( 30 * 60L * 1000L, cfg.logRetentionTimeMillis)
-
   }
 
   @Test
   def testAdvertiseDefaults() {
-    val port = 9999
+    val port = "9999"
     val hostName = "fake-host"
-    
-    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = port)
+
+    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect)
+    props.remove("listeners")
     props.put("host.name", hostName)
-    
+    props.put("port", port)
     val serverConfig = KafkaConfig.fromProps(props)
-    
-    assertEquals(serverConfig.advertisedHostName, hostName)
-    assertEquals(serverConfig.advertisedPort, port)
+    val endpoints = serverConfig.advertisedListeners
+    val endpoint = endpoints.get(SecurityProtocol.PLAINTEXT).get
+    assertEquals(endpoint.host, hostName)
+    assertEquals(endpoint.port, port.toInt)
   }
 
   @Test
   def testAdvertiseConfigured() {
-    val port = 9999
+    val port = "9999"
     val advertisedHostName = "routable-host"
-    val advertisedPort = 1234
-    
-    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = port)
+    val advertisedPort = "1234"
+
+    val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect)
     props.put("advertised.host.name", advertisedHostName)
     props.put("advertised.port", advertisedPort.toString)
-    
+
     val serverConfig = KafkaConfig.fromProps(props)
+    val endpoints = serverConfig.advertisedListeners
+    val endpoint = endpoints.get(SecurityProtocol.PLAINTEXT).get
     
-    assertEquals(serverConfig.advertisedHostName, advertisedHostName)
-    assertEquals(serverConfig.advertisedPort, advertisedPort)
+    assertEquals(endpoint.host, advertisedHostName)
+    assertEquals(endpoint.port, advertisedPort.toInt)
+  }
+
+
+  @Test
+  def testDuplicateListeners() {
+    val props = new Properties()
+    props.put("broker.id", "1")
+    props.put("zookeeper.connect", "localhost:2181")
+
+    // listeners with duplicate port
+    props.put("listeners", "PLAINTEXT://localhost:9091,TRACE://localhost:9091")
+    assert(!isValidKafkaConfig(props))
+
+    // listeners with duplicate protocol
+    props.put("listeners", "PLAINTEXT://localhost:9091,PLAINTEXT://localhost:9092")
+    assert(!isValidKafkaConfig(props))
+
+    // advertised listeners with duplicate port
+    props.put("advertised.listeners", "PLAINTEXT://localhost:9091,TRACE://localhost:9091")
+    assert(!isValidKafkaConfig(props))
+  }
+
+  @Test
+  def testBadListenerProtocol() {
+    val props = new Properties()
+    props.put("broker.id", "1")
+    props.put("zookeeper.connect", "localhost:2181")
+    props.put("listeners", "BAD://localhost:9091")
+
+    assert(!isValidKafkaConfig(props))
+  }
+
+  @Test
+  def testListenerDefaults() {
+    val props = new Properties()
+    props.put("broker.id", "1")
+    props.put("zookeeper.connect", "localhost:2181")
+
+    // configuration with host and port, but no listeners
+    props.put("host.name", "myhost")
+    props.put("port", "1111")
+
+    val conf = KafkaConfig.fromProps(props)
+    assertEquals(Utils.listenerListToEndPoints("PLAINTEXT://myhost:1111"), conf.listeners)
+
+    // configuration with null host
+    props.remove("host.name")
+
+    val conf2 = KafkaConfig.fromProps(props)
+    assertEquals(Utils.listenerListToEndPoints("PLAINTEXT://:1111"), conf2.listeners)
+    assertEquals(Utils.listenerListToEndPoints("PLAINTEXT://:1111"), conf2.advertisedListeners)
+    assertEquals(null, conf2.listeners(SecurityProtocol.PLAINTEXT).host)
+
+    // configuration with advertised host and port, and no advertised listeners
+    props.put("advertised.host.name", "otherhost")
+    props.put("advertised.port", "2222")
+
+    val conf3 = KafkaConfig.fromProps(props)
+    assertEquals(conf3.advertisedListeners, Utils.listenerListToEndPoints("PLAINTEXT://otherhost:2222"))
+  }
+
+  @Test
+  def testVersionConfiguration() {
+    val props = new Properties()
+    props.put("broker.id", "1")
+    props.put("zookeeper.connect", "localhost:2181")
+    val conf = KafkaConfig.fromProps(props)
+    assertEquals(ApiVersion.latestVersion, conf.interBrokerProtocolVersion)
+
+    props.put("inter.broker.protocol.version","0.8.2.0")
+    val conf2 = KafkaConfig.fromProps(props)
+    assertEquals(KAFKA_082, conf2.interBrokerProtocolVersion)
+
+    // check that 0.8.2.0 is the same as 0.8.2.1
+    props.put("inter.broker.protocol.version","0.8.2.1")
+    val conf3 = KafkaConfig.fromProps(props)
+    assertEquals(KAFKA_082, conf3.interBrokerProtocolVersion)
+
+    //check that latest is newer than 0.8.2
+    assert(ApiVersion.latestVersion.onOrAfter(conf3.interBrokerProtocolVersion))
+  }
+
+  private def isValidKafkaConfig(props: Properties): Boolean = {
+    try {
+      KafkaConfig.fromProps(props)
+      true
+    } catch {
+      case e: IllegalArgumentException => false
+    }
   }
 
   @Test
@@ -161,7 +250,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals(30 * 60L * 1000L, cfg.logRollTimeMillis)
-
   }
   
   @Test
@@ -172,7 +260,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals( 30 * 60L * 1000L, cfg.logRollTimeMillis)
-
   }
     
   @Test
@@ -181,7 +268,6 @@ class KafkaConfigTest extends JUnit3Suite {
 
     val cfg = KafkaConfig.fromProps(props)
     assertEquals(24 * 7 * 60L * 60L * 1000L, cfg.logRollTimeMillis)
-
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala b/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala
index 3d4258f..d9bdcef 100644
--- a/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala
+++ b/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala
@@ -17,15 +17,16 @@
 
 package kafka.server
 
-import org.scalatest.junit.JUnit3Suite
-import kafka.zk.ZooKeeperTestHarness
-import kafka.utils.TestUtils._
 import junit.framework.Assert._
-import kafka.utils.{ZkUtils, Utils, TestUtils}
-import kafka.controller.{ControllerContext, LeaderIsrAndControllerEpoch, ControllerChannelManager}
+import kafka.api._
 import kafka.cluster.Broker
 import kafka.common.ErrorMapping
-import kafka.api._
+import kafka.controller.{ControllerChannelManager, ControllerContext, LeaderIsrAndControllerEpoch}
+import kafka.utils.TestUtils._
+import kafka.utils.{TestUtils, Utils, ZkUtils}
+import kafka.zk.ZooKeeperTestHarness
+import org.apache.kafka.common.protocol.SecurityProtocol
+import org.scalatest.junit.JUnit3Suite
 
 class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness {
   val brokerId1 = 0
@@ -116,8 +117,11 @@ class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness {
 
     // start another controller
     val controllerId = 2
+
     val controllerConfig = KafkaConfig.fromProps(TestUtils.createBrokerConfig(controllerId, zkConnect))
     val brokers = servers.map(s => new Broker(s.config.brokerId, "localhost", s.boundPort()))
+    val brokerEndPoints = brokers.map(b => b.getBrokerEndPoint(SecurityProtocol.PLAINTEXT))
+
     val controllerContext = new ControllerContext(zkClient, 6000)
     controllerContext.liveBrokers = brokers.toSet
     val controllerChannelManager = new ControllerChannelManager(controllerContext, controllerConfig)
@@ -127,7 +131,7 @@ class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness {
     leaderAndIsr.put((topic, partitionId),
       new LeaderIsrAndControllerEpoch(new LeaderAndIsr(brokerId2, List(brokerId1, brokerId2)), 2))
     val partitionStateInfo = leaderAndIsr.mapValues(l => new PartitionStateInfo(l, Set(0,1))).toMap
-    val leaderAndIsrRequest = new LeaderAndIsrRequest(partitionStateInfo, brokers.toSet, controllerId,
+    val leaderAndIsrRequest = new LeaderAndIsrRequest(partitionStateInfo, brokerEndPoints.toSet, controllerId,
                                                       staleControllerEpoch, 0, "")
 
     controllerChannelManager.sendRequest(brokerId2, leaderAndIsrRequest, staleControllerEpochCallback)

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/core/src/test/scala/unit/kafka/utils/TestUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/unit/kafka/utils/TestUtils.scala b/core/src/test/scala/unit/kafka/utils/TestUtils.scala
index f451825..a8ed142 100644
--- a/core/src/test/scala/unit/kafka/utils/TestUtils.scala
+++ b/core/src/test/scala/unit/kafka/utils/TestUtils.scala
@@ -18,12 +18,12 @@
 package kafka.utils
 
 import java.io._
-import java.net._
 import java.nio._
 import java.nio.channels._
 import java.util.Random
 import java.util.Properties
 
+import org.apache.kafka.common.protocol.SecurityProtocol
 import org.apache.kafka.common.utils.Utils._
 
 import collection.mutable.ListBuffer
@@ -45,7 +45,6 @@ import kafka.log._
 import junit.framework.AssertionFailedError
 import junit.framework.Assert._
 import org.apache.kafka.clients.producer.KafkaProducer
-import collection.Iterable
 
 import scala.collection.Map
 import org.apache.kafka.clients.consumer.KafkaConsumer
@@ -150,8 +149,7 @@ object TestUtils extends Logging {
     port: Int = RandomPort): Properties = {
     val props = new Properties
     if (nodeId >= 0) props.put("broker.id", nodeId.toString)
-    props.put("host.name", "localhost")
-    props.put("port", port.toString)
+    props.put("listeners", "PLAINTEXT://localhost:"+port.toString)
     props.put("log.dir", TestUtils.tempDir().getAbsolutePath)
     props.put("zookeeper.connect", zkConnect)
     props.put("replica.socket.timeout.ms", "1500")
@@ -466,13 +464,13 @@ object TestUtils extends Logging {
   }
 
   def createBrokersInZk(zkClient: ZkClient, ids: Seq[Int]): Seq[Broker] = {
-    val brokers = ids.map(id => new Broker(id, "localhost", 6667))
-    brokers.foreach(b => ZkUtils.registerBrokerInZk(zkClient, b.id, b.host, b.port, 6000, jmxPort = -1))
+    val brokers = ids.map(id => new Broker(id, "localhost", 6667, SecurityProtocol.PLAINTEXT))
+    brokers.foreach(b => ZkUtils.registerBrokerInZk(zkClient, b.id, "localhost", 6667, b.endPoints, 6000, jmxPort = -1))
     brokers
   }
 
   def deleteBrokersInZk(zkClient: ZkClient, ids: Seq[Int]): Seq[Broker] = {
-    val brokers = ids.map(id => new Broker(id, "localhost", 6667))
+    val brokers = ids.map(id => new Broker(id, "localhost", 6667, SecurityProtocol.PLAINTEXT))
     brokers.foreach(b => ZkUtils.deletePath(zkClient, ZkUtils.BrokerIdsPath + "/" + b))
     brokers
   }

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/README.txt
----------------------------------------------------------------------
diff --git a/system_test/README.txt b/system_test/README.txt
index 0e469e3..e96d15d 100644
--- a/system_test/README.txt
+++ b/system_test/README.txt
@@ -1,7 +1,23 @@
 # ==========================
-# Known Issues:
+# Quick Start
 # ==========================
-1. This test framework currently doesn't support MacOS due to different "ps" argument options from Linux. The correct ps execution is required to terminate the background running processes properly.
+
+* Please note that the following commands should be executed after downloading the kafka source code to build all the required binaries:
+  1. <kafka install dir>/ $ ./gradlew jar
+
+  Now you are ready to follow the steps below.
+  1. Update system_test/cluster_config.json for "kafka_home" & "java_home" specific to your environment
+  2. Edit system_test/replication_testsuite/testcase_1/testcase_1_properties.json and update "broker-list" to the proper settings of your environment. (If this test is to be run in a single localhost, no change is required for this.)
+  3. Create testcase_to_run.json file with the tests you wish to run. You can start by just copying one of our preset test suites. For example:
+    cp testcase_to_run_sanity.json testcase_to_run.json
+  4. To run the test, go to <kafka_home>/system_test and run the following command:
+     $ python -u -B system_test_runner.py 2>&1 | tee system_test_output.log
+  5. To turn on debugging, update system_test/logging.conf by changing the level in handlers session from INFO to DEBUG.
+
+  We also have three built-in test suites you can use after you set your environment (steps 1 and 2 above):
+  * run_sanity.sh - will run a single basic replication test
+  * run_all_replica.sh - will run all replication tests
+  * run_all.sh - will run all replication and mirror_maker tests
 
 # ==========================
 # Overview
@@ -44,20 +60,6 @@ The framework has the following levels:
    ** Please note the test framework will look for a specific prefix of the directories under system_test/<test suite dir>/ to determine what test cases are available. The prefix of <testcase directory name> can be defined in SystemTestEnv class (system_test_env.py)
 
 # ==========================
-# Quick Start
-# ==========================
-
-* Please note that the following commands should be executed after downloading the kafka source code to build all the required binaries:
-  1. <kafka install dir>/ $ ./gradlew jar
-
-  Now you are ready to follow the steps below.
-  1. Update system_test/cluster_config.json for "kafka_home" & "java_home" specific to your environment
-  2. Edit system_test/replication_testsuite/testcase_1/testcase_1_properties.json and update "broker-list" to the proper settings of your environment. (If this test is to be run in a single localhost, no change is required for this.)
-  3. To run the test, go to <kafka_home>/system_test and run the following command:
-     $ python -u -B system_test_runner.py 2>&1 | tee system_test_output.log
-  4. To turn on debugging, update system_test/logging.conf by changing the level in handlers session from INFO to DEBUG.
-
-# ==========================
 # Adding Test Case
 # ==========================
 

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0001/testcase_0001_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0001/testcase_0001_properties.json b/system_test/replication_testsuite/testcase_0001/testcase_0001_properties.json
index 250ffe0..7a32e8d 100644
--- a/system_test/replication_testsuite/testcase_0001/testcase_0001_properties.json
+++ b/system_test/replication_testsuite/testcase_0001/testcase_0001_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:sync, acks:-1, comp:0",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0002/testcase_0002_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0002/testcase_0002_properties.json b/system_test/replication_testsuite/testcase_0002/testcase_0002_properties.json
index 3f9e7d0..2929414 100644
--- a/system_test/replication_testsuite/testcase_0002/testcase_0002_properties.json
+++ b/system_test/replication_testsuite/testcase_0002/testcase_0002_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:sync, acks:-1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0003/testcase_0003_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0003/testcase_0003_properties.json b/system_test/replication_testsuite/testcase_0003/testcase_0003_properties.json
index b10c626..d9818e1 100644
--- a/system_test/replication_testsuite/testcase_0003/testcase_0003_properties.json
+++ b/system_test/replication_testsuite/testcase_0003/testcase_0003_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:sync, acks:1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0004/testcase_0004_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0004/testcase_0004_properties.json b/system_test/replication_testsuite/testcase_0004/testcase_0004_properties.json
index 22c0802..fe42626 100644
--- a/system_test/replication_testsuite/testcase_0004/testcase_0004_properties.json
+++ b/system_test/replication_testsuite/testcase_0004/testcase_0004_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:async, acks:-1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0005/testcase_0005_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0005/testcase_0005_properties.json b/system_test/replication_testsuite/testcase_0005/testcase_0005_properties.json
index 1317d84..37d180a 100644
--- a/system_test/replication_testsuite/testcase_0005/testcase_0005_properties.json
+++ b/system_test/replication_testsuite/testcase_0005/testcase_0005_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:async, acks:1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0006/testcase_0006_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0006/testcase_0006_properties.json b/system_test/replication_testsuite/testcase_0006/testcase_0006_properties.json
index d313465..dae8f76 100644
--- a/system_test/replication_testsuite/testcase_0006/testcase_0006_properties.json
+++ b/system_test/replication_testsuite/testcase_0006/testcase_0006_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:sync, acks:-1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0007/testcase_0007_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0007/testcase_0007_properties.json b/system_test/replication_testsuite/testcase_0007/testcase_0007_properties.json
index bd00f13..b6f513f 100644
--- a/system_test/replication_testsuite/testcase_0007/testcase_0007_properties.json
+++ b/system_test/replication_testsuite/testcase_0007/testcase_0007_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:async, acks:-1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0008/testcase_0008_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0008/testcase_0008_properties.json b/system_test/replication_testsuite/testcase_0008/testcase_0008_properties.json
index ac5c13b..4954752 100644
--- a/system_test/replication_testsuite/testcase_0008/testcase_0008_properties.json
+++ b/system_test/replication_testsuite/testcase_0008/testcase_0008_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:sync, acks:1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0009/testcase_0009_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0009/testcase_0009_properties.json b/system_test/replication_testsuite/testcase_0009/testcase_0009_properties.json
index 6dadd7a..0476b12 100644
--- a/system_test/replication_testsuite/testcase_0009/testcase_0009_properties.json
+++ b/system_test/replication_testsuite/testcase_0009/testcase_0009_properties.json
@@ -5,7 +5,7 @@
                   "04":"At the end it verifies the log size and contents",
                   "05":"Use a consumer to verify no message loss.",
                   "06":"Producer dimensions : mode:async, acks:1, comp:1",
-                  "07":"Log segment size    : 20480"
+                  "07":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -29,7 +29,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -40,7 +40,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -51,7 +51,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0101/testcase_0101_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0101/testcase_0101_properties.json b/system_test/replication_testsuite/testcase_0101/testcase_0101_properties.json
index 614cb1c..b9517b4 100644
--- a/system_test/replication_testsuite/testcase_0101/testcase_0101_properties.json
+++ b/system_test/replication_testsuite/testcase_0101/testcase_0101_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:sync, acks:-1, comp:0",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0102/testcase_0102_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0102/testcase_0102_properties.json b/system_test/replication_testsuite/testcase_0102/testcase_0102_properties.json
index fbf0177..3eb39a2 100644
--- a/system_test/replication_testsuite/testcase_0102/testcase_0102_properties.json
+++ b/system_test/replication_testsuite/testcase_0102/testcase_0102_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:sync, acks:-1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0103/testcase_0103_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0103/testcase_0103_properties.json b/system_test/replication_testsuite/testcase_0103/testcase_0103_properties.json
index 317a6e3..6bfc757 100644
--- a/system_test/replication_testsuite/testcase_0103/testcase_0103_properties.json
+++ b/system_test/replication_testsuite/testcase_0103/testcase_0103_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:sync, acks:1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0104/testcase_0104_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0104/testcase_0104_properties.json b/system_test/replication_testsuite/testcase_0104/testcase_0104_properties.json
index d1a790b..1cfe71c 100644
--- a/system_test/replication_testsuite/testcase_0104/testcase_0104_properties.json
+++ b/system_test/replication_testsuite/testcase_0104/testcase_0104_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:async, acks:-1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0105/testcase_0105_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0105/testcase_0105_properties.json b/system_test/replication_testsuite/testcase_0105/testcase_0105_properties.json
index 8f4f8bf..13f3ac0 100644
--- a/system_test/replication_testsuite/testcase_0105/testcase_0105_properties.json
+++ b/system_test/replication_testsuite/testcase_0105/testcase_0105_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:async, acks:1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "1",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0106/testcase_0106_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0106/testcase_0106_properties.json b/system_test/replication_testsuite/testcase_0106/testcase_0106_properties.json
index a307f85..ccd4774 100644
--- a/system_test/replication_testsuite/testcase_0106/testcase_0106_properties.json
+++ b/system_test/replication_testsuite/testcase_0106/testcase_0106_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:sync, acks:-1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0107/testcase_0107_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0107/testcase_0107_properties.json b/system_test/replication_testsuite/testcase_0107/testcase_0107_properties.json
index 827319e..b1da75a 100644
--- a/system_test/replication_testsuite/testcase_0107/testcase_0107_properties.json
+++ b/system_test/replication_testsuite/testcase_0107/testcase_0107_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:async, acks:-1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0108/testcase_0108_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0108/testcase_0108_properties.json b/system_test/replication_testsuite/testcase_0108/testcase_0108_properties.json
index ae015cd..359abe7 100644
--- a/system_test/replication_testsuite/testcase_0108/testcase_0108_properties.json
+++ b/system_test/replication_testsuite/testcase_0108/testcase_0108_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:sync, acks:1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_0109/testcase_0109_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_0109/testcase_0109_properties.json b/system_test/replication_testsuite/testcase_0109/testcase_0109_properties.json
index 1951a12..90ea441 100644
--- a/system_test/replication_testsuite/testcase_0109/testcase_0109_properties.json
+++ b/system_test/replication_testsuite/testcase_0109/testcase_0109_properties.json
@@ -8,7 +8,7 @@
                   "07":"At the end it verifies the log size and contents",
                   "08":"Use a consumer to verify no message loss.",
                   "09":"Producer dimensions : mode:async, acks:1, comp:1",
-                  "10":"Log segment size    : 20480"
+                  "10":"Log segment size    : 10000000"
   },
   "testcase_args": {
     "broker_type": "leader",
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -43,7 +43,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",
@@ -54,7 +54,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "default.replication.factor": "3",
       "num.partitions": "3",

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/replication_testsuite/testcase_1/testcase_1_properties.json
----------------------------------------------------------------------
diff --git a/system_test/replication_testsuite/testcase_1/testcase_1_properties.json b/system_test/replication_testsuite/testcase_1/testcase_1_properties.json
index 0c6d7a3..680213f 100644
--- a/system_test/replication_testsuite/testcase_1/testcase_1_properties.json
+++ b/system_test/replication_testsuite/testcase_1/testcase_1_properties.json
@@ -32,7 +32,7 @@
       "entity_id": "1",
       "port": "9091",
       "broker.id": "1",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_1_logs",
       "log_filename": "kafka_server_9091.log",
       "config_filename": "kafka_server_9091.properties"
@@ -41,7 +41,7 @@
       "entity_id": "2",
       "port": "9092",
       "broker.id": "2",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_2_logs",
       "log_filename": "kafka_server_9092.log",
       "config_filename": "kafka_server_9092.properties"
@@ -50,7 +50,7 @@
       "entity_id": "3",
       "port": "9093",
       "broker.id": "3",
-      "log.segment.bytes": "20480",
+      "log.segment.bytes": "10000000",
       "log.dir": "/tmp/kafka_server_3_logs",
       "log_filename": "kafka_server_9093.log",
       "config_filename": "kafka_server_9093.properties"

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/run_all.sh
----------------------------------------------------------------------
diff --git a/system_test/run_all.sh b/system_test/run_all.sh
new file mode 100755
index 0000000..0c5c02d
--- /dev/null
+++ b/system_test/run_all.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+cp testcase_to_run_all.json testcase_to_run.json
+
+python -B system_test_runner.py
+
+

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/run_all_replica.sh
----------------------------------------------------------------------
diff --git a/system_test/run_all_replica.sh b/system_test/run_all_replica.sh
new file mode 100755
index 0000000..b3bce84
--- /dev/null
+++ b/system_test/run_all_replica.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+cp testcase_to_run_all_replica.json testcase_to_run.json
+
+python -B system_test_runner.py
+
+

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/run_sanity.sh
----------------------------------------------------------------------
diff --git a/system_test/run_sanity.sh b/system_test/run_sanity.sh
index 9e8042a..a301b96 100755
--- a/system_test/run_sanity.sh
+++ b/system_test/run_sanity.sh
@@ -1,8 +1,5 @@
 #!/bin/bash
 
-my_ts=`date +"%s"`
-
-cp testcase_to_run.json testcase_to_run.json_${my_ts}
 cp testcase_to_run_sanity.json testcase_to_run.json
 
 python -B system_test_runner.py

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/testcase_to_run.json
----------------------------------------------------------------------
diff --git a/system_test/testcase_to_run.json b/system_test/testcase_to_run.json
deleted file mode 100644
index c6cf17e..0000000
--- a/system_test/testcase_to_run.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "ReplicaBasicTest"   : [
-        "testcase_1"
-    ]
-}

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/testcase_to_run_all_replica.json
----------------------------------------------------------------------
diff --git a/system_test/testcase_to_run_all_replica.json b/system_test/testcase_to_run_all_replica.json
new file mode 100644
index 0000000..34841f5
--- /dev/null
+++ b/system_test/testcase_to_run_all_replica.json
@@ -0,0 +1,123 @@
+{
+    "ReplicaBasicTest"  : [
+        "testcase_0001",
+        "testcase_0002",
+        "testcase_0003",
+        "testcase_0004",
+        "testcase_0005",
+        "testcase_0006",
+        "testcase_0007",
+        "testcase_0008",
+        "testcase_0009",
+        "testcase_0010",
+
+        "testcase_0021",
+        "testcase_0022",
+        "testcase_0023",
+
+        "testcase_0101",
+        "testcase_0102",
+        "testcase_0103",
+        "testcase_0104",
+        "testcase_0105",
+        "testcase_0106",
+        "testcase_0107",
+        "testcase_0108",
+        "testcase_0109",
+        "testcase_0110",
+
+        "testcase_10101",
+        "testcase_10102",
+        "testcase_10103",
+        "testcase_10104",
+        "testcase_10105",
+        "testcase_10106",
+        "testcase_10107",
+        "testcase_10108",
+        "testcase_10109",
+        "testcase_10110",
+
+        "testcase_0111",
+        "testcase_0112",
+        "testcase_0113",
+        "testcase_0114",
+        "testcase_0115",
+        "testcase_0116",
+        "testcase_0117",
+        "testcase_0118",
+
+        "testcase_0121",
+        "testcase_0122",
+        "testcase_0123",
+        "testcase_0124",
+        "testcase_0125",
+        "testcase_0126",
+        "testcase_0127",
+
+        "testcase_0131",
+        "testcase_0132",
+        "testcase_0133",
+        "testcase_0134",
+
+        "testcase_10131",
+        "testcase_10132",
+        "testcase_10133",
+        "testcase_10134",
+
+        "testcase_0151",
+        "testcase_0152",
+        "testcase_0153",
+        "testcase_0154",
+        "testcase_0155",
+        "testcase_0156",
+        "testcase_0157",
+        "testcase_0158",
+
+        "testcase_0201",
+        "testcase_0202",
+        "testcase_0203",
+        "testcase_0204",
+        "testcase_0205",
+        "testcase_0206",
+        "testcase_0207",
+        "testcase_0208",
+
+        "testcase_0251",
+        "testcase_0252",
+        "testcase_0253",
+        "testcase_0254",
+        "testcase_0255",
+        "testcase_0256",
+        "testcase_0257",
+        "testcase_0258",
+
+        "testcase_0301",
+        "testcase_0302",
+        "testcase_0303",
+        "testcase_0304",
+        "testcase_0305",
+        "testcase_0306",
+        "testcase_0307",
+        "testcase_0308",
+
+        "testcase_4001",
+        "testcase_4002",
+        "testcase_4003",
+        "testcase_4004",
+        "testcase_4005",
+        "testcase_4006",
+        "testcase_4007",
+        "testcase_4008",
+
+        "testcase_4011",
+        "testcase_4012",
+        "testcase_4013",
+        "testcase_4014",
+        "testcase_4015",
+        "testcase_4016",
+        "testcase_4017",
+        "testcase_4018",
+
+        "testcase_9051"
+    ]
+}

http://git-wip-us.apache.org/repos/asf/kafka/blob/53f31432/system_test/utils/kafka_system_test_utils.py
----------------------------------------------------------------------
diff --git a/system_test/utils/kafka_system_test_utils.py b/system_test/utils/kafka_system_test_utils.py
index 41d511c..a9b73f7 100644
--- a/system_test/utils/kafka_system_test_utils.py
+++ b/system_test/utils/kafka_system_test_utils.py
@@ -436,6 +436,7 @@ def generate_overriden_props_files(testsuitePathname, testcaseEnv, systemTestEnv
                     addedCSVConfig["kafka.metrics.polling.interval.secs"] = "5"
                     addedCSVConfig["kafka.metrics.reporters"] = "kafka.metrics.KafkaCSVMetricsReporter"
                     addedCSVConfig["kafka.csv.metrics.reporter.enabled"] = "true"
+                    addedCSVConfig["listeners"] = "PLAINTEXT://localhost:"+tcCfg["port"]
 
                     if brokerVersion == "0.7":
                         addedCSVConfig["brokerid"] = tcCfg["brokerid"]


Mime
View raw message