geode-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dschnei...@apache.org
Subject [geode] branch feature/GEODE-6291 updated: got the integration tests passing
Date Tue, 29 Jan 2019 01:28:22 GMT
This is an automated email from the ASF dual-hosted git repository.

dschneider pushed a commit to branch feature/GEODE-6291
in repository https://gitbox.apache.org/repos/asf/geode.git


The following commit(s) were added to refs/heads/feature/GEODE-6291 by this push:
     new 5651519  got the integration tests passing
5651519 is described below

commit 565151959e6a587ff6c8c518a2c4c2c705ec5f02
Author: Darrel Schneider <dschneider@pivotal.io>
AuthorDate: Mon Jan 28 17:27:50 2019 -0800

    got the integration tests passing
---
 .../jdbc/JdbcAsyncWriterIntegrationTest.java       | 19 +++++++---
 .../connectors/jdbc/JdbcLoaderIntegrationTest.java | 42 ++++++++++++++++------
 .../connectors/jdbc/JdbcWriterIntegrationTest.java | 20 ++++++++---
 .../jdbc/MySqlJdbcLoaderIntegrationTest.java       | 23 ++++++++++++
 .../jdbc/PostgresJdbcLoaderIntegrationTest.java    | 25 ++++++++++++-
 .../jdbc/internal/TestConfigService.java           | 22 ++++++++----
 6 files changed, 123 insertions(+), 28 deletions(-)

diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
index 3b51e2b..15f2b68 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
@@ -19,9 +19,12 @@ import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
 import static org.assertj.core.api.Assertions.assertThat;
 
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
+import java.util.List;
 
 import org.awaitility.core.ThrowingRunnable;
 import org.junit.After;
@@ -35,6 +38,7 @@ import org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TestConfigService;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.WritablePdxInstance;
@@ -73,7 +77,11 @@ public abstract class JdbcAsyncWriterIntegrationTest {
   }
 
   private void setupRegion(String ids) throws RegionMappingExistsException {
-    employees = createRegionWithJDBCAsyncWriter(REGION_TABLE_NAME, ids);
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("", "", "id", JDBCType.VARCHAR.name(), false),
+        new FieldMapping("", "", "name", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "age", JDBCType.INTEGER.name(), true));
+    employees = createRegionWithJDBCAsyncWriter(REGION_TABLE_NAME, ids, fieldMappings);
   }
 
   @After
@@ -315,9 +323,10 @@ public abstract class JdbcAsyncWriterIntegrationTest {
     assertThat(resultSet.getObject("age")).isEqualTo(employee.getAge());
   }
 
-  private Region<Object, PdxInstance> createRegionWithJDBCAsyncWriter(String regionName,
String ids)
+  private Region<Object, PdxInstance> createRegionWithJDBCAsyncWriter(String regionName,
String ids,
+      List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
-    jdbcWriter = new JdbcAsyncWriter(createSqlHandler(ids), cache);
+    jdbcWriter = new JdbcAsyncWriter(createSqlHandler(ids, fieldMappings), cache);
     cache.createAsyncEventQueueFactory().setBatchSize(1).setBatchTimeInterval(1)
         .create("jdbcAsyncQueue", jdbcWriter);
 
@@ -333,10 +342,10 @@ public abstract class JdbcAsyncWriterIntegrationTest {
     assertThat(size).isEqualTo(expected);
   }
 
-  private SqlHandler createSqlHandler(String ids)
+  private SqlHandler createSqlHandler(String ids, List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
     return new SqlHandler(new TableMetaDataManager(),
-        TestConfigService.getTestConfigService(ids),
+        TestConfigService.getTestConfigService(ids, fieldMappings),
         testDataSourceFactory);
   }
 
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
index b9295e8..b1a8d00 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
@@ -19,10 +19,13 @@ import static org.assertj.core.api.Assertions.assertThat;
 
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
 import java.util.Date;
+import java.util.List;
 
 import org.junit.After;
 import org.junit.Before;
@@ -37,6 +40,7 @@ import org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TestConfigService;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.util.BlobHelper;
 import org.apache.geode.pdx.PdxInstance;
@@ -84,11 +88,21 @@ public abstract class JdbcLoaderIntegrationTest {
   protected abstract void createClassWithSupportedPdxFieldsTable(Statement statement,
       String tableName) throws SQLException;
 
+  protected abstract List<FieldMapping> getSupportedPdxFieldsTableFieldMappings();
+
   private void createEmployeeTable() throws Exception {
     statement.execute("Create Table " + REGION_TABLE_NAME
         + " (id varchar(10) primary key not null, name varchar(10), age int)");
   }
 
+  protected List<FieldMapping> getEmployeeTableFieldMappings() {
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("", "", "id", JDBCType.VARCHAR.name(), false),
+        new FieldMapping("", "", "name", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "age", JDBCType.INTEGER.name(), true));
+    return fieldMappings;
+  }
+
   private void createEmployeeTableWithSchema() throws Exception {
     statement.execute("CREATE SCHEMA " + SCHEMA_NAME);
     statement.execute("Create Table " + SCHEMA_NAME + '.' + REGION_TABLE_NAME
@@ -116,7 +130,8 @@ public abstract class JdbcLoaderIntegrationTest {
     statement
         .execute("Insert into " + REGION_TABLE_NAME + "(id, name, age) values('1', 'Emp1',
21)");
     Region<String, Employee> region =
-        createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName());
+        createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName(),
+            getEmployeeTableFieldMappings());
     createPdxType();
 
     Employee value = region.get("1");
@@ -132,7 +147,8 @@ public abstract class JdbcLoaderIntegrationTest {
         .execute("Insert into " + REGION_TABLE_NAME + "(id, name, age) values('1', 'Emp1',
21)");
     String ids = "id,name";
     Region<String, Employee> region =
-        createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName(), ids, null,
null);
+        createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName(), ids, null,
null,
+            getEmployeeTableFieldMappings());
     createPdxType();
 
     PdxInstance key =
@@ -163,7 +179,7 @@ public abstract class JdbcLoaderIntegrationTest {
     }
     Region<String, Employee> region =
         createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName(), ids, catalog,
-            schema);
+            schema, getEmployeeTableFieldMappings());
     createPdxType();
 
     PdxInstance key =
@@ -185,7 +201,8 @@ public abstract class JdbcLoaderIntegrationTest {
         createClassWithSupportedPdxFieldsForInsert("1");
     insertIntoClassWithSupportedPdxFieldsTable("1", classWithSupportedPdxFields);
     Region<String, ClassWithSupportedPdxFields> region = createRegionWithJDBCLoader(
-        REGION_TABLE_NAME, ClassWithSupportedPdxFields.class.getName());
+        REGION_TABLE_NAME, ClassWithSupportedPdxFields.class.getName(),
+        getSupportedPdxFieldsTableFieldMappings());
 
     createPdxType(classWithSupportedPdxFields);
 
@@ -205,33 +222,36 @@ public abstract class JdbcLoaderIntegrationTest {
   @Test
   public void verifySimpleMiss() throws Exception {
     createEmployeeTable();
-    Region<String, PdxInstance> region = createRegionWithJDBCLoader(REGION_TABLE_NAME,
null);
+    Region<String, PdxInstance> region =
+        createRegionWithJDBCLoader(REGION_TABLE_NAME, null, getEmployeeTableFieldMappings());
     PdxInstance pdx = region.get("1");
     assertThat(pdx).isNull();
   }
 
   protected SqlHandler createSqlHandler(String pdxClassName, String ids, String catalog,
-      String schema)
+      String schema, List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
     return new SqlHandler(new TableMetaDataManager(),
         TestConfigService.getTestConfigService((InternalCache) cache, pdxClassName, ids,
catalog,
-            schema),
+            schema, fieldMappings),
         testDataSourceFactory);
   }
 
   protected <K, V> Region<K, V> createRegionWithJDBCLoader(String regionName,
String pdxClassName,
-      String ids, String catalog, String schema)
+      String ids, String catalog, String schema, List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
     JdbcLoader<K, V> jdbcLoader =
-        new JdbcLoader<>(createSqlHandler(pdxClassName, ids, catalog, schema), cache);
+        new JdbcLoader<>(createSqlHandler(pdxClassName, ids, catalog, schema, fieldMappings),
+            cache);
     RegionFactory<K, V> regionFactory = cache.createRegionFactory(REPLICATE);
     regionFactory.setCacheLoader(jdbcLoader);
     return regionFactory.create(regionName);
   }
 
-  protected <K, V> Region<K, V> createRegionWithJDBCLoader(String regionName,
String pdxClassName)
+  protected <K, V> Region<K, V> createRegionWithJDBCLoader(String regionName,
String pdxClassName,
+      List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
-    return createRegionWithJDBCLoader(regionName, pdxClassName, null, null, null);
+    return createRegionWithJDBCLoader(regionName, pdxClassName, null, null, null, fieldMappings);
   }
 
   protected ClassWithSupportedPdxFields createClassWithSupportedPdxFieldsForInsert(String
key) {
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
index 445b002..9cfe1dd 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
@@ -18,10 +18,13 @@ import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.catchThrowable;
 
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.junit.After;
@@ -36,6 +39,7 @@ import org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TestConfigService;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.WritablePdxInstance;
@@ -103,7 +107,12 @@ public abstract class JdbcWriterIntegrationTest {
 
   protected void sharedRegionSetup(String ids, String catalog, String schema)
       throws RegionMappingExistsException {
-    employees = createRegionWithJDBCSynchronousWriter(REGION_TABLE_NAME, ids, catalog, schema);
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("", "", "id", JDBCType.VARCHAR.name(), false),
+        new FieldMapping("", "", "name", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "age", JDBCType.INTEGER.name(), true));
+    employees = createRegionWithJDBCSynchronousWriter(REGION_TABLE_NAME, ids, catalog, schema,
+        fieldMappings);
   }
 
   protected void setupRegionWithSchema(String ids) throws RegionMappingExistsException {
@@ -389,9 +398,9 @@ public abstract class JdbcWriterIntegrationTest {
   }
 
   protected Region<Object, PdxInstance> createRegionWithJDBCSynchronousWriter(String
regionName,
-      String ids, String catalog, String schema)
+      String ids, String catalog, String schema, List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
-    jdbcWriter = new JdbcWriter(createSqlHandler(ids, catalog, schema), cache);
+    jdbcWriter = new JdbcWriter(createSqlHandler(ids, catalog, schema, fieldMappings), cache);
 
     RegionFactory<Object, PdxInstance> regionFactory =
         cache.createRegionFactory(RegionShortcut.REPLICATE);
@@ -406,10 +415,11 @@ public abstract class JdbcWriterIntegrationTest {
     assertThat(size).isEqualTo(expected);
   }
 
-  protected SqlHandler createSqlHandler(String ids, String catalog, String schema)
+  protected SqlHandler createSqlHandler(String ids, String catalog, String schema,
+      List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
     return new SqlHandler(new TableMetaDataManager(),
-        TestConfigService.getTestConfigService(cache, null, ids, catalog, schema),
+        TestConfigService.getTestConfigService(cache, null, ids, catalog, schema, fieldMappings),
         testDataSourceFactory);
   }
 
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/MySqlJdbcLoaderIntegrationTest.java
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/MySqlJdbcLoaderIntegrationTest.java
index 14562d3..a6af3a6 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/MySqlJdbcLoaderIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/MySqlJdbcLoaderIntegrationTest.java
@@ -16,11 +16,15 @@ package org.apache.geode.connectors.jdbc;
 
 import java.net.URL;
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
+import java.util.List;
 
 import org.junit.ClassRule;
 
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.test.junit.rules.DatabaseConnectionRule;
 import org.apache.geode.test.junit.rules.MySqlConnectionRule;
 
@@ -54,6 +58,25 @@ public class MySqlJdbcLoaderIntegrationTest extends JdbcLoaderIntegrationTest
{
   }
 
   @Override
+  protected List<FieldMapping> getSupportedPdxFieldsTableFieldMappings() {
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("", "", "id", JDBCType.VARCHAR.name(), false),
+        new FieldMapping("", "", "aboolean", JDBCType.SMALLINT.name(), true),
+        new FieldMapping("", "", "abyte", JDBCType.SMALLINT.name(), true),
+        new FieldMapping("", "", "ashort", JDBCType.SMALLINT.name(), true),
+        new FieldMapping("", "", "anint", JDBCType.INTEGER.name(), true),
+        new FieldMapping("", "", "along", JDBCType.BIGINT.name(), true),
+        new FieldMapping("", "", "afloat", JDBCType.FLOAT.name(), true),
+        new FieldMapping("", "", "adouble", JDBCType.FLOAT.name(), true),
+        new FieldMapping("", "", "astring", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "adate", JDBCType.TIMESTAMP.name(), true),
+        new FieldMapping("", "", "anobject", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "abytearray", JDBCType.BLOB.name(), true),
+        new FieldMapping("", "", "achar", JDBCType.CHAR.name(), true));
+    return fieldMappings;
+  }
+
+  @Override
   protected boolean vendorSupportsSchemas() {
     return false;
   }
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
index 220ebfc..808ee94 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
@@ -18,13 +18,17 @@ import static org.assertj.core.api.Assertions.assertThat;
 
 import java.net.URL;
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
+import java.util.List;
 
 import org.junit.ClassRule;
 import org.junit.Test;
 
 import org.apache.geode.cache.Region;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.test.junit.rules.DatabaseConnectionRule;
 import org.apache.geode.test.junit.rules.PostgresConnectionRule;
@@ -58,6 +62,25 @@ public class PostgresJdbcLoaderIntegrationTest extends JdbcLoaderIntegrationTest
   }
 
   @Override
+  protected List<FieldMapping> getSupportedPdxFieldsTableFieldMappings() {
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("", "", "id", JDBCType.VARCHAR.name(), false),
+        new FieldMapping("", "", "aboolean", JDBCType.SMALLINT.name(), true),
+        new FieldMapping("", "", "abyte", JDBCType.SMALLINT.name(), true),
+        new FieldMapping("", "", "ashort", JDBCType.SMALLINT.name(), true),
+        new FieldMapping("", "", "anint", JDBCType.INTEGER.name(), true),
+        new FieldMapping("", "", "along", JDBCType.BIGINT.name(), true),
+        new FieldMapping("", "", "afloat", JDBCType.FLOAT.name(), true),
+        new FieldMapping("", "", "adouble", JDBCType.FLOAT.name(), true),
+        new FieldMapping("", "", "astring", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "adate", JDBCType.TIMESTAMP.name(), true),
+        new FieldMapping("", "", "anobject", JDBCType.VARCHAR.name(), true),
+        new FieldMapping("", "", "abytearray", JDBCType.BINARY.name(), true),
+        new FieldMapping("", "", "achar", JDBCType.CHAR.name(), true));
+    return fieldMappings;
+  }
+
+  @Override
   protected boolean vendorSupportsSchemas() {
     return true;
   }
@@ -77,7 +100,7 @@ public class PostgresJdbcLoaderIntegrationTest extends JdbcLoaderIntegrationTest
     String ids = "id,name";
     Region<String, Employee> region =
         createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName(), ids, DB_NAME,
-            SCHEMA_NAME);
+            SCHEMA_NAME, getEmployeeTableFieldMappings());
     createPdxType();
 
     PdxInstance key =
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/internal/TestConfigService.java
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/internal/TestConfigService.java
index 3caa83f..472dfc8 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/internal/TestConfigService.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/internal/TestConfigService.java
@@ -17,6 +17,9 @@ package org.apache.geode.connectors.jdbc.internal;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
+import java.util.List;
+
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.extension.ExtensionPoint;
@@ -29,18 +32,21 @@ public class TestConfigService {
   private static final String REGION_NAME = "employees";
   private static final String CONNECTION_CONFIG_NAME = "testConnectionConfig";
 
-  public static JdbcConnectorServiceImpl getTestConfigService(String ids)
+  public static JdbcConnectorServiceImpl getTestConfigService(String ids,
+      List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
-    return getTestConfigService(createMockCache(), null, ids, null, null);
+    return getTestConfigService(createMockCache(), null, ids, null, null, fieldMappings);
   }
 
   public static JdbcConnectorServiceImpl getTestConfigService(InternalCache cache,
-      String pdxClassName, String ids, String catalog, String schema)
+      String pdxClassName, String ids, String catalog, String schema,
+      List<FieldMapping> fieldMappings)
       throws RegionMappingExistsException {
 
     JdbcConnectorServiceImpl service = new JdbcConnectorServiceImpl();
     service.init(cache);
-    service.createRegionMapping(createRegionMapping(pdxClassName, ids, catalog, schema));
+    service.createRegionMapping(
+        createRegionMapping(pdxClassName, ids, catalog, schema, fieldMappings));
     return service;
   }
 
@@ -51,8 +57,12 @@ public class TestConfigService {
   }
 
   private static RegionMapping createRegionMapping(String pdxClassName, String ids, String
catalog,
-      String schema) {
-    return new RegionMapping(REGION_NAME, pdxClassName, REGION_TABLE_NAME,
+      String schema, List<FieldMapping> fieldMappings) {
+    RegionMapping result = new RegionMapping(REGION_NAME, pdxClassName, REGION_TABLE_NAME,
         CONNECTION_CONFIG_NAME, ids, catalog, schema);
+    for (FieldMapping fieldMapping : fieldMappings) {
+      result.addFieldMapping(fieldMapping);
+    }
+    return result;
   }
 }


Mime
View raw message