ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From swa...@apache.org
Subject [1/3] AMBARI-4034. Create the RequestSchedule resource provider. Patch 1. (swagle)
Date Mon, 23 Dec 2013 22:13:16 GMT
Updated Branches:
  refs/heads/trunk ef81b392f -> 5dcea3726


http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
new file mode 100644
index 0000000..20c651f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
@@ -0,0 +1,373 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.scheduler;
+
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.assistedinject.Assisted;
+import com.google.inject.assistedinject.AssistedInject;
+import com.google.inject.persist.Transactional;
+import org.apache.ambari.server.controller.RequestScheduleResponse;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.RequestScheduleBatchRequestDAO;
+import org.apache.ambari.server.orm.dao.RequestScheduleDAO;
+import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity;
+import org.apache.ambari.server.orm.entities.RequestScheduleEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.utils.DateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+/**
+ * Implementation of {@link RequestExecution} backed by a JPA
+ * {@link RequestScheduleEntity}. Maintains in-memory {@link Batch} and
+ * {@link Schedule} views of the entity and delegates persistence to the
+ * injected DAOs. Mutating operations are guarded by a ReentrantReadWriteLock.
+ */
+public class RequestExecutionImpl implements RequestExecution {
+  private Cluster cluster;
+  private Batch batch;
+  private Schedule schedule;
+  // JPA entity this object wraps; created fresh or supplied on load
+  private RequestScheduleEntity requestScheduleEntity;
+  // True once the entity exists in the database
+  private volatile boolean isPersisted = false;
+
+  @Inject
+  private Gson gson;
+  @Inject
+  private Clusters clusters;
+  @Inject
+  private RequestScheduleDAO requestScheduleDAO;
+  @Inject
+  private RequestScheduleBatchRequestDAO batchRequestDAO;
+  @Inject
+  private ClusterDAO clusterDAO;
+  @Inject
+  private HostDAO hostDAO;
+
+  private static final Logger LOG = LoggerFactory.getLogger(RequestExecutionImpl.class);
+  private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
+
+  /**
+   * Constructor for a brand-new request schedule that has not been persisted
+   * yet. Builds a fresh entity from the supplied batch and schedule.
+   */
+  @AssistedInject
+  public RequestExecutionImpl(@Assisted("cluster") Cluster cluster,
+                              @Assisted("batch") Batch batch,
+                              @Assisted("schedule") Schedule schedule,
+                              Injector injector) {
+    this.cluster = cluster;
+    this.batch = batch;
+    this.schedule = schedule;
+    injector.injectMembers(this);
+
+    // Initialize the Entity object
+    // Batch Hosts is initialized on persist
+    requestScheduleEntity = new RequestScheduleEntity();
+    requestScheduleEntity.setClusterId(cluster.getClusterId());
+
+    updateBatchSettings();
+
+    updateSchedule();
+  }
+
+  /**
+   * Constructor used when loading an existing schedule from the database.
+   * Rebuilds the Batch and Schedule views from the entity's fields and marks
+   * this object as already persisted.
+   */
+  @AssistedInject
+  public RequestExecutionImpl(@Assisted Cluster cluster,
+                              @Assisted RequestScheduleEntity requestScheduleEntity,
+                              Injector injector) {
+    this.cluster = cluster;
+    injector.injectMembers(this);
+
+    this.requestScheduleEntity = requestScheduleEntity;
+
+    batch = new Batch();
+    schedule = new Schedule();
+
+    BatchSettings batchSettings = new BatchSettings();
+    batchSettings.setBatchSeparationInMinutes(requestScheduleEntity.getBatchSeparationInMinutes());
+    batchSettings.setTaskFailureToleranceLimit(requestScheduleEntity.getBatchTolerationLimit());
+
+    batch.setBatchSettings(batchSettings);
+
+    // Recreate one in-memory BatchRequest per persisted batch request row
+    Collection<RequestScheduleBatchRequestEntity> batchRequestEntities =
+      requestScheduleEntity.getRequestScheduleBatchRequestEntities();
+    if (batchRequestEntities != null) {
+      for (RequestScheduleBatchRequestEntity batchRequestEntity :
+          batchRequestEntities) {
+        BatchRequest batchRequest = new BatchRequest();
+        batchRequest.setType(BatchRequest.Type.valueOf(batchRequestEntity.getRequestType()));
+        batchRequest.setUri(batchRequestEntity.getRequestUri());
+        batchRequest.setBody(batchRequestEntity.getRequestBody());
+        batchRequest.setStatus(batchRequestEntity.getRequestStatus());
+        batchRequest.setReturnCode(batchRequestEntity.getReturnCode());
+        batchRequest.setResponseMsg(batchRequestEntity.getReturnMessage());
+        batch.getBatchRequests().add(batchRequest);
+      }
+    }
+
+    schedule.setDayOfWeek(requestScheduleEntity.getDayOfWeek());
+    schedule.setDaysOfMonth(requestScheduleEntity.getDaysOfMonth());
+    schedule.setMinutes(requestScheduleEntity.getMinutes());
+    schedule.setHours(requestScheduleEntity.getHours());
+    schedule.setMonth(requestScheduleEntity.getMonth());
+    schedule.setYear(requestScheduleEntity.getYear());
+    schedule.setStartTime(requestScheduleEntity.getStartTime());
+    schedule.setEndTime(requestScheduleEntity.getEndTime());
+
+    isPersisted = true;
+  }
+
+  @Override
+  public Long getId() {
+    return requestScheduleEntity.getScheduleId();
+  }
+
+  @Override
+  public String getClusterName() {
+    return cluster.getClusterName();
+  }
+
+  @Override
+  public Batch getBatch() {
+    return batch;
+  }
+
+  @Override
+  public void setBatch(Batch batch) {
+    this.batch = batch;
+  }
+
+  @Override
+  public Schedule getSchedule() {
+    return schedule;
+  }
+
+  @Override
+  public void setSchedule(Schedule schedule) {
+    this.schedule = schedule;
+  }
+
+  /**
+   * Builds a response snapshot of the current entity state. Taken under the
+   * read lock so a concurrent persist/delete cannot interleave.
+   */
+  @Override
+  public RequestScheduleResponse convertToResponse() {
+    readWriteLock.readLock().lock();
+    try{
+      RequestScheduleResponse response = new RequestScheduleResponse(
+        getId(), getClusterName(), getDescription(), getStatus(), getBatch(),
+        getSchedule(), requestScheduleEntity.getCreateUser(),
+        DateUtils.convertToReadableTime(requestScheduleEntity.getCreateTimestamp()),
+        requestScheduleEntity.getUpdateUser(),
+        DateUtils.convertToReadableTime(requestScheduleEntity.getUpdateTimestamp())
+      );
+      return response;
+    } finally {
+      readWriteLock.readLock().unlock();
+    }
+  }
+
+  /**
+   * First call creates the entity rows and refreshes the owning cluster;
+   * subsequent calls merge in-memory changes into the existing rows.
+   */
+  @Override
+  public void persist() {
+    readWriteLock.writeLock().lock();
+    try {
+      if (!isPersisted) {
+        persistEntities();
+        refresh();
+        cluster.refresh();
+        isPersisted = true;
+      } else {
+        saveIfPersisted();
+      }
+    } finally {
+      readWriteLock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Re-reads the schedule entity from the database. No-op when the entity
+   * has not been persisted yet.
+   */
+  @Override
+  public void refresh() {
+    readWriteLock.writeLock().lock();
+    try{
+      if (isPersisted) {
+        // NOTE(review): refresh() is applied to the instance returned by
+        // findById; verify it is the same managed instance as
+        // requestScheduleEntity, otherwise this object keeps stale state.
+        RequestScheduleEntity scheduleEntity = requestScheduleDAO.findById
+          (requestScheduleEntity.getScheduleId());
+        requestScheduleDAO.refresh(scheduleEntity);
+      }
+    } finally {
+      readWriteLock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Removes the batch request mappings and the schedule row itself, then
+   * marks this object as no longer persisted. No-op when never persisted.
+   */
+  @Override
+  public void delete() {
+    readWriteLock.writeLock().lock();
+    try {
+      if (isPersisted) {
+        batchRequestDAO.removeByScheduleId(requestScheduleEntity.getScheduleId());
+        requestScheduleDAO.remove(requestScheduleEntity);
+        cluster.refresh();
+        isPersisted = false;
+      }
+    } finally {
+      readWriteLock.writeLock().unlock();
+    }
+  }
+
+  @Override
+  public String getStatus() {
+    return requestScheduleEntity.getStatus();
+  }
+
+  @Override
+  public void setDescription(String description) {
+    requestScheduleEntity.setDescription(description);
+  }
+
+  @Override
+  public String getDescription() {
+    return requestScheduleEntity.getDescription();
+  }
+
+  /**
+   * Persist @RequestScheduleEntity with @RequestScheduleBatchHostEntity
+   */
+  // NOTE(review): Guice AOP does not intercept private methods, so
+  // @Transactional here (and on the other private helpers) may be
+  // ineffective — confirm against the project's transaction setup.
+  @Transactional
+  private void persistEntities() {
+    ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId());
+    requestScheduleEntity.setClusterEntity(clusterEntity);
+    requestScheduleEntity.setCreateTimestamp(System.currentTimeMillis());
+    requestScheduleEntity.setUpdateTimestamp(System.currentTimeMillis());
+    requestScheduleDAO.create(requestScheduleEntity);
+
+    persistRequestMapping();
+  }
+
+  /**
+   * Replaces the persisted batch request rows with the contents of the
+   * in-memory batch, assigning batch ids in sorted request order.
+   */
+  @Transactional
+  private void persistRequestMapping() {
+    // Delete existing mappings to support updates
+    if (isPersisted) {
+      batchRequestDAO.removeByScheduleId(requestScheduleEntity.getScheduleId());
+      requestScheduleEntity.getRequestScheduleBatchRequestEntities().clear();
+    }
+
+    if (batch != null) {
+      List<BatchRequest> batchRequests = batch.getBatchRequests();
+      if (batchRequests != null) {
+        // Sort by orderId and assign increasing batch id
+        Collections.sort(batchRequests);
+        Long batchId = 1L;
+        for (BatchRequest batchRequest : batchRequests) {
+          RequestScheduleBatchRequestEntity batchRequestEntity = new
+            RequestScheduleBatchRequestEntity();
+          batchRequestEntity.setBatchId(batchId);
+          batchRequestEntity.setScheduleId(requestScheduleEntity.getScheduleId());
+          batchRequestEntity.setRequestScheduleEntity(requestScheduleEntity);
+          batchRequestEntity.setRequestType(batchRequest.getType());
+          batchRequestEntity.setRequestUri(batchRequest.getUri());
+          batchRequestEntity.setRequestBody(batchRequest.getBody());
+          batchRequestEntity.setReturnCode(batchRequest.getReturnCode());
+          batchRequestEntity.setReturnMessage(batchRequest.getResponseMsg());
+          batchRequestEntity.setRequestStatus(batchRequest.getStatus());
+          batchRequestDAO.create(batchRequestEntity);
+          requestScheduleEntity.getRequestScheduleBatchRequestEntities().add
+            (batchRequestEntity);
+          // NOTE(review): merging the parent once per iteration looks
+          // redundant; a single merge after the loop would likely suffice —
+          // confirm JPA cascade semantics before changing.
+          requestScheduleDAO.merge(requestScheduleEntity);
+          batchId++;
+        }
+      }
+    }
+
+
+  }
+
+  /**
+   * Merges in-memory batch settings and schedule changes into the existing
+   * rows and bumps the update timestamp. No-op when not yet persisted.
+   */
+  @Transactional
+  private void saveIfPersisted() {
+    if (isPersisted) {
+      requestScheduleEntity.setUpdateTimestamp(System.currentTimeMillis());
+      // Update the Entity object with new settings
+      updateBatchSettings();
+      updateSchedule();
+      // Persist schedule and settings
+      requestScheduleDAO.merge(requestScheduleEntity);
+      // Persist batches of hosts
+      persistRequestMapping();
+    }
+  }
+
+  // Copies batch settings from the in-memory Batch into the entity
+  private void updateBatchSettings() {
+    if (batch != null) {
+      BatchSettings settings = batch.getBatchSettings();
+      if (settings != null) {
+        requestScheduleEntity.setBatchSeparationInMinutes(settings.getBatchSeparationInMinutes());
+        requestScheduleEntity.setBatchTolerationLimit(settings.getTaskFailureToleranceLimit());
+      }
+    }
+  }
+
+  // Copies all schedule fields from the in-memory Schedule into the entity
+  private void updateSchedule() {
+    if (schedule != null) {
+      requestScheduleEntity.setMinutes(schedule.getMinutes());
+      requestScheduleEntity.setHours(schedule.getHours());
+      requestScheduleEntity.setDaysOfMonth(schedule.getDaysOfMonth());
+      requestScheduleEntity.setDayOfWeek(schedule.getDayOfWeek());
+      requestScheduleEntity.setMonth(schedule.getMonth());
+      requestScheduleEntity.setYear(schedule.getYear());
+      requestScheduleEntity.setStartTime(schedule.getStartTime());
+      requestScheduleEntity.setEndTime(schedule.getEndTime());
+    }
+  }
+
+  @Override
+  public void setStatus(Status status) {
+    requestScheduleEntity.setStatus(status.name());
+  }
+
+  @Override
+  public void setLastExecutionStatus(String status) {
+    requestScheduleEntity.setLastExecutionStatus(status);
+  }
+
+  @Override
+  public void setCreateUser(String username) {
+    requestScheduleEntity.setCreateUser(username);
+  }
+
+  @Override
+  public void setUpdateUser(String username) {
+    requestScheduleEntity.setUpdateUser(username);
+  }
+
+  @Override
+  public String getCreateTime() {
+    return DateUtils.convertToReadableTime
+      (requestScheduleEntity.getCreateTimestamp());
+  }
+
+  @Override
+  public String getUpdateTime() {
+    return DateUtils.convertToReadableTime
+      (requestScheduleEntity.getUpdateTimestamp());
+  }
+
+  @Override
+  public String getCreateUser() {
+    return requestScheduleEntity.getCreateUser();
+  }
+
+  @Override
+  public String getUpdateUser() {
+    return requestScheduleEntity.getUpdateUser();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/Schedule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/Schedule.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/Schedule.java
new file mode 100644
index 0000000..ff5fb76
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/Schedule.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.scheduler;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+/**
+ * Value object holding the schedule fields of a request execution:
+ * minutes, hours, days of month, month, day of week, year, plus optional
+ * start and end times. All fields are plain strings; Jackson serialization
+ * skips null/empty values (Inclusion.NON_EMPTY) and maps the multi-word
+ * fields to snake_case JSON names.
+ */
+public class Schedule {
+  private String minutes;
+  private String hours;
+  private String daysOfMonth;
+  private String month;
+  private String dayOfWeek;
+  private String year;
+  private String startTime;
+  private String endTime;
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("minutes")
+  public String getMinutes() {
+    return minutes;
+  }
+
+  public void setMinutes(String minutes) {
+    this.minutes = minutes;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("hours")
+  public String getHours() {
+    return hours;
+  }
+
+  public void setHours(String hours) {
+    this.hours = hours;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("days_of_month")
+  public String getDaysOfMonth() {
+    return daysOfMonth;
+  }
+
+  public void setDaysOfMonth(String daysOfMonth) {
+    this.daysOfMonth = daysOfMonth;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("month")
+  public String getMonth() {
+    return month;
+  }
+
+  public void setMonth(String month) {
+    this.month = month;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("day_of_week")
+  public String getDayOfWeek() {
+    return dayOfWeek;
+  }
+
+  public void setDayOfWeek(String dayOfWeek) {
+    this.dayOfWeek = dayOfWeek;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("year")
+  public String getYear() {
+    return year;
+  }
+
+  public void setYear(String year) {
+    this.year = year;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("start_time")
+  public String getStartTime() {
+    return startTime;
+  }
+
+  public void setStartTime(String startTime) {
+    this.startTime = startTime;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("end_time")
+  public String getEndTime() {
+    return endTime;
+  }
+
+  public void setEndTime(String endTime) {
+    this.endTime = endTime;
+  }
+
+  /**
+   * Null-safe, field-by-field equality over all eight schedule fields.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    Schedule schedule = (Schedule) o;
+
+    if (dayOfWeek != null ? !dayOfWeek.equals(schedule.dayOfWeek) : schedule.dayOfWeek != null)
+      return false;
+    if (daysOfMonth != null ? !daysOfMonth.equals(schedule.daysOfMonth) : schedule.daysOfMonth != null)
+      return false;
+    if (endTime != null ? !endTime.equals(schedule.endTime) : schedule.endTime != null)
+      return false;
+    if (hours != null ? !hours.equals(schedule.hours) : schedule.hours != null)
+      return false;
+    if (minutes != null ? !minutes.equals(schedule.minutes) : schedule.minutes != null)
+      return false;
+    if (month != null ? !month.equals(schedule.month) : schedule.month != null)
+      return false;
+    if (startTime != null ? !startTime.equals(schedule.startTime) : schedule.startTime != null)
+      return false;
+    if (year != null ? !year.equals(schedule.year) : schedule.year != null)
+      return false;
+
+    return true;
+  }
+
+  /**
+   * @return true when every schedule field is null or the empty string
+   */
+  public boolean isEmpty() {
+    return (minutes == null || minutes.isEmpty())
+      && (hours == null || hours.isEmpty())
+      && (dayOfWeek == null || dayOfWeek.isEmpty())
+      && (daysOfMonth == null || daysOfMonth.isEmpty())
+      && (month == null || month.isEmpty())
+      && (year == null || year.isEmpty())
+      && (startTime == null || startTime.isEmpty())
+      && (endTime == null || endTime.isEmpty());
+  }
+
+  // Combines all eight fields; consistent with equals() above
+  @Override
+  public int hashCode() {
+    int result = minutes != null ? minutes.hashCode() : 0;
+    result = 31 * result + (hours != null ? hours.hashCode() : 0);
+    result = 31 * result + (daysOfMonth != null ? daysOfMonth.hashCode() : 0);
+    result = 31 * result + (month != null ? month.hashCode() : 0);
+    result = 31 * result + (dayOfWeek != null ? dayOfWeek.hashCode() : 0);
+    result = 31 * result + (year != null ? year.hashCode() : 0);
+    result = 31 * result + (startTime != null ? startTime.hashCode() : 0);
+    result = 31 * result + (endTime != null ? endTime.hashCode() : 0);
+    return result;
+  }
+
+  @Override
+  public String toString() {
+    return "Schedule{" +
+      "minutes='" + minutes + '\'' +
+      ", hours='" + hours + '\'' +
+      ", days_of_month='" + daysOfMonth + '\'' +
+      ", month='" + month + '\'' +
+      ", day_of_week='" + dayOfWeek + '\'' +
+      ", year='" + year + '\'' +
+      ", startTime='" + startTime + '\'' +
+      ", endTime='" + endTime + '\'' +
+      '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/java/org/apache/ambari/server/utils/DateUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/DateUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/DateUtils.java
new file mode 100644
index 0000000..e2cc65f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/DateUtils.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.utils;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * Static helper methods for datetime conversions.
+ */
+public class DateUtils {
+
+  // Utility class; not meant to be instantiated
+  private DateUtils() {
+  }
+
+  /**
+   * Milliseconds to readable format in current server timezone
+   * @param timestamp milliseconds since the epoch
+   * @return formatted date string, e.g. "2013-12-23 14:13:16"
+   */
+  public static String convertToReadableTime(Long timestamp) {
+    // SimpleDateFormat is not thread-safe; a fresh instance per call is safe
+    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+    return dateFormat.format(new Date(timestamp));
+  }
+
+  /**
+   * Convert time in given format to milliseconds
+   * @param time   textual date/time to parse
+   * @param format {@link SimpleDateFormat} pattern describing {@code time}
+   * @return milliseconds since the epoch, or null if the input cannot be parsed
+   */
+  public static Long convertToTimestamp(String time, String format) {
+    SimpleDateFormat dateFormat = new SimpleDateFormat(format);
+    try {
+      Date date = dateFormat.parse(time);
+      return date.getTime();
+    } catch (ParseException e) {
+      // TODO: route through a logger instead of stderr
+      e.printStackTrace();
+    }
+    return null;
+  }
+
+  /**
+   * Get difference in minutes between old date and now
+   * @param oldTime a date in the past
+   * @return total elapsed minutes between oldTime and now
+   */
+  public static Long getDateDifferenceInMinutes(Date oldTime) {
+    // Fixed: previous version computed (old - now), which is negative for a
+    // past date, and applied % 60, which discarded whole hours from the
+    // difference.
+    long diff = new Date().getTime() - oldTime.getTime();
+    return diff / (60 * 1000);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 27c77e8..9a2ee06 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -52,8 +52,8 @@ CREATE TABLE ambari_sequences (sequence_name VARCHAR(50) NOT NULL, value DECIMAL
 CREATE TABLE confgroupclusterconfigmapping (config_group_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, config_type VARCHAR(255) NOT NULL, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', create_timestamp BIGINT NOT NULL, PRIMARY KEY(config_group_id, cluster_id, config_type));
 CREATE TABLE configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
 CREATE TABLE configgrouphostmapping (config_group_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
-CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, request_context varchar(255), status varchar(255), target_type varchar(255), target_name varchar(255) NOT NULL, target_service varchar(255) NOT NULL, target_component varchar(255), batch_requests_by_host boolean, batch_host_count smallint, batch_separation_minutes smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime bigint, endTime bigint, last_execution_status varchar(255), PRIMARY KEY(schedule_id));
-CREATE TABLE ambari.requestschedulebatchhost (schedule_id bigint, batch_id bigint, host_name varchar(255), batch_name varchar(255), PRIMARY KEY(schedule_id, batch_id, host_name));
+CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, description varchar(255), status varchar(255), batch_separation_minutes smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime varchar(50), endTime varchar(50), last_execution_status varchar(255), PRIMARY KEY(schedule_id));
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body varchar(4000), request_status varchar(255), return_code smallint, return_message varchar(255), PRIMARY KEY(schedule_id, batch_id));
 
 
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, ldap_user);
@@ -82,8 +82,7 @@ ALTER TABLE confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclusterconf
 ALTER TABLE configgroup ADD CONSTRAINT FK_configgroup_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_configgroup_id FOREIGN KEY (config_group_id) REFERENCES configgroup (group_id);
 ALTER TABLE configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_host_name FOREIGN KEY (host_name) REFERENCES hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_schedule FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
+ALTER TABLE ambari.requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 
 
 INSERT INTO ambari_sequences(sequence_name, value) values ('cluster_id_seq', 0);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 74a4607..071469a 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -43,9 +43,8 @@ CREATE TABLE configgroup (group_id NUMBER(19), cluster_id NUMBER(19) NOT NULL, g
 CREATE TABLE confgroupclusterconfigmapping (config_group_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NOT NULL, config_type VARCHAR2(255) NOT NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR2(255) DEFAULT '_db', create_timestamp NUMBER(19) NOT NULL, PRIMARY KEY(config_group_id, cluster_id, config_type));
 CREATE TABLE configgrouphostmapping (config_group_id NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
 CREATE TABLE action (action_name VARCHAR2(255) NOT NULL, action_type VARCHAR2(255) NOT NULL, inputs VARCHAR2(1024), target_service VARCHAR2(255), target_component VARCHAR2(255), default_timeout NUMBER(10) NOT NULL, description VARCHAR2(1024), target_type VARCHAR2(255), PRIMARY KEY (action_name));
-CREATE TABLE ambari.requestschedule (schedule_id NUMBER(19), cluster_id NUMBER(19) NOT NULL, request_context VARCHAR2(255), status VARCHAR2(255), target_type VARCHAR2(255), target_name VARCHAR2(255) NOT NULL, target_service VARCHAR2(255) NOT NULL, target_component VARCHAR2(255), batch_requests_by_host char check (batch_requests_by_host in ('FALSE','TRUE')), batch_host_count smallint, batch_separation_minutes smallint, batch_toleration_limit smallint, create_user VARCHAR2(255), create_timestamp NUMBER(19), update_user VARCHAR2(255), update_timestamp NUMBER(19), minutes VARCHAR2(10), hours VARCHAR2(10), days_of_month VARCHAR2(10), month VARCHAR2(10), day_of_week VARCHAR2(10), yearToSchedule VARCHAR2(10), startTime NUMBER(19), endTime NUMBER(19), last_execution_status VARCHAR2(255), PRIMARY KEY(schedule_id));
-CREATE TABLE ambari.requestschedulebatchhost (schedule_id NUMBER(19), batch_id NUMBER(19), host_name VARCHAR2(255), batch_name VARCHAR2(255), PRIMARY KEY(schedule_id, batch_id, host_name));
-
+CREATE TABLE ambari.requestschedule (schedule_id NUMBER(19), cluster_id NUMBER(19) NOT NULL, description VARCHAR2(255), status VARCHAR2(255), batch_separation_minutes smallint, batch_toleration_limit smallint, create_user VARCHAR2(255), create_timestamp NUMBER(19), update_user VARCHAR2(255), update_timestamp NUMBER(19), minutes VARCHAR2(10), hours VARCHAR2(10), days_of_month VARCHAR2(10), month VARCHAR2(10), day_of_week VARCHAR2(10), yearToSchedule VARCHAR2(10), startTime VARCHAR2(50), endTime VARCHAR2(50), last_execution_status VARCHAR2(255), PRIMARY KEY(schedule_id));
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id NUMBER(19), batch_id NUMBER(19), request_id NUMBER(19), request_type VARCHAR2(255), request_uri VARCHAR2(1024), request_body VARCHAR2(4000), request_status VARCHAR2(255), return_code smallint, return_message VARCHAR2(255), PRIMARY KEY(schedule_id, batch_id));
 
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, ldap_user);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -73,9 +72,7 @@ ALTER TABLE confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclusterconf
 ALTER TABLE confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclusterconfigmapping_group_id FOREIGN KEY (config_group_id) REFERENCES configgroup (group_id);
 ALTER TABLE confgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_configgroup_id FOREIGN KEY (config_group_id) REFERENCES configgroup (group_id);
 ALTER TABLE confgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_host_name FOREIGN KEY (host_name) REFERENCES hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_schedule FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
-
+ALTER TABLE ambari.requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 
 INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 2fb484f..b6912d9 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -110,11 +110,11 @@ GRANT ALL PRIVILEGES ON TABLE ambari.configgrouphostmapping TO :username;
 CREATE TABLE ambari.action (action_name VARCHAR(255) NOT NULL, action_type VARCHAR(32) NOT NULL, inputs VARCHAR(1000), target_service VARCHAR(255), target_component VARCHAR(255), default_timeout SMALLINT NOT NULL, description VARCHAR(1000), target_type VARCHAR(32), PRIMARY KEY (action_name));
 GRANT ALL PRIVILEGES ON TABLE ambari.action TO :username;
 
-CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, request_context varchar(255), status varchar(255), target_type varchar(255), target_name varchar(255) NOT NULL, target_service varchar(255) NOT NULL, target_component varchar(255), batch_requests_by_host boolean, batch_host_count smallint, batch_separation_minutes smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime bigint, endTime bigint, last_execution_status varchar(255), PRIMARY KEY(schedule_id));
+CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id bigint NOT NULL, description varchar(255), status varchar(255), batch_separation_minutes smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime varchar(50), endTime varchar(50), last_execution_status varchar(255), PRIMARY KEY(schedule_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.requestschedule TO :username;
 
-CREATE TABLE ambari.requestschedulebatchhost (schedule_id bigint, batch_id bigint, host_name varchar(255), batch_name varchar(255), PRIMARY KEY(schedule_id, batch_id, host_name));
-GRANT ALL PRIVILEGES ON TABLE ambari.requestschedulebatchhost TO :username;
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body varchar(4000), request_status varchar(255), return_code smallint, return_message varchar(255), PRIMARY KEY(schedule_id, batch_id));
+GRANT ALL PRIVILEGES ON TABLE ambari.requestschedulebatchrequest TO :username;
 
 --------altering tables by creating foreign keys----------
 ALTER TABLE ambari.clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
@@ -144,8 +144,7 @@ ALTER TABLE ambari.confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclus
 ALTER TABLE ambari.confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclusterconfigmapping_group_id FOREIGN KEY (config_group_id) REFERENCES ambari.configgroup (group_id);
 ALTER TABLE ambari.configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_configgroup_id FOREIGN KEY (config_group_id) REFERENCES ambari.configgroup (group_id);
 ALTER TABLE ambari.configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_schedule FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
+ALTER TABLE ambari.requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 
 ---------inserting some data-----------
 BEGIN;

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
index e82cb3d..e89f72f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
@@ -45,8 +45,8 @@ CREATE TABLE ambari.ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, "v
 CREATE TABLE ambari.configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
 CREATE TABLE ambari.confgroupclusterconfigmapping (config_group_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, config_type VARCHAR(255) NOT NULL, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', create_timestamp BIGINT NOT NULL, PRIMARY KEY(config_group_id, cluster_id, config_type));
 CREATE TABLE ambari.configgrouphostmapping (config_group_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
-CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, request_context varchar(255), status varchar(255), target_type varchar(255), target_name varchar(255) NOT NULL, target_service varchar(255) NOT NULL, target_component varchar(255), batch_requests_by_host boolean, batch_host_count smallint, batch_separation_minutes smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime bigint, endTime bigint, PRIMARY KEY(schedule_id));
-CREATE TABLE ambari.requestschedulebatchhost (schedule_id bigint, batch_id bigint, host_name varchar(255), batch_name varchar(255), PRIMARY KEY(schedule_id, batch_id, host_name));
+CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, status varchar(255), batch_separation_minutes smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime varchar(50), endTime varchar(50), last_execution_status varchar(255), PRIMARY KEY(schedule_id));
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body varchar(4000), request_status varchar(255), return_code smallint, return_message varchar(255), PRIMARY KEY(schedule_id, batch_id));
 
 ALTER TABLE ambari.clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
@@ -75,8 +75,7 @@ ALTER TABLE ambari.confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclus
 ALTER TABLE ambari.confgroupclusterconfigmapping ADD CONSTRAINT FK_confgroupclusterconfigmapping_group_id FOREIGN KEY (config_group_id) REFERENCES ambari.configgroup (group_id);
 ALTER TABLE ambari.configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_configgroup_id FOREIGN KEY (config_group_id) REFERENCES ambari.configgroup (group_id);
 ALTER TABLE ambari.configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
-ALTER TABLE ambari.requestschedulebatchhost ADD CONSTRAINT FK_requestschedulebatchhost_schedule FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
+ALTER TABLE ambari.requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 
 BEGIN;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index d56378b..6c8a5d6 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -39,7 +39,7 @@
     <class>org.apache.ambari.server.orm.entities.ConfigGroupHostMappingEntity</class>
     <class>org.apache.ambari.server.orm.entities.ActionEntity</class>
     <class>org.apache.ambari.server.orm.entities.RequestScheduleEntity</class>
-    <class>org.apache.ambari.server.orm.entities.RequestScheduleBatchHostEntity</class>
+    <class>org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity</class>
 
     <properties>
       <!--<property name="javax.persistence.jdbc.url" value="jdbc:postgresql://localhost/ambari" />-->

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/key_properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/key_properties.json b/ambari-server/src/main/resources/key_properties.json
index 3a2be50..dc7e23a 100644
--- a/ambari-server/src/main/resources/key_properties.json
+++ b/ambari-server/src/main/resources/key_properties.json
@@ -102,5 +102,9 @@
   "ConfigGroup": {
     "Cluster": "ConfigGroup/cluster_name",
     "ConfigGroup": "ConfigGroup/id"
+  },
+  "RequestSchedule" : {
+    "Cluster": "RequestSchedule/cluster_name",
+    "RequestSchedule": "RequestSchedule/id"
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index e8015c9..952197b 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -105,6 +105,15 @@
         "Requests/progress_percent",
         "_"
     ],
+    "RequestSchedule" : [
+        "RequestSchedule/id",
+        "RequestSchedule/cluster_name",
+        "RequestSchedule/description",
+        "RequestSchedule/status",
+        "RequestSchedule/batch",
+        "RequestSchedule/schedule",
+        "_"
+    ],
     "Task":[
         "Tasks/id",
         "Tasks/request_id",

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index 7ecba51..a832c8a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -45,6 +45,9 @@ import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.configgroup.ConfigGroupImpl;
 import org.apache.ambari.server.state.host.HostFactory;
 import org.apache.ambari.server.state.host.HostImpl;
+import org.apache.ambari.server.state.scheduler.RequestExecution;
+import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
+import org.apache.ambari.server.state.scheduler.RequestExecutionImpl;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -275,6 +278,8 @@ public class AgentResourceTest extends JerseyTest {
           Config.class, ConfigImpl.class).build(ConfigFactory.class));
       install(new FactoryModuleBuilder().implement(
         ConfigGroup.class, ConfigGroupImpl.class).build(ConfigGroupFactory.class));
+      install(new FactoryModuleBuilder().implement(RequestExecution.class,
+        RequestExecutionImpl.class).build(RequestExecutionFactory.class));
       install(new FactoryModuleBuilder().build(StageFactory.class));
       install(new FactoryModuleBuilder().build(HostRoleCommandFactory.class));
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestScheduleResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestScheduleResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestScheduleResourceProviderTest.java
new file mode 100644
index 0000000..85a97f6
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestScheduleResourceProviderTest.java
@@ -0,0 +1,404 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import junit.framework.Assert;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.RequestScheduleResponse;
+import org.apache.ambari.server.controller.RequestStatusResponse;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.scheduler.Batch;
+import org.apache.ambari.server.state.scheduler.BatchRequest;
+import org.apache.ambari.server.state.scheduler.RequestExecution;
+import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
+import org.apache.ambari.server.state.scheduler.Schedule;
+import org.easymock.Capture;
+import org.easymock.IAnswer;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+public class RequestScheduleResourceProviderTest {
+
+  RequestScheduleResourceProvider getResourceProvider
+    (AmbariManagementController managementController) {
+
+    Resource.Type type = Resource.Type.RequestSchedule;
+
+    return (RequestScheduleResourceProvider)
+      AbstractControllerResourceProvider.getResourceProvider(
+        type,
+        PropertyHelper.getPropertyIds(type),
+        PropertyHelper.getKeyPropertyIds(type),
+        managementController
+      );
+  }
+
+  @Test
+  public void testCreateRequestSchedule() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    RequestExecutionFactory executionFactory = createNiceMock
+      (RequestExecutionFactory.class);
+    RequestExecution requestExecution = createNiceMock(RequestExecution.class);
+
+    expect(managementController.getClusters()).andReturn(clusters);
+    expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
+    expect(managementController.getRequestExecutionFactory()).andReturn
+      (executionFactory);
+    expect(managementController.getAuthName()).andReturn("admin").anyTimes();
+
+    Capture<Cluster> clusterCapture = new Capture<Cluster>();
+    Capture<Batch> batchCapture = new Capture<Batch>();
+    Capture<Schedule> scheduleCapture = new Capture<Schedule>();
+
+    expect(executionFactory.createNew(capture(clusterCapture),
+      capture(batchCapture), capture(scheduleCapture))).andReturn(requestExecution);
+
+    replay(managementController, clusters, cluster, executionFactory,
+      requestExecution, response);
+
+    RequestScheduleResourceProvider resourceProvider = getResourceProvider
+      (managementController);
+
+    Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_DESC_PROPERTY_ID, "some description");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_DAY_OF_WEEK_PROPERTY_ID, "MON");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_MINUTES_PROPERTY_ID, "2");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_END_TIME_PROPERTY_ID, "2013-11-18T14:29:29-08:00");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_DAYS_OF_MONTH_PROPERTY_ID, "*");
+
+    HashSet<Map<String, Object>> batch = new HashSet<Map<String, Object>>();
+    Map<String, Object> batchSettings = new HashMap<String, Object>();
+    batchSettings.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_BATCH_SEPARATION_PROPERTY_ID, "15");
+
+    Map<String, Object> batchRequests = new HashMap<String, Object>();
+    HashSet<Map<String, Object>> requestSet = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> request1 = new HashMap<String, Object>();
+    Map<String, Object> request2 = new HashMap<String, Object>();
+
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_TYPE_PROPERTY_ID, BatchRequest.Type.PUT.name());
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_ORDER_ID_PROPERTY_ID, "20");
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_URI_PROPERTY_ID, "SomeUpdateUri");
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_BODY_PROPERTY_ID, "data1");
+
+    request2.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_TYPE_PROPERTY_ID, BatchRequest.Type.DELETE.name());
+    request2.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_ORDER_ID_PROPERTY_ID, "22");
+    request2.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_URI_PROPERTY_ID, "SomeDeleteUri");
+
+    requestSet.add(request1);
+    requestSet.add(request2);
+
+    batchRequests.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_BATCH_REQUESTS_PROPERTY_ID, requestSet);
+
+    batch.add(batchSettings);
+    batch.add(batchRequests);
+
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_BATCH_PROPERTY_ID, batch);
+
+    propertySet.add(properties);
+    Request request = PropertyHelper.getCreateRequest(propertySet, null);
+    resourceProvider.createResources(request);
+
+    verify(managementController, clusters, cluster, executionFactory,
+      requestExecution, response);
+
+    List<BatchRequest> testRequests = batchCapture.getValue().getBatchRequests();
+    Assert.assertNotNull(testRequests);
+    BatchRequest deleteReq = null;
+    BatchRequest putReq = null;
+    for (BatchRequest testBatchRequest : testRequests) {
+      if (testBatchRequest.getType().equals(BatchRequest.Type.DELETE.name())) {
+        deleteReq = testBatchRequest;
+      } else {
+        putReq = testBatchRequest;
+      }
+    }
+    Assert.assertNotNull(deleteReq);
+    Assert.assertNotNull(putReq);
+    Assert.assertEquals("data1", putReq.getBody());
+    Assert.assertNull(deleteReq.getBody());
+  }
+
+  @Test
+  public void testUpdateRequestSchedule() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    final RequestExecution requestExecution = createNiceMock(RequestExecution.class);
+    RequestScheduleResponse requestScheduleResponse = createNiceMock
+      (RequestScheduleResponse.class);
+
+    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+    expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
+    expect(managementController.getAuthName()).andReturn("admin").anyTimes();
+
+    expect(requestExecution.getId()).andReturn(25L).anyTimes();
+    expect(requestExecution.convertToResponse()).andReturn
+      (requestScheduleResponse).anyTimes();
+    expect(requestScheduleResponse.getId()).andReturn(25L).anyTimes();
+    expect(requestScheduleResponse.getClusterName()).andReturn("Cluster100")
+      .anyTimes();
+
+    expect(cluster.getAllRequestExecutions()).andStubAnswer(new IAnswer<Map<Long, RequestExecution>>() {
+      @Override
+      public Map<Long, RequestExecution> answer() throws Throwable {
+        Map<Long, RequestExecution> requestExecutionMap = new HashMap<Long,
+          RequestExecution>();
+        requestExecutionMap.put(requestExecution.getId(), requestExecution);
+        return requestExecutionMap;
+      }
+    });
+
+    replay(managementController, clusters, cluster, requestExecution,
+      response, requestScheduleResponse);
+
+    RequestScheduleResourceProvider resourceProvider = getResourceProvider
+      (managementController);
+
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_DESC_PROPERTY_ID, "some description");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_DAY_OF_WEEK_PROPERTY_ID, "MON");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_MINUTES_PROPERTY_ID, "2");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_END_TIME_PROPERTY_ID, "2013-11-18T14:29:29-08:00");
+    properties.put(RequestScheduleResourceProvider
+      .SCHEDULE_DAYS_OF_MONTH_PROPERTY_ID, "*");
+
+    HashSet<Map<String, Object>> batch = new HashSet<Map<String, Object>>();
+    Map<String, Object> batchSettings = new HashMap<String, Object>();
+    batchSettings.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_BATCH_SEPARATION_PROPERTY_ID, "15");
+
+    Map<String, Object> batchRequests = new HashMap<String, Object>();
+    HashSet<Map<String, Object>> requestSet = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> request1 = new HashMap<String, Object>();
+    Map<String, Object> request2 = new HashMap<String, Object>();
+
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_TYPE_PROPERTY_ID, BatchRequest.Type.PUT.name());
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_ORDER_ID_PROPERTY_ID, "20");
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_URI_PROPERTY_ID, "SomeUpdateUri");
+    request1.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_BODY_PROPERTY_ID, "data1");
+
+    request2.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_TYPE_PROPERTY_ID, BatchRequest.Type.DELETE.name());
+    request2.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_ORDER_ID_PROPERTY_ID, "22");
+    request2.put(RequestScheduleResourceProvider
+      .BATCH_REQUEST_URI_PROPERTY_ID, "SomeDeleteUri");
+
+    requestSet.add(request1);
+    requestSet.add(request2);
+
+    batchRequests.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_BATCH_REQUESTS_PROPERTY_ID, requestSet);
+
+    batch.add(batchSettings);
+    batch.add(batchRequests);
+
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_BATCH_PROPERTY_ID, batch);
+
+    Map<String, String> mapRequestProps = new HashMap<String, String>();
+    mapRequestProps.put("context", "Called from a test");
+
+    Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
+    Predicate predicate = new PredicateBuilder().property
+      (RequestScheduleResourceProvider.REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID)
+      .equals("Cluster100").and().property(RequestScheduleResourceProvider
+        .REQUEST_SCHEDULE_ID_PROPERTY_ID).equals(25L).toPredicate();
+
+    resourceProvider.updateResources(request, predicate);
+
+    verify(managementController, clusters, cluster, requestExecution,
+      response, requestScheduleResponse);
+  }
+
+  @Test
+  public void testGetRequestSchedule() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    final RequestExecution requestExecution = createNiceMock(RequestExecution.class);
+    RequestScheduleResponse requestScheduleResponse = createNiceMock
+      (RequestScheduleResponse.class);
+
+    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+    expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
+    expect(managementController.getAuthName()).andReturn("admin").anyTimes();
+
+    expect(requestExecution.getId()).andReturn(25L).anyTimes();
+    expect(requestExecution.getStatus()).andReturn(RequestExecution.Status
+      .SCHEDULED.name()).anyTimes();
+    expect(requestExecution.convertToResponse()).andReturn
+      (requestScheduleResponse).anyTimes();
+    expect(requestScheduleResponse.getId()).andReturn(25L).anyTimes();
+    expect(requestScheduleResponse.getClusterName()).andReturn("Cluster100")
+      .anyTimes();
+
+    expect(cluster.getAllRequestExecutions()).andStubAnswer(new IAnswer<Map<Long, RequestExecution>>() {
+      @Override
+      public Map<Long, RequestExecution> answer() throws Throwable {
+        Map<Long, RequestExecution> requestExecutionMap = new HashMap<Long,
+          RequestExecution>();
+        requestExecutionMap.put(requestExecution.getId(), requestExecution);
+        return requestExecutionMap;
+      }
+    });
+
+    replay(managementController, clusters, cluster, requestExecution,
+      response, requestScheduleResponse);
+
+    RequestScheduleResourceProvider resourceProvider = getResourceProvider
+      (managementController);
+
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_DESC_PROPERTY_ID, "some description");
+
+    Set<String> propertyIds = new HashSet<String>();
+    propertyIds.add(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID);
+    propertyIds.add(RequestScheduleResourceProvider
+      .REQUEST_SCHEDULE_ID_PROPERTY_ID);
+
+    Request request = PropertyHelper.getReadRequest(propertyIds);
+
+    // Read by id
+    Predicate predicate = new PredicateBuilder().property
+      (RequestScheduleResourceProvider.REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID)
+      .equals("Cluster100").and().property(RequestScheduleResourceProvider
+        .REQUEST_SCHEDULE_ID_PROPERTY_ID).equals(25L).toPredicate();
+
+    Set<Resource> resources = resourceProvider.getResources(request,
+      predicate);
+
+    Assert.assertEquals(1, resources.size());
+    Assert.assertEquals(25L, resources.iterator().next().getPropertyValue
+      (RequestScheduleResourceProvider.REQUEST_SCHEDULE_ID_PROPERTY_ID));
+
+    // Read all
+    predicate = new PredicateBuilder().property
+      (RequestScheduleResourceProvider.REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID)
+      .equals("Cluster100").toPredicate();
+
+    resources = resourceProvider.getResources(request, predicate);
+
+    Assert.assertEquals(1, resources.size());
+    Assert.assertEquals(25L, resources.iterator().next().getPropertyValue
+      (RequestScheduleResourceProvider.REQUEST_SCHEDULE_ID_PROPERTY_ID));
+
+    verify(managementController, clusters, cluster, requestExecution,
+      response, requestScheduleResponse);
+  }
+
+  @Test
+  public void testDeleteRequestSchedule() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+
+    expect(managementController.getAuthName()).andReturn("admin").anyTimes();
+    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+    expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
+
+    cluster.deleteRequestExecution(1L);
+
+    replay(managementController, clusters, cluster);
+
+    RequestScheduleResourceProvider resourceProvider = getResourceProvider
+      (managementController);
+
+    AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
+
+    ((ObservableResourceProvider) resourceProvider).addObserver(observer);
+
+    Predicate predicate = new PredicateBuilder().property
+      (RequestScheduleResourceProvider.REQUEST_SCHEDULE_CLUSTER_NAME_PROPERTY_ID)
+      .equals("Cluster100").and().property(RequestScheduleResourceProvider
+        .REQUEST_SCHEDULE_ID_PROPERTY_ID).equals(1L).toPredicate();
+
+    resourceProvider.deleteResources(predicate);
+
+    ResourceProviderEvent lastEvent = observer.getLastEvent();
+    Assert.assertNotNull(lastEvent);
+    Assert.assertEquals(Resource.Type.RequestSchedule, lastEvent.getResourceType());
+    Assert.assertEquals(ResourceProviderEvent.Type.Delete, lastEvent.getType());
+    Assert.assertEquals(predicate, lastEvent.getPredicate());
+    Assert.assertNull(lastEvent.getRequest());
+
+    verify(managementController, clusters, cluster);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/java/org/apache/ambari/server/orm/InMemoryDefaultTestModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/InMemoryDefaultTestModule.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/InMemoryDefaultTestModule.java
index eed56a4..06d553d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/InMemoryDefaultTestModule.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/InMemoryDefaultTestModule.java
@@ -21,13 +21,11 @@ package org.apache.ambari.server.orm;
 import com.google.inject.AbstractModule;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.ControllerModule;
-
 import java.util.Properties;
 
 public class InMemoryDefaultTestModule extends AbstractModule {
   Properties properties = new Properties();
 
-
   @Override
   protected void configure() {
     properties.setProperty(Configuration.SERVER_PERSISTENCE_TYPE_KEY, "in-memory");

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RequestScheduleDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RequestScheduleDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RequestScheduleDAOTest.java
index 1c6bdb7..4c0d656 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RequestScheduleDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RequestScheduleDAOTest.java
@@ -26,8 +26,9 @@ import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
-import org.apache.ambari.server.orm.entities.RequestScheduleBatchHostEntity;
+import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity;
 import org.apache.ambari.server.orm.entities.RequestScheduleEntity;
+import org.apache.ambari.server.state.scheduler.BatchRequest;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -39,7 +40,10 @@ public class RequestScheduleDAOTest {
   private HostDAO hostDAO;
   private ClusterDAO clusterDAO;
   private RequestScheduleDAO requestScheduleDAO;
-  private RequestScheduleBatchHostDAO batchHostDAO;
+  private RequestScheduleBatchRequestDAO batchRequestDAO;
+  private String testUri = "http://localhost/blah";
+  private String testBody = "ValidJson";
+  private String testType = BatchRequest.Type.POST.name();
 
   @Before
   public void setup() throws Exception {
@@ -50,7 +54,7 @@ public class RequestScheduleDAOTest {
     hostDAO = injector.getInstance(HostDAO.class);
     clusterDAO = injector.getInstance(ClusterDAO.class);
     requestScheduleDAO = injector.getInstance(RequestScheduleDAO.class);
-    batchHostDAO = injector.getInstance(RequestScheduleBatchHostDAO.class);
+    batchRequestDAO = injector.getInstance(RequestScheduleBatchRequestDAO.class);
   }
 
   @After
@@ -67,13 +71,7 @@ public class RequestScheduleDAOTest {
 
     scheduleEntity.setClusterEntity(clusterEntity);
     scheduleEntity.setClusterId(clusterEntity.getClusterId());
-    scheduleEntity.setRequestContext("Test");
     scheduleEntity.setStatus("SCHEDULED");
-    scheduleEntity.setTargetType("ACTION");
-    scheduleEntity.setTargetName("REBALANCE");
-    scheduleEntity.setTargetService("HDFS");
-    scheduleEntity.setTargetComponent("DATANODE");
-    scheduleEntity.setBatchRequestByHost(false);
     scheduleEntity.setMinutes("30");
     scheduleEntity.setHours("12");
     scheduleEntity.setDayOfWeek("*");
@@ -87,17 +85,21 @@ public class RequestScheduleDAOTest {
     hostEntity.setOsType("centOS");
     hostDAO.create(hostEntity);
 
-    RequestScheduleBatchHostEntity batchHostEntity = new
-      RequestScheduleBatchHostEntity();
+    RequestScheduleBatchRequestEntity batchRequestEntity = new
+      RequestScheduleBatchRequestEntity();
 
-    batchHostEntity.setBatchId(1L);
-    batchHostEntity.setScheduleId(scheduleEntity.getScheduleId());
-    batchHostEntity.setRequestScheduleEntity(scheduleEntity);
-    batchHostEntity.setHostName(hostEntity.getHostName());
-    batchHostEntity.setRequestScheduleEntity(scheduleEntity);
-    batchHostDAO.create(batchHostEntity);
+    batchRequestEntity.setBatchId(1L);
+    batchRequestEntity.setScheduleId(scheduleEntity.getScheduleId());
+    batchRequestEntity.setRequestScheduleEntity(scheduleEntity);
+    batchRequestEntity.setRequestScheduleEntity(scheduleEntity);
+    batchRequestEntity.setRequestType(testType);
+    batchRequestEntity.setRequestUri(testUri);
+    batchRequestEntity.setRequestBody(testBody);
 
-    scheduleEntity.getRequestScheduleBatchHostEntities().add(batchHostEntity);
+    batchRequestDAO.create(batchRequestEntity);
+
+    scheduleEntity.getRequestScheduleBatchRequestEntities().add
+      (batchRequestEntity);
     scheduleEntity = requestScheduleDAO.merge(scheduleEntity);
 
     return scheduleEntity;
@@ -109,12 +111,13 @@ public class RequestScheduleDAOTest {
 
     Assert.assertTrue(scheduleEntity.getScheduleId() > 0);
     Assert.assertEquals("SCHEDULED", scheduleEntity.getStatus());
-    Assert.assertEquals("REBALANCE", scheduleEntity.getTargetName());
-    Assert.assertEquals("HDFS", scheduleEntity.getTargetService());
-    Assert.assertEquals(false, scheduleEntity.getIsBatchRequestByHost());
     Assert.assertEquals("12", scheduleEntity.getHours());
-    Assert.assertEquals("h1", scheduleEntity
-      .getRequestScheduleBatchHostEntities().iterator().next().getHostName());
+    RequestScheduleBatchRequestEntity batchRequestEntity = scheduleEntity
+      .getRequestScheduleBatchRequestEntities().iterator().next();
+    Assert.assertNotNull(batchRequestEntity);
+    Assert.assertEquals(testUri, batchRequestEntity.getRequestUri());
+    Assert.assertEquals(testType, batchRequestEntity.getRequestType());
+    Assert.assertEquals(testBody, batchRequestEntity.getRequestBody());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionSchedulerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionSchedulerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionSchedulerTest.java
new file mode 100644
index 0000000..f37da29
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionSchedulerTest.java
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.scheduler;
+
+import junit.framework.Assert;
+import org.apache.ambari.server.configuration.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.easymock.PowerMock;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.quartz.Scheduler;
+import org.quartz.impl.StdSchedulerFactory;
+import java.util.Properties;
+import static org.easymock.EasyMock.expect;
+import static org.mockito.Mockito.spy;
+import static org.powermock.api.easymock.PowerMock.createNiceMock;
+import static org.powermock.api.easymock.PowerMock.expectNew;
+import static org.powermock.api.easymock.PowerMock.expectPrivate;
+
+// Unit tests for ExecutionSchedulerImpl: verifies the Quartz scheduler
+// properties derived from the Ambari Configuration, and the start/stop
+// lifecycle. Construction of StdSchedulerFactory is intercepted with
+// PowerMock (hence @PrepareForTest on ExecutionSchedulerImpl).
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ ExecutionSchedulerImpl.class })
+@PowerMockIgnore("javax.management.*")
+public class ExecutionSchedulerTest {
+
+  private Configuration configuration;
+
+  @Before
+  public void setup() throws Exception {
+    // Minimal Configuration carrying the scheduler thread/connection
+    // settings plus the JDBC properties the Quartz job store needs.
+    Properties properties = new Properties();
+    properties.setProperty(Configuration.EXECUTION_SCHEDULER_THREADS, "2");
+    properties.setProperty(Configuration.EXECUTION_SCHEDULER_CLUSTERED, "false");
+    properties.setProperty(Configuration.EXECUTION_SCHEDULER_CONNECTIONS, "2");
+    properties.setProperty(Configuration.SERVER_JDBC_DRIVER_KEY, "db.driver");
+    properties.setProperty(Configuration.SERVER_JDBC_URL_KEY, "db.url");
+    properties.setProperty(Configuration.SERVER_JDBC_USER_NAME_KEY, "user");
+    // The password property holds a path to a file containing the password
+    // (test resource password.dat), not the password itself.
+    properties.setProperty(Configuration.SERVER_JDBC_USER_PASSWD_KEY,
+      "ambari-server/src/test/resources/password.dat");
+    properties.setProperty(Configuration.SERVER_DB_NAME_KEY, "derby");
+
+    this.configuration = new Configuration(properties);
+
+  }
+
+  @After
+  public void teardown() throws Exception {
+    // Nothing to clean up; each test builds its own scheduler instance.
+  }
+
+
+  // Asserts that Configuration values are mapped onto the expected
+  // org.quartz.* property keys by getQuartzSchedulerProperties().
+  @Test
+  public void testSchedulerInitialize() throws Exception {
+
+    // NOTE(review): Mockito spy() is mixed with PowerMock/EasyMock here and
+    // the spy is never stubbed or verified — a plain constructor call would
+    // appear to suffice; confirm before relying on spy semantics.
+    ExecutionSchedulerImpl executionScheduler = spy(new ExecutionSchedulerImpl(configuration));
+
+    Properties actualProperties = executionScheduler
+      .getQuartzSchedulerProperties();
+
+    Assert.assertEquals("2", actualProperties.getProperty("org.quartz.threadPool.threadCount"));
+    Assert.assertEquals("2", actualProperties.getProperty("org.quartz.dataSource.myDS.maxConnections"));
+    Assert.assertEquals("false", actualProperties.getProperty("org.quartz.jobStore.isClustered"));
+    Assert.assertEquals("org.quartz.impl.jdbcjobstore.StdJDBCDelegate",
+      actualProperties.getProperty("org.quartz.jobStore.driverDelegateClass"));
+    Assert.assertEquals("select 0",
+      actualProperties.getProperty("org.quartz.dataSource.myDS.validationQuery"));
+    Assert.assertEquals(ExecutionSchedulerImpl.DEFAULT_SCHEDULER_NAME,
+      actualProperties.getProperty("org.quartz.scheduler.instanceName"));
+    Assert.assertEquals("org.quartz.simpl.SimpleThreadPool",
+      actualProperties.getProperty("org.quartz.threadPool.class"));
+  }
+
+  // Verifies startScheduler()/stopScheduler() delegate to the Quartz
+  // Scheduler obtained from StdSchedulerFactory; the factory's constructor
+  // is replaced via expectNew, and start/shutdown are expected exactly once.
+  @Test
+  public void testSchedulerStartStop() throws Exception {
+    StdSchedulerFactory factory = createNiceMock(StdSchedulerFactory.class);
+    Scheduler scheduler = createNiceMock(Scheduler.class);
+
+    expect(factory.getScheduler()).andReturn(scheduler);
+    expectPrivate(scheduler, "start").once();
+    expectNew(StdSchedulerFactory.class).andReturn(factory);
+    expectPrivate(scheduler, "shutdown").once();
+
+    PowerMock.replay(factory, StdSchedulerFactory.class, scheduler);
+
+    ExecutionSchedulerImpl executionScheduler = new ExecutionSchedulerImpl(configuration);
+
+    executionScheduler.startScheduler();
+    executionScheduler.stopScheduler();
+
+    PowerMock.verify(factory, StdSchedulerFactory.class, scheduler);
+
+    Assert.assertTrue(executionScheduler.isInitialized());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/java/org/apache/ambari/server/state/RequestExecutionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/RequestExecutionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/RequestExecutionTest.java
new file mode 100644
index 0000000..aba7e70
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/RequestExecutionTest.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import com.google.inject.persist.Transactional;
+import junit.framework.Assert;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.RequestScheduleDAO;
+import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity;
+import org.apache.ambari.server.orm.entities.RequestScheduleEntity;
+import org.apache.ambari.server.state.scheduler.Batch;
+import org.apache.ambari.server.state.scheduler.BatchRequest;
+import org.apache.ambari.server.state.scheduler.BatchSettings;
+import org.apache.ambari.server.state.scheduler.RequestExecution;
+import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
+import org.apache.ambari.server.state.scheduler.Schedule;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+// Integration-style tests for RequestExecution persistence backed by the
+// in-memory test database: create, read, update, and delete of request
+// schedules and their associated batch request entities.
+public class RequestExecutionTest {
+  private Injector injector;
+  private Clusters clusters;
+  private Cluster cluster;
+  private String clusterName;
+  private AmbariMetaInfo metaInfo;
+  private RequestExecutionFactory requestExecutionFactory;
+  private RequestScheduleDAO requestScheduleDAO;
+
+  @Before
+  public void setup() throws Exception {
+    // Fresh in-memory persistence context, one cluster ("foo") on HDP-0.1,
+    // and three persisted hosts h1..h3.
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+    clusters = injector.getInstance(Clusters.class);
+    metaInfo = injector.getInstance(AmbariMetaInfo.class);
+    requestExecutionFactory = injector.getInstance(RequestExecutionFactory.class);
+    requestScheduleDAO = injector.getInstance(RequestScheduleDAO.class);
+
+    metaInfo.init();
+    clusterName = "foo";
+    clusters.addCluster(clusterName);
+    cluster = clusters.getCluster(clusterName);
+    cluster.setDesiredStackVersion(new StackId("HDP-0.1"));
+    Assert.assertNotNull(cluster);
+    clusters.addHost("h1");
+    clusters.addHost("h2");
+    clusters.addHost("h3");
+    Assert.assertNotNull(clusters.getHost("h1"));
+    Assert.assertNotNull(clusters.getHost("h2"));
+    Assert.assertNotNull(clusters.getHost("h3"));
+    clusters.getHost("h1").persist();
+    clusters.getHost("h2").persist();
+    clusters.getHost("h3").persist();
+  }
+
+  @After
+  public void teardown() throws Exception {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  // Builds and persists a RequestExecution with two batch requests
+  // (DELETE testUri1, POST testUri2+body), a failure-tolerance limit of 10,
+  // and a schedule (every "10" minutes, fixed end time).
+  // NOTE(review): @Transactional via Guice AOP typically does not intercept
+  // private methods — confirm the annotation has any effect here.
+  @Transactional
+  private RequestExecution createRequestSchedule() throws Exception {
+    Batch batches = new Batch();
+    Schedule schedule = new Schedule();
+
+    BatchSettings batchSettings = new BatchSettings();
+    batchSettings.setTaskFailureToleranceLimit(10);
+    batches.setBatchSettings(batchSettings);
+
+    List<BatchRequest> batchRequests = new ArrayList<BatchRequest>();
+    BatchRequest batchRequest1 = new BatchRequest();
+    batchRequest1.setOrderId(10L);
+    batchRequest1.setType(BatchRequest.Type.DELETE);
+    batchRequest1.setUri("testUri1");
+
+    BatchRequest batchRequest2 = new BatchRequest();
+    batchRequest2.setOrderId(12L);
+    batchRequest2.setType(BatchRequest.Type.POST);
+    batchRequest2.setUri("testUri2");
+    batchRequest2.setBody("testBody");
+
+    batchRequests.add(batchRequest1);
+    batchRequests.add(batchRequest2);
+
+    batches.getBatchRequests().addAll(batchRequests);
+
+    schedule.setMinutes("10");
+    schedule.setEndTime("2014-01-01 00:00:00");
+
+    RequestExecution requestExecution = requestExecutionFactory.createNew
+      (cluster, batches, schedule);
+    requestExecution.setDescription("Test Schedule");
+
+    requestExecution.persist();
+
+    return requestExecution;
+  }
+
+  @Test
+  public void testCreateRequestSchedule() throws Exception {
+    RequestExecution requestExecution = createRequestSchedule();
+    Assert.assertNotNull(requestExecution);
+
+    RequestScheduleEntity scheduleEntity = requestScheduleDAO.findById
+      (requestExecution.getId());
+
+    Assert.assertNotNull(scheduleEntity);
+    Assert.assertEquals(requestExecution.getBatch().getBatchSettings()
+      .getTaskFailureToleranceLimit(), scheduleEntity.getBatchTolerationLimit());
+    // NOTE(review): arguments are (actual, expected); JUnit convention is
+    // (expected, actual) — failure messages will be misleading.
+    Assert.assertEquals(scheduleEntity.getRequestScheduleBatchRequestEntities().size(), 2);
+    Collection<RequestScheduleBatchRequestEntity> batchRequestEntities =
+      scheduleEntity.getRequestScheduleBatchRequestEntities();
+    Assert.assertNotNull(batchRequestEntities);
+    RequestScheduleBatchRequestEntity reqEntity1 = null;
+    RequestScheduleBatchRequestEntity reqEntity2 = null;
+    for (RequestScheduleBatchRequestEntity reqEntity : batchRequestEntities) {
+      if (reqEntity.getRequestUri().equals("testUri1")) {
+        reqEntity1 = reqEntity;
+      } else if (reqEntity.getRequestUri().equals("testUri2")) {
+        reqEntity2 = reqEntity;
+      }
+    }
+    Assert.assertNotNull(reqEntity1);
+    Assert.assertNotNull(reqEntity2);
+    // The test expects persist() to assign sequential batch ids (1, 2)
+    // independent of the supplied order ids (10, 12).
+    Assert.assertEquals(Long.valueOf(1L), reqEntity1.getBatchId());
+    Assert.assertEquals(Long.valueOf(2L), reqEntity2.getBatchId());
+    Assert.assertEquals(BatchRequest.Type.DELETE.name(), reqEntity1.getRequestType());
+    Assert.assertEquals(BatchRequest.Type.POST.name(), reqEntity2.getRequestType());
+    Assert.assertEquals(requestExecution.getSchedule().getMinutes(),
+      scheduleEntity.getMinutes());
+    Assert.assertEquals(requestExecution.getSchedule().getEndTime(),
+      scheduleEntity.getEndTime());
+  }
+
+  @Test
+  public void testUpdateRequestSchedule() throws Exception {
+    RequestExecution requestExecution = createRequestSchedule();
+    Assert.assertNotNull(requestExecution);
+    Long id = requestExecution.getId();
+    RequestScheduleEntity scheduleEntity = requestScheduleDAO.findById(id);
+    Assert.assertNotNull(scheduleEntity);
+
+    // Read from DB
+    requestExecution = requestExecutionFactory.createExisting(cluster,
+      scheduleEntity);
+
+    // Replace the original batch requests with a new set (PUT/POST).
+    Batch batches = new Batch();
+
+    List<BatchRequest> batchRequests = new ArrayList<BatchRequest>();
+    BatchRequest batchRequest1 = new BatchRequest();
+    batchRequest1.setOrderId(10L);
+    batchRequest1.setType(BatchRequest.Type.PUT);
+    batchRequest1.setUri("testUri3");
+
+    BatchRequest batchRequest2 = new BatchRequest();
+    batchRequest2.setOrderId(12L);
+    batchRequest2.setType(BatchRequest.Type.POST);
+    batchRequest2.setUri("testUri4");
+    batchRequest2.setBody("testBody");
+
+    batchRequests.add(batchRequest1);
+    batchRequests.add(batchRequest2);
+
+    batches.getBatchRequests().addAll(batchRequests);
+
+
+    requestExecution.setBatch(batches);
+
+    // Change schedule
+    requestExecution.getSchedule().setHours("11");
+
+    // Save
+    requestExecution.persist();
+
+    // Re-read and verify the old batch requests were replaced, batch ids
+    // reassigned from 1, and the schedule change saved.
+    scheduleEntity = requestScheduleDAO.findById(id);
+    Assert.assertNotNull(scheduleEntity);
+    Collection<RequestScheduleBatchRequestEntity> batchRequestEntities =
+      scheduleEntity.getRequestScheduleBatchRequestEntities();
+    Assert.assertNotNull(batchRequestEntities);
+    RequestScheduleBatchRequestEntity reqEntity1 = null;
+    RequestScheduleBatchRequestEntity reqEntity2 = null;
+    for (RequestScheduleBatchRequestEntity reqEntity : batchRequestEntities) {
+      if (reqEntity.getRequestUri().equals("testUri3")) {
+        reqEntity1 = reqEntity;
+      } else if (reqEntity.getRequestUri().equals("testUri4")) {
+        reqEntity2 = reqEntity;
+      }
+    }
+    Assert.assertNotNull(reqEntity1);
+    Assert.assertNotNull(reqEntity2);
+    Assert.assertEquals(Long.valueOf(1L), reqEntity1.getBatchId());
+    Assert.assertEquals(Long.valueOf(2L), reqEntity2.getBatchId());
+    Assert.assertEquals(BatchRequest.Type.PUT.name(), reqEntity1.getRequestType());
+    Assert.assertEquals(BatchRequest.Type.POST.name(), reqEntity2.getRequestType());
+    Assert.assertEquals("11", scheduleEntity.getHours());
+  }
+
+  @Test
+  public void testGetRequestSchedule() throws Exception {
+    RequestExecution requestExecution = createRequestSchedule();
+    Assert.assertNotNull(requestExecution);
+
+    RequestScheduleEntity scheduleEntity = requestScheduleDAO.findById
+      (requestExecution.getId());
+    Assert.assertNotNull(scheduleEntity);
+
+    // The schedule must also be reachable through the cluster's map of
+    // request executions, not just the DAO.
+    Assert.assertNotNull(cluster.getAllRequestExecutions().get
+      (requestExecution.getId()));
+
+    Assert.assertNotNull(scheduleEntity);
+    Assert.assertEquals(requestExecution.getBatch().getBatchSettings()
+      .getTaskFailureToleranceLimit(), scheduleEntity.getBatchTolerationLimit());
+    Assert.assertEquals(scheduleEntity.getRequestScheduleBatchRequestEntities().size(), 2);
+    Collection<RequestScheduleBatchRequestEntity> batchRequestEntities =
+      scheduleEntity.getRequestScheduleBatchRequestEntities();
+    Assert.assertNotNull(batchRequestEntities);
+    RequestScheduleBatchRequestEntity reqEntity1 = null;
+    RequestScheduleBatchRequestEntity reqEntity2 = null;
+    for (RequestScheduleBatchRequestEntity reqEntity : batchRequestEntities) {
+      if (reqEntity.getRequestUri().equals("testUri1")) {
+        reqEntity1 = reqEntity;
+      } else if (reqEntity.getRequestUri().equals("testUri2")) {
+        reqEntity2 = reqEntity;
+      }
+    }
+    Assert.assertNotNull(reqEntity1);
+    Assert.assertNotNull(reqEntity2);
+    Assert.assertEquals(Long.valueOf(1L), reqEntity1.getBatchId());
+    Assert.assertEquals(Long.valueOf(2L), reqEntity2.getBatchId());
+    Assert.assertEquals(BatchRequest.Type.DELETE.name(), reqEntity1.getRequestType());
+    Assert.assertEquals(BatchRequest.Type.POST.name(), reqEntity2.getRequestType());
+    Assert.assertEquals(requestExecution.getSchedule().getMinutes(),
+      scheduleEntity.getMinutes());
+    Assert.assertEquals(requestExecution.getSchedule().getEndTime(),
+      scheduleEntity.getEndTime());
+  }
+
+  @Test
+  public void testDeleteRequestSchedule() throws Exception {
+    RequestExecution requestExecution = createRequestSchedule();
+    Assert.assertNotNull(requestExecution);
+
+    Long id = requestExecution.getId();
+
+    requestExecution.delete();
+
+    // Deletion must remove the entity from both the DAO and the cluster map.
+    Assert.assertNull(requestScheduleDAO.findById(id));
+    Assert.assertNull(cluster.getAllRequestExecutions().get(id));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5dcea372/ambari-server/src/test/resources/password.dat
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/password.dat b/ambari-server/src/test/resources/password.dat
new file mode 100644
index 0000000..03ef607
--- /dev/null
+++ b/ambari-server/src/test/resources/password.dat
@@ -0,0 +1 @@
+bigdata
\ No newline at end of file


Mime
View raw message