chukwa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ey...@apache.org
Subject svn commit: r783533 - in /hadoop/chukwa/trunk: ./ bin/ contrib/chukwa-pig/ script/ script/pig/
Date Wed, 10 Jun 2009 21:54:01 GMT
Author: eyang
Date: Wed Jun 10 21:54:00 2009
New Revision: 783533

URL: http://svn.apache.org/viewvc?rev=783533&view=rev
Log:
CHUKWA-281. Created pig scripts to perform down sampling. (Cheng Zhang via Eric Yang)

Added:
    hadoop/chukwa/trunk/bin/downSampling.sh
    hadoop/chukwa/trunk/script/
    hadoop/chukwa/trunk/script/pig/
    hadoop/chukwa/trunk/script/pig/Df.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSDirectory.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSNamesystem.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_dfs_datanode.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_dfs_namenode.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_jvm_metrics.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_mapred_jobtracker.pig
    hadoop/chukwa/trunk/script/pig/Hadoop_rpc_metrics.pig
    hadoop/chukwa/trunk/script/pig/SystemMetrics.pig
Modified:
    hadoop/chukwa/trunk/CHANGES.txt
    hadoop/chukwa/trunk/build.xml
    hadoop/chukwa/trunk/contrib/chukwa-pig/chukwa-pig.jar

Modified: hadoop/chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/CHANGES.txt?rev=783533&r1=783532&r2=783533&view=diff
==============================================================================
--- hadoop/chukwa/trunk/CHANGES.txt (original)
+++ hadoop/chukwa/trunk/CHANGES.txt Wed Jun 10 21:54:00 2009
@@ -4,6 +4,8 @@
 
   NEW FEATURES
 
+    CHUKWA-281. Created pig scripts to perform down sampling. (Cheng Zhang via Eric Yang)
+
     CHUKWA-20. Added pig support for ChukwaRecords. (Jerome Boulon via Eric Yang)
 
     CHUKWA-279. Added swim lane visualization for Hadoop job progress. (Jiaqi Tan via Eric
Yang)

Added: hadoop/chukwa/trunk/bin/downSampling.sh
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/bin/downSampling.sh?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/bin/downSampling.sh (added)
+++ hadoop/chukwa/trunk/bin/downSampling.sh Wed Jun 10 21:54:00 2009
@@ -0,0 +1,115 @@
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# downSampling.sh: run the per-metric pig down-sampling scripts
# (script/pig/<recType>.pig) over one day of Chukwa data, once per
# (cluster, metric, sampling-interval) combination, then invoke PigMover
# to publish each result under /chukwa/postProcess/.  Can also install or
# remove a daily 03:00 cron entry for itself (-n add|remove).
#
# Environment (CHUKWA_HOME, CHUKWA_CONF_DIR, CHUKWA_CORE, CHUKWA_LOG_DIR,
# HADOOP_HOME, HADOOP_CONF_DIR, HADOOP_JAR, JAVA_HOME, COMMON, ...) is
# expected to come from chukwa-config.sh, sourced below.

if [ "$1" = '-h' ]; then
        echo "command line options:"
        echo "  -c <space separated cluster names> "
        echo "     optional. if no cluster specified, hadoop dfs command will be used to get all clusters"
        echo "  -d <yyyyMMdd>"
        echo "     optional. if no date is given, yesterday will be used"
        echo "  -h help"
        echo "  -m <space separated metrics list>"
        echo "     optional. Default value is 'SystemMetrics Hadoop_dfs_namenode Hadoop_dfs_FSDirectory Hadoop_dfs_datanode Hadoop_rpc_metrics Hadoop_mapred_jobtracker Hadoop_jvm_metrics Hadoop_dfs_FSNamesystem Df'"
        echo "  -t times"
        echo "     optional. default value is '5 30 180 720'"
        echo "  -n <add|remove>"
        echo "     Add/remove cron jobs."
        echo "     optional."
        echo "     add: add cron entry"
        echo "     remove: remove cron entry"
        exit
fi

# NOTE(review): pid is never read below; kept in case chukwa-config.sh
# (sourced next) relies on it -- confirm before removing.
pid=$$

bin=$(dirname "$0")
bin=$(cd "$bin" && pwd)

. "$bin"/chukwa-config.sh

# Leading ':' puts getopts in silent mode: unrecognized flags -- such as
# the '--config <dir>' argument the cron entry below passes -- are ignored
# rather than reported, preserving the original behavior.
while getopts ":c:d:m:n:t:" OPTION
do
        case $OPTION in
        c)
                clusters=$OPTARG
                ;;
        d)
                day=$OPTARG
                ;;
        m)
                metricsList=$OPTARG
                ;;
        n)
                croncmd=$OPTARG
                ;;
        t)
                timeList=$OPTARG
                ;;
        esac
done

# Default to yesterday; 'date -d' is a GNU date extension.
if [ -z "$day" ]; then
        day=$(date -d yesterday +%Y%m%d)
fi

# Default cluster list: one entry per directory under /chukwa/repos in HDFS.
if [ -z "$clusters" ]; then
        clusters=$("${HADOOP_HOME}/bin/hadoop" --config "${HADOOP_CONF_DIR}" dfs -ls /chukwa/repos | grep "/chukwa/repos" | cut -f 4 -d "/")
fi

# Down-sampling intervals, in minutes.
if [ -z "$timeList" ]; then
        timeList="5 30 180 720"
fi

if [ -z "$metricsList" ]; then
        metricsList="SystemMetrics Hadoop_dfs_namenode Hadoop_dfs_FSDirectory Hadoop_dfs_datanode Hadoop_rpc_metrics Hadoop_mapred_jobtracker Hadoop_jvm_metrics Hadoop_dfs_FSNamesystem Df"
fi

# Add or remove the daily (03:00) cron entry, show the resulting crontab,
# and exit.  A private mktemp file replaces the original world-shared
# /tmp/crons, which was both a race and a symlink hazard.
if [ -n "$croncmd" ]; then
        crontab_file=$(mktemp) || exit 1
        if [ "$croncmd" = "add" ]; then
                crontab -l | grep -v downSampling > "$crontab_file"
                echo "0 3 * * * ${CHUKWA_HOME}/bin/downSampling.sh --config ${CHUKWA_CONF_DIR}" >> "$crontab_file"
                crontab < "$crontab_file"
        fi
        if [ "$croncmd" = "remove" ]; then
                crontab -l | grep -v downSampling > "$crontab_file"
                crontab < "$crontab_file"
        fi
        rm -f "$crontab_file"
        crontab -l
        exit
fi

for cluster in $clusters
do
        for recType in $metricsList
        do
                for time in $timeList
                do
                        pig="${CHUKWA_HOME}/script/pig/${recType}.pig"
                        # minutes -> milliseconds; $(( )) avoids forking expr.
                        timePeriod=$((time * 60000))
                        newRecType="${recType}-${time}"
                        input="/chukwa/repos/$cluster/$recType/$day/*/*/*.evt"
                        # epoch-seconds suffix keeps reruns from colliding.
                        uniqdir="/chukwa/pig/${cluster}_${newRecType}_${day}_$(date +%s)"
                        output="$uniqdir/${day}.D"

                        echo "${CHUKWA_CONF_DIR} cluster:$cluster day:$day pig:$pig recType:$recType newRecType:$newRecType time:$time timePeriod:$timePeriod input:$input output:$output"

                        # NOTE(review): both commands are built as flat strings
                        # and executed unquoted, so they depend on word
                        # splitting and on no parameter containing whitespace;
                        # the single quotes around input/output/recType reach
                        # pig literally and appear to be stripped by pig's
                        # -param handling -- confirm before restructuring.
                        cmd="${JAVA_HOME}/bin/java -DHADOOP_CONF_DIR=${HADOOP_CONF_DIR} -classpath ${CHUKWA_CORE}:${HADOOP_JAR}:${HADOOP_CONF_DIR}:${CHUKWA_HOME}/lib/pig.jar org.apache.pig.Main -param chukwaCore=${CHUKWA_CORE} -param chukwaPig=${CHUKWA_HOME}/chukwa-pig.jar -param input='${input}' -param output='${output}' -param recType='${newRecType}' -param timePeriod=${timePeriod} -param cluster=${cluster} ${pig}"
                        $cmd

                        # Publish the down-sampled output for post-processing.
                        cmd="${JAVA_HOME}/bin/java -DAPP=PigDownSampling -Dlog4j.configuration=chukwa-log4j.properties -DCHUKWA_HOME=${CHUKWA_HOME} -DCHUKWA_CONF_DIR=${CHUKWA_CONF_DIR} -DCHUKWA_LOG_DIR=${CHUKWA_LOG_DIR} -classpath ${CHUKWA_HOME}/chukwa-pig.jar:${HADOOP_CONF_DIR}:${CLASSPATH}:${CHUKWA_CORE}:${COMMON}:${HADOOP_JAR}:${CHUKWA_CONF_DIR} org.apache.hadoop.chukwa.tools.PigMover ${cluster} ${newRecType} ${uniqdir}  ${output} /chukwa/postProcess/"
                        $cmd
                done
        done
done

Modified: hadoop/chukwa/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/build.xml?rev=783533&r1=783532&r2=783533&view=diff
==============================================================================
--- hadoop/chukwa/trunk/build.xml (original)
+++ hadoop/chukwa/trunk/build.xml Wed Jun 10 21:54:00 2009
@@ -1088,6 +1088,8 @@
 		<mkdir dir="${build.dir}/${final.name}" />
 		<mkdir dir="${build.dir}/${final.name}/lib" />
 		<mkdir dir="${build.dir}/${final.name}/bin" />
+    <mkdir dir="${build.dir}/${final.name}/script" />
+    <mkdir dir="${build.dir}/${final.name}/script/pig" />
 		<mkdir dir="${build.dir}/${final.name}/docs" />
 		<mkdir dir="${build.dir}/${final.name}/opt" />
 		<mkdir dir="${build.dir}/${final.name}/tools" />
@@ -1115,6 +1117,10 @@
 			<fileset dir="${basedir}/bin" />
 		</copy>
 
+    <copy todir="${build.dir}/${final.name}/script/pig">
+      <fileset dir="${basedir}/script/pig" />
+    </copy>
+
 		<copy todir="${build.dir}/${final.name}/conf">
 			<fileset dir="${build.dir}/conf">
 				<exclude name="hadoop-metrics.properties" />

Modified: hadoop/chukwa/trunk/contrib/chukwa-pig/chukwa-pig.jar
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/contrib/chukwa-pig/chukwa-pig.jar?rev=783533&r1=783532&r2=783533&view=diff
==============================================================================
Binary files - no diff available.

Added: hadoop/chukwa/trunk/script/pig/Df.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Df.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Df.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Df.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Df.pig: down-sample 'Df' (disk usage) ChukwaRecords.  Parameters
-- ($chukwaCore, $chukwaPig, $input, $output, $recType, $timePeriod,
-- $cluster) are supplied by bin/downSampling.sh via 'pig -param'.
-- Rows are bucketed by TimePartition($timePeriod), grouped per
-- (time bucket, source host, mount point), averaged, and stored with
-- ChukwaStorage.  NOTE(review): the '+' prefixes and mid-token line
-- wraps below are artifacts of the mail-archive diff rendering, not
-- part of the script.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Df_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Df_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' ,'Mounted on' , 'Available', 'Use%', 'Used');
+A_Df_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Df_$timePeriod = FOREACH A_Df_$timePeriod GENERATE timePartition_Df_$timePeriod(ts) as
time ,fields#'csource' as g0 ,fields#'Mounted on' as g1 , fields#'Available' as f0, fields#'Use%'
as f1, fields#'Used' as f2;
+C_Df_$timePeriod = group B_Df_$timePeriod by (time,g0 ,g1 );
+D_Df_$timePeriod = FOREACH C_Df_$timePeriod generate group.time as ts, '$recType', 'downsampling
$timePeriod', '$cluster', group.g0 , group.g1 , AVG(B_Df_$timePeriod.f0) as f0, AVG(B_Df_$timePeriod.f1)
as f1, AVG(B_Df_$timePeriod.f2) as f2;
+-- describe D_Df_$timePeriod;
+-- dump D_Df_$timePeriod;
+store D_Df_$timePeriod into '$output' using seqWriter_Df_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSDirectory.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSDirectory.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSDirectory.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSDirectory.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_dfs_FSDirectory.pig: down-sample the 'files_deleted' metric of
-- Hadoop_dfs_FSDirectory ChukwaRecords.  Parameters come from
-- bin/downSampling.sh via 'pig -param'.  Rows are bucketed with
-- TimePartition($timePeriod), grouped per (time bucket, source host),
-- averaged, and stored with ChukwaStorage.  NOTE(review): the '+'
-- prefixes and line wraps below are mail-archive diff artifacts.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_dfs_FSDirectory_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_dfs_FSDirectory_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' , 'files_deleted');
+A_Hadoop_dfs_FSDirectory_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_dfs_FSDirectory_$timePeriod = FOREACH A_Hadoop_dfs_FSDirectory_$timePeriod GENERATE
timePartition_Hadoop_dfs_FSDirectory_$timePeriod(ts) as time ,fields#'csource' as g0 , fields#'files_deleted'
as f0;
+C_Hadoop_dfs_FSDirectory_$timePeriod = group B_Hadoop_dfs_FSDirectory_$timePeriod by (time,g0
);
+D_Hadoop_dfs_FSDirectory_$timePeriod = FOREACH C_Hadoop_dfs_FSDirectory_$timePeriod generate
group.time as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_Hadoop_dfs_FSDirectory_$timePeriod.f0)
as f0;
+-- describe D_Hadoop_dfs_FSDirectory_$timePeriod;
+-- dump D_Hadoop_dfs_FSDirectory_$timePeriod;
+store D_Hadoop_dfs_FSDirectory_$timePeriod into '$output' using seqWriter_Hadoop_dfs_FSDirectory_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSNamesystem.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSNamesystem.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSNamesystem.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_dfs_FSNamesystem.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_dfs_FSNamesystem.pig: down-sample Hadoop_dfs_FSNamesystem
-- ChukwaRecords; parameters come from bin/downSampling.sh via -param.
-- NOTE(review): the single field key below looks like the generator
-- lost its separators (compare sibling scripts, where BlocksTotal,
-- CapacityRemainingGB, ... are distinct quoted keys); a map lookup on
-- this concatenated key will likely never match -- confirm against the
-- script generator.  The '+' prefixes and line wraps are mail-archive
-- diff artifacts.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_dfs_FSNamesystem_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_dfs_FSNamesystem_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' , 'BlocksTotalCapacityRemainingGBCapacityTotalGBCapacityUsedGBFilesTotalPendingReplicationBlocksScheduledReplicationBlocksTotalLoadUnderReplicatedBlocks');
+A_Hadoop_dfs_FSNamesystem_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_dfs_FSNamesystem_$timePeriod = FOREACH A_Hadoop_dfs_FSNamesystem_$timePeriod GENERATE
timePartition_Hadoop_dfs_FSNamesystem_$timePeriod(ts) as time ,fields#'csource' as g0 , fields#'BlocksTotalCapacityRemainingGBCapacityTotalGBCapacityUsedGBFilesTotalPendingReplicationBlocksScheduledReplicationBlocksTotalLoadUnderReplicatedBlocks'
as f0;
+C_Hadoop_dfs_FSNamesystem_$timePeriod = group B_Hadoop_dfs_FSNamesystem_$timePeriod by (time,g0
);
+D_Hadoop_dfs_FSNamesystem_$timePeriod = FOREACH C_Hadoop_dfs_FSNamesystem_$timePeriod generate
group.time as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_Hadoop_dfs_FSNamesystem_$timePeriod.f0)
as f0;
+-- describe D_Hadoop_dfs_FSNamesystem_$timePeriod;
+-- dump D_Hadoop_dfs_FSNamesystem_$timePeriod;
+store D_Hadoop_dfs_FSNamesystem_$timePeriod into '$output' using seqWriter_Hadoop_dfs_FSNamesystem_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_dfs_datanode.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_dfs_datanode.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_dfs_datanode.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_dfs_datanode.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_dfs_datanode.pig: down-sample 26 datanode metrics (f0..f25);
-- parameters come from bin/downSampling.sh via 'pig -param'.  Rows are
-- bucketed with TimePartition($timePeriod), grouped per (time bucket,
-- host), averaged, and stored with ChukwaStorage.
-- NOTE(review): this script groups on fields#'hostname' while every
-- sibling script uses fields#'csource' -- confirm whether that is
-- intentional.  The '+' prefixes, mid-token wraps, and space-stuffed
-- continuation lines below are mail-archive diff artifacts.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_dfs_datanode_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_dfs_datanode_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' , 'replaceblockop_avg_time', 'block_verification_failures',
'readmetadataop_avg_time', 'blocks_read', 'blocks_written', 'readblockop_avg_time', 'writes_from_remote_client',
'blocks_replicated', 'replaceblockop_num_ops', 'bytes_read', 'bytes_written', 'writeblockop_num_ops',
'reads_from_remote_client', 'readblockop_num_ops', 'copyblockop_avg_time', 'blockreports_avg_time',
'heartbeats_num_ops', 'writeblockop_avg_time', 'reads_from_local_client', 'blocks_verified',
'writes_from_local_client', 'heartbeats_avg_time', 'copyblockop_num_ops', 'readmetadataop_num_ops',
'blocks_removed', 'blockreports_num_ops');
+A_Hadoop_dfs_datanode_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_dfs_datanode_$timePeriod = FOREACH A_Hadoop_dfs_datanode_$timePeriod GENERATE timePartition_Hadoop_dfs_datanode_$timePeriod(ts)
as time ,fields#'hostname' as g0 , fields#'replaceblockop_avg_time' as f0, fields#'block_verification_failures'
as f1, fields#'readmetadataop_avg_time' as f2, fields#'blocks_read' as f3, fields#'blocks_written'
as f4, fields#'readblockop_avg_time' as f5, fields#'writes_from_remote_client' as f6, fields#'blocks_replicated'
as f7, fields#'replaceblockop_num_ops' as f8, fields#'bytes_read' as f9, fields#'bytes_written'
as f10, fields#'writeblockop_num_ops' as f11, fields#'reads_from_remote_client' as f12, fields#'readblockop_num_ops'
as f13, fields#'copyblockop_avg_time' as f14, fields#'blockreports_avg_time' as f15, fields#'heartbeats_num_ops'
as f16, fields#'writeblockop_avg_time' as f17, fields#'reads_from_local_client' as f18, fields#'blocks_verified'
as f19, fields#'writes_from_local_client' as f20, fields#'heartbeats_avg_time' as f21, fi
 elds#'copyblockop_num_ops' as f22, fields#'readmetadataop_num_ops' as f23, fields#'blocks_removed'
as f24, fields#'blockreports_num_ops' as f25;
+C_Hadoop_dfs_datanode_$timePeriod = group B_Hadoop_dfs_datanode_$timePeriod by (time,g0 );
+D_Hadoop_dfs_datanode_$timePeriod = FOREACH C_Hadoop_dfs_datanode_$timePeriod generate group.time
as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_Hadoop_dfs_datanode_$timePeriod.f0)
as f0, AVG(B_Hadoop_dfs_datanode_$timePeriod.f1) as f1, AVG(B_Hadoop_dfs_datanode_$timePeriod.f2)
as f2, AVG(B_Hadoop_dfs_datanode_$timePeriod.f3) as f3, AVG(B_Hadoop_dfs_datanode_$timePeriod.f4)
as f4, AVG(B_Hadoop_dfs_datanode_$timePeriod.f5) as f5, AVG(B_Hadoop_dfs_datanode_$timePeriod.f6)
as f6, AVG(B_Hadoop_dfs_datanode_$timePeriod.f7) as f7, AVG(B_Hadoop_dfs_datanode_$timePeriod.f8)
as f8, AVG(B_Hadoop_dfs_datanode_$timePeriod.f9) as f9, AVG(B_Hadoop_dfs_datanode_$timePeriod.f10)
as f10, AVG(B_Hadoop_dfs_datanode_$timePeriod.f11) as f11, AVG(B_Hadoop_dfs_datanode_$timePeriod.f12)
as f12, AVG(B_Hadoop_dfs_datanode_$timePeriod.f13) as f13, AVG(B_Hadoop_dfs_datanode_$timePeriod.f14)
as f14, AVG(B_Hadoop_dfs_datanode_$timePeriod.f15) as f15, AVG(B_Hadoop_dfs_datanode
 _$timePeriod.f16) as f16, AVG(B_Hadoop_dfs_datanode_$timePeriod.f17) as f17, AVG(B_Hadoop_dfs_datanode_$timePeriod.f18)
as f18, AVG(B_Hadoop_dfs_datanode_$timePeriod.f19) as f19, AVG(B_Hadoop_dfs_datanode_$timePeriod.f20)
as f20, AVG(B_Hadoop_dfs_datanode_$timePeriod.f21) as f21, AVG(B_Hadoop_dfs_datanode_$timePeriod.f22)
as f22, AVG(B_Hadoop_dfs_datanode_$timePeriod.f23) as f23, AVG(B_Hadoop_dfs_datanode_$timePeriod.f24)
as f24, AVG(B_Hadoop_dfs_datanode_$timePeriod.f25) as f25;
+-- describe D_Hadoop_dfs_datanode_$timePeriod;
+-- dump D_Hadoop_dfs_datanode_$timePeriod;
+store D_Hadoop_dfs_datanode_$timePeriod into '$output' using seqWriter_Hadoop_dfs_datanode_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_dfs_namenode.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_dfs_namenode.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_dfs_namenode.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_dfs_namenode.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_dfs_namenode.pig: down-sample 16 namenode metrics (f0..f15);
-- parameters come from bin/downSampling.sh via 'pig -param'.  Rows are
-- bucketed with TimePartition($timePeriod), grouped per (time bucket,
-- source host), averaged, and stored with ChukwaStorage.
-- NOTE(review): the '+' prefixes and line wraps below are mail-archive
-- diff artifacts, not part of the script.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_dfs_namenode_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_dfs_namenode_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' , 'createfileops', 'addblockops', 'safemodetime',
'syncs_avg_time', 'blockreport_avg_time', 'filesrenamed', 'getlistingops', 'deletefileops',
'transactions_num_ops', 'fsimageloadtime', 'blockscorrupted', 'getblocklocations', 'filescreated',
'blockreport_num_ops', 'syncs_num_ops', 'transactions_avg_time');
+A_Hadoop_dfs_namenode_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_dfs_namenode_$timePeriod = FOREACH A_Hadoop_dfs_namenode_$timePeriod GENERATE timePartition_Hadoop_dfs_namenode_$timePeriod(ts)
as time ,fields#'csource' as g0 , fields#'createfileops' as f0, fields#'addblockops' as f1,
fields#'safemodetime' as f2, fields#'syncs_avg_time' as f3, fields#'blockreport_avg_time'
as f4, fields#'filesrenamed' as f5, fields#'getlistingops' as f6, fields#'deletefileops' as
f7, fields#'transactions_num_ops' as f8, fields#'fsimageloadtime' as f9, fields#'blockscorrupted'
as f10, fields#'getblocklocations' as f11, fields#'filescreated' as f12, fields#'blockreport_num_ops'
as f13, fields#'syncs_num_ops' as f14, fields#'transactions_avg_time' as f15;
+C_Hadoop_dfs_namenode_$timePeriod = group B_Hadoop_dfs_namenode_$timePeriod by (time,g0 );
+D_Hadoop_dfs_namenode_$timePeriod = FOREACH C_Hadoop_dfs_namenode_$timePeriod generate group.time
as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_Hadoop_dfs_namenode_$timePeriod.f0)
as f0, AVG(B_Hadoop_dfs_namenode_$timePeriod.f1) as f1, AVG(B_Hadoop_dfs_namenode_$timePeriod.f2)
as f2, AVG(B_Hadoop_dfs_namenode_$timePeriod.f3) as f3, AVG(B_Hadoop_dfs_namenode_$timePeriod.f4)
as f4, AVG(B_Hadoop_dfs_namenode_$timePeriod.f5) as f5, AVG(B_Hadoop_dfs_namenode_$timePeriod.f6)
as f6, AVG(B_Hadoop_dfs_namenode_$timePeriod.f7) as f7, AVG(B_Hadoop_dfs_namenode_$timePeriod.f8)
as f8, AVG(B_Hadoop_dfs_namenode_$timePeriod.f9) as f9, AVG(B_Hadoop_dfs_namenode_$timePeriod.f10)
as f10, AVG(B_Hadoop_dfs_namenode_$timePeriod.f11) as f11, AVG(B_Hadoop_dfs_namenode_$timePeriod.f12)
as f12, AVG(B_Hadoop_dfs_namenode_$timePeriod.f13) as f13, AVG(B_Hadoop_dfs_namenode_$timePeriod.f14)
as f14, AVG(B_Hadoop_dfs_namenode_$timePeriod.f15) as f15;
+-- describe D_Hadoop_dfs_namenode_$timePeriod;
+-- dump D_Hadoop_dfs_namenode_$timePeriod;
+store D_Hadoop_dfs_namenode_$timePeriod into '$output' using seqWriter_Hadoop_dfs_namenode_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_jvm_metrics.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_jvm_metrics.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_jvm_metrics.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_jvm_metrics.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_jvm_metrics.pig: down-sample 16 JVM metrics (f0..f15);
-- parameters come from bin/downSampling.sh via 'pig -param'.  Unlike
-- most sibling scripts, it groups on two keys: source host (g0) and
-- processName (g1).  Rows are bucketed with TimePartition($timePeriod),
-- averaged per group, and stored with ChukwaStorage.
-- NOTE(review): the '+' prefixes and line wraps below are mail-archive
-- diff artifacts, not part of the script.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_jvm_metrics_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_jvm_metrics_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' ,'processName' , 'memHeapCommittedM', 'logFatal',
'threadsWaiting', 'gcCount', 'threadsBlocked', 'logError', 'logWarn', 'memNonHeapCommittedM',
'gcTimeMillis', 'memNonHeapUsedM', 'logInfo', 'memHeapUsedM', 'threadsNew', 'threadsTerminated',
'threadsTimedWaiting', 'threadsRunnable');
+A_Hadoop_jvm_metrics_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_jvm_metrics_$timePeriod = FOREACH A_Hadoop_jvm_metrics_$timePeriod GENERATE timePartition_Hadoop_jvm_metrics_$timePeriod(ts)
as time ,fields#'csource' as g0 ,fields#'processName' as g1 , fields#'memHeapCommittedM' as
f0, fields#'logFatal' as f1, fields#'threadsWaiting' as f2, fields#'gcCount' as f3, fields#'threadsBlocked'
as f4, fields#'logError' as f5, fields#'logWarn' as f6, fields#'memNonHeapCommittedM' as f7,
fields#'gcTimeMillis' as f8, fields#'memNonHeapUsedM' as f9, fields#'logInfo' as f10, fields#'memHeapUsedM'
as f11, fields#'threadsNew' as f12, fields#'threadsTerminated' as f13, fields#'threadsTimedWaiting'
as f14, fields#'threadsRunnable' as f15;
+C_Hadoop_jvm_metrics_$timePeriod = group B_Hadoop_jvm_metrics_$timePeriod by (time,g0 ,g1
);
+D_Hadoop_jvm_metrics_$timePeriod = FOREACH C_Hadoop_jvm_metrics_$timePeriod generate group.time
as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , group.g1 , AVG(B_Hadoop_jvm_metrics_$timePeriod.f0)
as f0, AVG(B_Hadoop_jvm_metrics_$timePeriod.f1) as f1, AVG(B_Hadoop_jvm_metrics_$timePeriod.f2)
as f2, AVG(B_Hadoop_jvm_metrics_$timePeriod.f3) as f3, AVG(B_Hadoop_jvm_metrics_$timePeriod.f4)
as f4, AVG(B_Hadoop_jvm_metrics_$timePeriod.f5) as f5, AVG(B_Hadoop_jvm_metrics_$timePeriod.f6)
as f6, AVG(B_Hadoop_jvm_metrics_$timePeriod.f7) as f7, AVG(B_Hadoop_jvm_metrics_$timePeriod.f8)
as f8, AVG(B_Hadoop_jvm_metrics_$timePeriod.f9) as f9, AVG(B_Hadoop_jvm_metrics_$timePeriod.f10)
as f10, AVG(B_Hadoop_jvm_metrics_$timePeriod.f11) as f11, AVG(B_Hadoop_jvm_metrics_$timePeriod.f12)
as f12, AVG(B_Hadoop_jvm_metrics_$timePeriod.f13) as f13, AVG(B_Hadoop_jvm_metrics_$timePeriod.f14)
as f14, AVG(B_Hadoop_jvm_metrics_$timePeriod.f15) as f15;
+-- describe D_Hadoop_jvm_metrics_$timePeriod;
+-- dump D_Hadoop_jvm_metrics_$timePeriod;
+store D_Hadoop_jvm_metrics_$timePeriod into '$output' using seqWriter_Hadoop_jvm_metrics_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_mapred_jobtracker.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_mapred_jobtracker.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_mapred_jobtracker.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_mapred_jobtracker.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_mapred_jobtracker.pig: down-sample 6 jobtracker metrics
-- (f0..f5); parameters come from bin/downSampling.sh via 'pig -param'.
-- Rows are bucketed with TimePartition($timePeriod), grouped per
-- (time bucket, source host), averaged, and stored with ChukwaStorage.
-- NOTE(review): the '+' prefixes and line wraps below are mail-archive
-- diff artifacts, not part of the script.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_mapred_jobtracker_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_mapred_jobtracker_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' , 'reduces_completed', 'maps_launched', 'jobs_completed',
'reduces_launched', 'maps_completed', 'jobs_submitted');
+A_Hadoop_mapred_jobtracker_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_mapred_jobtracker_$timePeriod = FOREACH A_Hadoop_mapred_jobtracker_$timePeriod GENERATE
timePartition_Hadoop_mapred_jobtracker_$timePeriod(ts) as time ,fields#'csource' as g0 , fields#'reduces_completed'
as f0, fields#'maps_launched' as f1, fields#'jobs_completed' as f2, fields#'reduces_launched'
as f3, fields#'maps_completed' as f4, fields#'jobs_submitted' as f5;
+C_Hadoop_mapred_jobtracker_$timePeriod = group B_Hadoop_mapred_jobtracker_$timePeriod by
(time,g0 );
+D_Hadoop_mapred_jobtracker_$timePeriod = FOREACH C_Hadoop_mapred_jobtracker_$timePeriod generate
group.time as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_Hadoop_mapred_jobtracker_$timePeriod.f0)
as f0, AVG(B_Hadoop_mapred_jobtracker_$timePeriod.f1) as f1, AVG(B_Hadoop_mapred_jobtracker_$timePeriod.f2)
as f2, AVG(B_Hadoop_mapred_jobtracker_$timePeriod.f3) as f3, AVG(B_Hadoop_mapred_jobtracker_$timePeriod.f4)
as f4, AVG(B_Hadoop_mapred_jobtracker_$timePeriod.f5) as f5;
+-- describe D_Hadoop_mapred_jobtracker_$timePeriod;
+-- dump D_Hadoop_mapred_jobtracker_$timePeriod;
+store D_Hadoop_mapred_jobtracker_$timePeriod into '$output' using seqWriter_Hadoop_mapred_jobtracker_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/Hadoop_rpc_metrics.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/Hadoop_rpc_metrics.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/Hadoop_rpc_metrics.pig (added)
+++ hadoop/chukwa/trunk/script/pig/Hadoop_rpc_metrics.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
-- Hadoop_rpc_metrics.pig: down-sample 24 RPC metrics (f0..f23);
-- parameters come from bin/downSampling.sh via 'pig -param'.  Rows are
-- bucketed with TimePartition($timePeriod), grouped per (time bucket,
-- source host), averaged, and stored with ChukwaStorage.
-- NOTE(review): the '+' prefixes, mid-token wraps, and space-stuffed
-- continuation lines below are mail-archive diff artifacts, not part
-- of the script.
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_Hadoop_rpc_metrics_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_Hadoop_rpc_metrics_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype',
'c_application', 'c_cluster','c_source' , 'getjobcounters_avg_time', 'gettaskdiagnostics_num_ops',
'getbuildversion_num_ops', 'getprotocolversion_num_ops', 'getsystemdir_num_ops', 'submitjob_avg_time',
'gettaskcompletionevents_avg_time', 'getjobprofile_num_ops', 'gettaskdiagnostics_avg_time',
'getjobstatus_avg_time', 'getbuildversion_avg_time', 'gettaskcompletionevents_num_ops', 'rpcprocessingtime_avg_time',
'submitjob_num_ops', 'getsystemdir_avg_time', 'getjobcounters_num_ops', 'getjobprofile_avg_time',
'getnewjobid_num_ops', 'getjobstatus_num_ops', 'heartbeat_num_ops', 'getprotocolversion_avg_time',
'heartbeat_avg_time', 'rpcprocessingtime_num_ops', 'getnewjobid_avg_time');
+A_Hadoop_rpc_metrics_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_Hadoop_rpc_metrics_$timePeriod = FOREACH A_Hadoop_rpc_metrics_$timePeriod GENERATE timePartition_Hadoop_rpc_metrics_$timePeriod(ts)
as time ,fields#'csource' as g0 , fields#'getjobcounters_avg_time' as f0, fields#'gettaskdiagnostics_num_ops'
as f1, fields#'getbuildversion_num_ops' as f2, fields#'getprotocolversion_num_ops' as f3,
fields#'getsystemdir_num_ops' as f4, fields#'submitjob_avg_time' as f5, fields#'gettaskcompletionevents_avg_time'
as f6, fields#'getjobprofile_num_ops' as f7, fields#'gettaskdiagnostics_avg_time' as f8, fields#'getjobstatus_avg_time'
as f9, fields#'getbuildversion_avg_time' as f10, fields#'gettaskcompletionevents_num_ops'
as f11, fields#'rpcprocessingtime_avg_time' as f12, fields#'submitjob_num_ops' as f13, fields#'getsystemdir_avg_time'
as f14, fields#'getjobcounters_num_ops' as f15, fields#'getjobprofile_avg_time' as f16, fields#'getnewjobid_num_ops'
as f17, fields#'getjobstatus_num_ops' as f18, fields#'heartbeat_num_ops' as f19, fields#'getprot
 ocolversion_avg_time' as f20, fields#'heartbeat_avg_time' as f21, fields#'rpcprocessingtime_num_ops'
as f22, fields#'getnewjobid_avg_time' as f23;
+C_Hadoop_rpc_metrics_$timePeriod = group B_Hadoop_rpc_metrics_$timePeriod by (time,g0 );
+D_Hadoop_rpc_metrics_$timePeriod = FOREACH C_Hadoop_rpc_metrics_$timePeriod generate group.time
as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_Hadoop_rpc_metrics_$timePeriod.f0)
as f0, AVG(B_Hadoop_rpc_metrics_$timePeriod.f1) as f1, AVG(B_Hadoop_rpc_metrics_$timePeriod.f2)
as f2, AVG(B_Hadoop_rpc_metrics_$timePeriod.f3) as f3, AVG(B_Hadoop_rpc_metrics_$timePeriod.f4)
as f4, AVG(B_Hadoop_rpc_metrics_$timePeriod.f5) as f5, AVG(B_Hadoop_rpc_metrics_$timePeriod.f6)
as f6, AVG(B_Hadoop_rpc_metrics_$timePeriod.f7) as f7, AVG(B_Hadoop_rpc_metrics_$timePeriod.f8)
as f8, AVG(B_Hadoop_rpc_metrics_$timePeriod.f9) as f9, AVG(B_Hadoop_rpc_metrics_$timePeriod.f10)
as f10, AVG(B_Hadoop_rpc_metrics_$timePeriod.f11) as f11, AVG(B_Hadoop_rpc_metrics_$timePeriod.f12)
as f12, AVG(B_Hadoop_rpc_metrics_$timePeriod.f13) as f13, AVG(B_Hadoop_rpc_metrics_$timePeriod.f14)
as f14, AVG(B_Hadoop_rpc_metrics_$timePeriod.f15) as f15, AVG(B_Hadoop_rpc_metrics_$timePeriod.f16)
a
 s f16, AVG(B_Hadoop_rpc_metrics_$timePeriod.f17) as f17, AVG(B_Hadoop_rpc_metrics_$timePeriod.f18)
as f18, AVG(B_Hadoop_rpc_metrics_$timePeriod.f19) as f19, AVG(B_Hadoop_rpc_metrics_$timePeriod.f20)
as f20, AVG(B_Hadoop_rpc_metrics_$timePeriod.f21) as f21, AVG(B_Hadoop_rpc_metrics_$timePeriod.f22)
as f22, AVG(B_Hadoop_rpc_metrics_$timePeriod.f23) as f23;
+-- describe D_Hadoop_rpc_metrics_$timePeriod;
+-- dump D_Hadoop_rpc_metrics_$timePeriod;
+store D_Hadoop_rpc_metrics_$timePeriod into '$output' using seqWriter_Hadoop_rpc_metrics_$timePeriod;

Added: hadoop/chukwa/trunk/script/pig/SystemMetrics.pig
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/script/pig/SystemMetrics.pig?rev=783533&view=auto
==============================================================================
--- hadoop/chukwa/trunk/script/pig/SystemMetrics.pig (added)
+++ hadoop/chukwa/trunk/script/pig/SystemMetrics.pig Wed Jun 10 21:54:00 2009
@@ -0,0 +1,12 @@
+register $chukwaCore
+register $chukwaPig
+define chukwaLoader org.apache.hadoop.chukwa.ChukwaStorage();
+define timePartition_SystemMetrics_$timePeriod org.apache.hadoop.chukwa.TimePartition('$timePeriod');
+define seqWriter_SystemMetrics_$timePeriod org.apache.hadoop.chukwa.ChukwaStorage('c_timestamp','c_recordtype', 'c_application', 'c_cluster','c_source' , 'cpu_si%', 'mem_buffers_pcnt', 'tasks_stopped', 'sdb.rkb/s', 'cpu_hi%', 'eth0.rxdrop/s', '%idle', 'eth0.rxbyt/s', 'sdb.wkb/s', '%sys', 'eth1.txpck/s', 'mem_cached_pcnt', 'swap_used_pcnt', 'sdd.%util', 'eth1.rxdrop/s', '%system', '%memused', 'sdc.%util', 'sda.rkb/s', 'sda.wkb/s', 'mem_free', 'ldavg-5', 'tasks_total', 'tasks_zombie', 'sdd.wkb/s', 'sdb.%util', 'tasks_running', 'eth0.txbyt/s', '%nice', 'sdd.rkb/s', 'eth1.txdrop/s', 'sda.%util', 'eth0.rxpck/s', 'mem_total', 'sdc.wkb/s', 'eth0.txdrop/s', 'mem_used', 'mem_shared', 'eth1.txbyt/s', '%iowait', 'kbcached', 'eth1.rxpck/s', 'eth1.txerr/s', 'sdc.rkb/s', 'ldavg-1', 'eth0.txerr/s', 'eth1.rxerr/s', 'eth0.rxerr/s', 'mem_buffers', '%user', 'eth0.txpck/s', 'ldavg-15', 'tasks_sleeping', 'eth0_busy_pcnt', 'eth1_busy_pcnt', 'eth1.rxbyt/s');
+A_SystemMetrics_$timePeriod = load '$input' using  chukwaLoader as (ts: long,fields);
+B_SystemMetrics_$timePeriod = FOREACH A_SystemMetrics_$timePeriod GENERATE timePartition_SystemMetrics_$timePeriod(ts) as time ,fields#'csource' as g0 , fields#'cpu_si%' as f0, fields#'mem_buffers_pcnt' as f1, fields#'tasks_stopped' as f2, fields#'sdb.rkb/s' as f3, fields#'cpu_hi%' as f4, fields#'eth0.rxdrop/s' as f5, fields#'%idle' as f6, fields#'eth0.rxbyt/s' as f7, fields#'sdb.wkb/s' as f8, fields#'%sys' as f9, fields#'eth1.txpck/s' as f10, fields#'mem_cached_pcnt' as f11, fields#'swap_used_pcnt' as f12, fields#'sdd.%util' as f13, fields#'eth1.rxdrop/s' as f14, fields#'%system' as f15, fields#'%memused' as f16, fields#'sdc.%util' as f17, fields#'sda.rkb/s' as f18, fields#'sda.wkb/s' as f19, fields#'mem_free' as f20, fields#'ldavg-5' as f21, fields#'tasks_total' as f22, fields#'tasks_zombie' as f23, fields#'sdd.wkb/s' as f24, fields#'sdb.%util' as f25, fields#'tasks_running' as f26, fields#'eth0.txbyt/s' as f27, fields#'%nice' as f28, fields#'sdd.rkb/s' as f29, fields#'eth1.txdrop/s' as f30, fields#'sda.%util' as f31, fields#'eth0.rxpck/s' as f32, fields#'mem_total' as f33, fields#'sdc.wkb/s' as f34, fields#'eth0.txdrop/s' as f35, fields#'mem_used' as f36, fields#'mem_shared' as f37, fields#'eth1.txbyt/s' as f38, fields#'%iowait' as f39, fields#'kbcached' as f40, fields#'eth1.rxpck/s' as f41, fields#'eth1.txerr/s' as f42, fields#'sdc.rkb/s' as f43, fields#'ldavg-1' as f44, fields#'eth0.txerr/s' as f45, fields#'eth1.rxerr/s' as f46, fields#'eth0.rxerr/s' as f47, fields#'mem_buffers' as f48, fields#'%user' as f49, fields#'eth0.txpck/s' as f50, fields#'ldavg-15' as f51, fields#'tasks_sleeping' as f52, fields#'eth0_busy_pcnt' as f53, fields#'eth1_busy_pcnt' as f54, fields#'eth1.rxbyt/s' as f55;
+C_SystemMetrics_$timePeriod = group B_SystemMetrics_$timePeriod by (time,g0 );
+D_SystemMetrics_$timePeriod = FOREACH C_SystemMetrics_$timePeriod generate group.time as ts, '$recType', 'downsampling $timePeriod', '$cluster', group.g0 , AVG(B_SystemMetrics_$timePeriod.f0) as f0, AVG(B_SystemMetrics_$timePeriod.f1) as f1, AVG(B_SystemMetrics_$timePeriod.f2) as f2, AVG(B_SystemMetrics_$timePeriod.f3) as f3, AVG(B_SystemMetrics_$timePeriod.f4) as f4, AVG(B_SystemMetrics_$timePeriod.f5) as f5, AVG(B_SystemMetrics_$timePeriod.f6) as f6, AVG(B_SystemMetrics_$timePeriod.f7) as f7, AVG(B_SystemMetrics_$timePeriod.f8) as f8, AVG(B_SystemMetrics_$timePeriod.f9) as f9, AVG(B_SystemMetrics_$timePeriod.f10) as f10, AVG(B_SystemMetrics_$timePeriod.f11) as f11, AVG(B_SystemMetrics_$timePeriod.f12) as f12, AVG(B_SystemMetrics_$timePeriod.f13) as f13, AVG(B_SystemMetrics_$timePeriod.f14) as f14, AVG(B_SystemMetrics_$timePeriod.f15) as f15, AVG(B_SystemMetrics_$timePeriod.f16) as f16, AVG(B_SystemMetrics_$timePeriod.f17) as f17, AVG(B_SystemMetrics_$timePeriod.f18) as f18, AVG(B_SystemMetrics_$timePeriod.f19) as f19, AVG(B_SystemMetrics_$timePeriod.f20) as f20, AVG(B_SystemMetrics_$timePeriod.f21) as f21, AVG(B_SystemMetrics_$timePeriod.f22) as f22, AVG(B_SystemMetrics_$timePeriod.f23) as f23, AVG(B_SystemMetrics_$timePeriod.f24) as f24, AVG(B_SystemMetrics_$timePeriod.f25) as f25, AVG(B_SystemMetrics_$timePeriod.f26) as f26, AVG(B_SystemMetrics_$timePeriod.f27) as f27, AVG(B_SystemMetrics_$timePeriod.f28) as f28, AVG(B_SystemMetrics_$timePeriod.f29) as f29, AVG(B_SystemMetrics_$timePeriod.f30) as f30, AVG(B_SystemMetrics_$timePeriod.f31) as f31, AVG(B_SystemMetrics_$timePeriod.f32) as f32, AVG(B_SystemMetrics_$timePeriod.f33) as f33, AVG(B_SystemMetrics_$timePeriod.f34) as f34, AVG(B_SystemMetrics_$timePeriod.f35) as f35, AVG(B_SystemMetrics_$timePeriod.f36) as f36, AVG(B_SystemMetrics_$timePeriod.f37) as f37, AVG(B_SystemMetrics_$timePeriod.f38) as f38, AVG(B_SystemMetrics_$timePeriod.f39) as f39, AVG(B_SystemMetrics_$timePeriod.f40) as f40, AVG(B_SystemMetrics_$timePeriod.f41) as f41, AVG(B_SystemMetrics_$timePeriod.f42) as f42, AVG(B_SystemMetrics_$timePeriod.f43) as f43, AVG(B_SystemMetrics_$timePeriod.f44) as f44, AVG(B_SystemMetrics_$timePeriod.f45) as f45, AVG(B_SystemMetrics_$timePeriod.f46) as f46, AVG(B_SystemMetrics_$timePeriod.f47) as f47, AVG(B_SystemMetrics_$timePeriod.f48) as f48, AVG(B_SystemMetrics_$timePeriod.f49) as f49, AVG(B_SystemMetrics_$timePeriod.f50) as f50, AVG(B_SystemMetrics_$timePeriod.f51) as f51, AVG(B_SystemMetrics_$timePeriod.f52) as f52, AVG(B_SystemMetrics_$timePeriod.f53) as f53, AVG(B_SystemMetrics_$timePeriod.f54) as f54, AVG(B_SystemMetrics_$timePeriod.f55) as f55;
+-- describe D_SystemMetrics_$timePeriod;
+-- dump D_SystemMetrics_$timePeriod;
+store D_SystemMetrics_$timePeriod into '$output' using seqWriter_SystemMetrics_$timePeriod;



Mime
View raw message