hadoop-hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From pras...@apache.org
Subject svn commit: r772012 - in /hadoop/hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/hooks/ test/org/apache/hadoop/hive/ql/hooks/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientnegative/ test/results/clientpositive/
Date Tue, 05 May 2009 23:29:25 GMT
Author: prasadc
Date: Tue May  5 23:29:24 2009
New Revision: 772012

URL: http://svn.apache.org/viewvc?rev=772012&view=rev
Log:
HIVE-463 Add preexecute hook

Added:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java   (with
props)
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java   (with
props)
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java   (with
props)
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java 
 (with props)
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/bad_exec_hooks.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/no_hooks.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_exec_hooks.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java Tue May
 5 23:29:24 2009
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.Set;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * The pre execute hook interface. A list of such hooks can
+ * be configured to be called after compilation and before 
+ * execution.
+ */
+public interface PreExecute {
+
+  /**
+   * The run command that is called just before the execution of the
+   * query.
+   * 
+   * @param sess The session state.
+   * @param inputs The set of input tables and partitions.
+   * @param outputs The set of output tables, partitions, local and hdfs directories.
+   * @param ugi The user group security information.
+   */
+  public void run(SessionState sess, Set<ReadEntity> inputs, 
+      Set<WriteEntity> outputs, UserGroupInformation ugi)
+    throws Exception;
+  
+}

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Tue May
 5 23:29:24 2009
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import java.util.Map;
+import java.net.URI;
+
+/**
+ * This class encapsulates the information on the partition and
+ * tables that are read by the query.
+ */
+public class ReadEntity {
+  
+  /**
+   * The partition. This is null for a non partitioned table.
+   */
+  private Partition p;
+  
+  /**
+   * The table.
+   */
+  private Table t;
+
+  /**
+   * Constructor.
+   * 
+   * @param t The Table that the query reads from.
+   */
+  public ReadEntity(Table t) {
+    this.t = t;
+    this.p = null;
+  }
+  
+  /**
+   * Constructor given a partition.
+   * 
+   * @param p The partition that the query reads from.
+   */
+  public ReadEntity(Partition p) {
+    this.t = p.getTable();
+    this.p = p;
+  }
+  /**
+   * Enum that tells what type of read entity this is.
+   */
+  public static enum Type {TABLE, PARTITION};
+  
+  /**
+   * Get the type.
+   */
+  public Type getType() {
+    return p == null ? Type.TABLE : Type.PARTITION;
+  }
+  
+  /**
+   * Get the parameter map of the Entity.
+   */
+  public Map<String, String> getParameter() {
+    if (p != null) {
+      return p.getTPartition().getParameters();
+    }
+    else {
+      return t.getTTable().getParameters();
+    }
+  }
+  
+  /**
+   * Get the location of the entity.
+   */
+  public URI getLocation() {
+    if (p != null) {
+      return p.getDataLocation();
+    }
+    else {
+      return t.getDataLocation();
+    }
+  }
+  
+  /**
+   * toString function.
+   */
+  @Override
+  public String toString() {
+    if (p != null) {
+      return p.getTable().getDbName() + "/" + p.getTable().getName() + "/" + p.getName();
+    }
+    else {
+      return t.getDbName() + "/" + t.getName();
+    }
+  }
+  
+  /**
+   * Equals function.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (o == null)
+      return false;
+    
+    if (o instanceof ReadEntity) {
+      ReadEntity ore = (ReadEntity)o;
+      return (toString().equalsIgnoreCase(ore.toString()));
+    }
+    else
+      return false;
+  }
+  
+  /**
+   * Hashcode function.
+   */
+  @Override
+  public int hashCode() {
+    return toString().hashCode();
+  }
+}

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java Tue May
 5 23:29:24 2009
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import java.net.URI;
+
+/**
+ * This class encapsulates an object that is being written to
+ * by the query. This object may be a table, partition, dfs
+ * directory or a local directory.
+ */
+public class WriteEntity {
+
+  /**
+   * The type of the write entity.
+   */
+  public static enum Type {TABLE, PARTITION, DFS_DIR, LOCAL_DIR};
+  
+  /**
+   * The type.
+   */
+  private Type typ;
+  
+  /**
+   * The table. This is null if this is a directory.
+   */
+  private Table t;
+
+  /**
+   * The partition. This is null if this object is not a partition.
+   */
+  private Partition p;
+  
+  /**
+   * The directory if this is a directory.
+   */
+  private String d;
+  
+  /**
+   * Constructor for a table.
+   * 
+   * @param t Table that is written to.
+   */
+  public WriteEntity(Table t) {
+    this.d = null;
+    this.p = null;
+    this.t = t;
+    this.typ = Type.TABLE;
+  }
+  
+  /**
+   * Constructor for a partition.
+   * 
+   * @param p Partition that is written to.
+   */
+  public WriteEntity(Partition p) {
+    this.d = null;
+    this.p = p;
+    this.t = p.getTable();
+    this.typ = Type.PARTITION;
+  }
+  
+  /**
+   * Constructor for a file.
+   * 
+   * @param d The name of the directory that is being written to.
+   * @param islocal Flag to decide whether this directory is local or in dfs.
+   */
+  public WriteEntity(String d, boolean islocal) {
+    this.d = d;
+    this.p = null;
+    this.t = null;
+    if (islocal) {
+      this.typ = Type.LOCAL_DIR;
+    }
+    else {
+      this.typ = Type.DFS_DIR;
+    }
+  }
+  
+  /**
+   * Get the type of the entity.
+   */
+  public Type getType() {
+    return typ;
+  }
+  
+  /**
+   * Get the location of the entity.
+   */
+  public URI getLocation() throws Exception {
+    if (typ == Type.TABLE)
+      return t.getDataLocation();
+    
+    if (typ == Type.PARTITION)
+      return p.getDataLocation();
+    
+    if (typ == Type.DFS_DIR || typ == Type.LOCAL_DIR)
+      return new URI(d);
+    
+    return null;
+  }
+  
+  /**
+   * toString function.
+   */
+  public String toString() {
+    switch(typ) {
+    case TABLE:
+      return t.getDbName() + "/" + t.getName();
+    case PARTITION:
+      return t.getDbName() + "/" + t.getName() + "/" + p.getName();
+    default:
+      return d;
+    }
+  }
+  
+  /**
+   * Equals function.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (o == null)
+      return false;
+    
+    if (o instanceof WriteEntity) {
+      WriteEntity ore = (WriteEntity)o;
+      return (toString().equalsIgnoreCase(ore.toString()));
+    }
+    else
+      return false;
+  }
+  
+  /**
+   * Hashcode function.
+   */
+  @Override
+  public int hashCode() {
+    return toString().hashCode();
+  }
+
+}

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (added)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java Tue
May  5 23:29:24 2009
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.Set;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+/**
+ * Implementation of a pre execute hook that simply prints out its
+ * parameters to standard output.
+ */
+public class PreExecutePrinter implements PreExecute {
+
+  @Override
+  public void run(SessionState sess, Set<ReadEntity> inputs,
+      Set<WriteEntity> outputs, UserGroupInformation ugi) 
+    throws Exception {
+
+    LogHelper console = SessionState.getConsole();
+
+    if (console == null)
+      return;
+    
+    if (sess != null)
+      console.printError("query: " + sess.getCmd().trim());
+
+    for(ReadEntity re: inputs) {
+      console.printError("Input: " + re.toString());
+    }
+    for(WriteEntity we: outputs) {
+      console.printError("Output: " + we.toString());
+    }
+  }
+
+}

Propchange: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/bad_exec_hooks.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/bad_exec_hooks.q?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/bad_exec_hooks.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/bad_exec_hooks.q Tue May  5 23:29:24
2009
@@ -0,0 +1,6 @@
+set hive.exec.pre.hooks="org.this.is.a.bad.class";
+
+EXPLAIN
+SELECT x.* FROM SRC x LIMIT 20;
+
+SELECT x.* FROM SRC x LIMIT 20;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/no_hooks.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/no_hooks.q?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/no_hooks.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/no_hooks.q Tue May  5 23:29:24 2009
@@ -0,0 +1,6 @@
+set hive.exec.pre.hooks=;
+
+EXPLAIN
+SELECT *  FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY
src1.key, src1.value, src2.key, src2.value;
+
+SELECT *  FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY
src1.key, src1.value, src2.key, src2.value;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_exec_hooks.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_exec_hooks.q.out?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_exec_hooks.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_exec_hooks.q.out Tue May  5 23:29:24
2009
@@ -0,0 +1,2 @@
+Pre Exec Hook Class not found:"org.this.is.a.bad.class"
+FAILED: Unknown exception : "org.this.is.a.bad.class"

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out?rev=772012&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out Tue May  5 23:29:24
2009
@@ -0,0 +1,198 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2))) (TOK_INSERT
(TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE
(and (< (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src2) key) 10)))
(TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC
(. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key))
(TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src2 
+            Reduce Output Operator
+              sort order: 
+              tag: 1
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+        src1 
+            Reduce Output Operator
+              sort order: 
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1}
+            1 {VALUE.0} {VALUE.1}
+          Filter Operator
+            predicate:
+                expr: ((UDFToDouble(0) < UDFToDouble(10)) and (UDFToDouble(2) < UDFToDouble(10)))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+                    expr: 2
+                    type: string
+                    expr: 3
+                    type: string
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/378069303/1108957977.10002

+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+            sort order: ++++
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	9	val_9
+0	val_0	9	val_9
+0	val_0	9	val_9
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	2	val_2
+2	val_2	4	val_4
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	8	val_8
+2	val_2	9	val_9
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	2	val_2
+4	val_4	4	val_4
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	8	val_8
+4	val_4	9	val_9
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9



Mime
View raw message