asterixdb-commits mailing list archives

From: ima...@apache.org
Subject: [44/58] [abbrv] [partial] incubator-asterixdb git commit: Added support of typed indexes over open fields & indexes over nested fields
Date: Fri, 24 Apr 2015 18:43:10 GMT
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/data/spatial/spatialData.json
----------------------------------------------------------------------
diff --git a/asterix-app/data/spatial/spatialData.json b/asterix-app/data/spatial/spatialData.json
index 9c78064..7396936 100644
--- a/asterix-app/data/spatial/spatialData.json
+++ b/asterix-app/data/spatial/spatialData.json
@@ -1,21 +1,21 @@
-{"id": 1, "point": point("4.1,7.0"), "kwds": "sign ahead", "line1": line("4.0,7.0 9.0,7.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.2,7.0"), "circle" : circle("1.0,1.0 10.0")}
-{"id": 2, "point": point("40.2152,-75.0449"), "kwds": "factory hosedan", "line1": line("-4.0,2.0 2.0,2.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("2.0,3.0 2.0")}
-{"id": 3, "point": point("43.5083,-79.3007"), "kwds": "enterprisecamp torcamp", "line1": line("3.0,0.0 0.0,4.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("5.5,1.0 10.0")}
-{"id": 4, "point": point("43.5083,-79.3007"), "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0 2.0,1.0 1.0,0.0"), "poly2": polygon("2.0,1.0 2.0,2.0 3.0,2.0 3.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("77.0,4.0 30.0")}
-{"id": 5, "point": point("43.5083,-79.3007"), "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("100.0,100.0 100.0,400.0 300.0,400.0 300.0,100.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("88.0,1.0 10.0")}
-{"id": 6, "point": point("43.5083,-79.3007"), "kwds": "enterprisecamp torcamp", "line1": line("0.0,5.0 1.0,7.0"), "line2": line("4.0,7.0 2.0,-17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("3.1,1.0 2.9,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,1.0 10.0")}
-{"id": 7, "point": point("43.5083,-79.3007"), "kwds": "enterprisecamp torcamp", "line1": line("0.0,5.0 4.0,7.1"), "line2": line("4.0,7.0 2.0,-17.0"), "poly1": polygon("-5.0,-2.0 -4.0,-1.0 -3.0,-1.0 -2.0,-2.0 -4.0,-4.0 -5.0,-3.0"), "poly2": polygon("3.0,1.0 3.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("3.0,6.0 5.0,7.0"), "circle" : circle("13.0,75.0 1.0")}
-{"id": 8, "point": point("43.5083,-79.3007"), "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("-5.0,-2.0 -4.0,-1.0 -3.0,-1.0 -2.0,-2.0 -4.0,-4.0 -5.0,-3.0"), "poly2": polygon("-3.0,-3.0 -1.0,-3.0 -3.0,-5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("76.0,87.0 50.0")}
-{"id": 9, "point": point("5.0,1.0"), "kwds": "sign ahead", "line1": line("5.0,1.0 5.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("11.0,14.0 15.0")}
-{"id": 10, "point": point("2.0,3.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("6.01,1.0 6.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,76.0 17.0")}
-{"id": 11, "point": point("4.9,0.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("4.9,0.1 4.9,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("22.0,35.0 144.0")}
-{"id": 12, "point": point("6.0,3.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("4.0,1.0 4.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,23.0 12.0")}
-{"id": 13, "point": point("5.0,5.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("6.0,1.0 6.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("30.0,11.0 11.0")}
-{"id": 14, "point": point("5.1,5.1"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 5.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,66.0 17.0")}
-{"id": 15, "point": point("-2.0,3.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.1,1.0 5.1,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("12.0,87.0 10.0")}
-{"id": 16, "point": point("-2.0,3.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,35.0 10.0")}
-{"id": 17, "point": point("4.1,7.0"), "kwds": "sign ahead", "line1": line("4.0,7.0 9.0,7.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("0.0,6.0 0.0,0.0 3.0,0.0 4.0,1.0 6.0,1.0 8.0,0.0 12.0,0.0 13.0,2.0 8.0,2.0 8.0,4.0 11.0,4.0 11.0,6.0 6.0,6.0 4.0,3.0 2.0,6.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,51.0 10.0")}
-{"id": 18, "point": point("-2.0,3.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 7.0,1.0 7.0,4.0 6.0,2.0 5.0,4.0"), "poly2": polygon("6.0,3.0 7.0,5.0 6.0,7.0 5.0,5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("43.0,45.0 12.0")}
-{"id": 19, "point": point("-2.0,3.0"), "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 7.0,1.0 7.0,4.0 6.0,2.0 5.0,4.0"), "poly2": polygon("6.0,1.0 7.0,5.0 6.0,7.0 5.0,5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("65.0,2.0 13.0")}
-{"id": 20, "point": point("4.0,3.0"), "kwds": "sign ahead", "line1": line("20.0,20.0 30.0,40.0"), "line2": line("5.0,8.0 0.0,1.0"), "poly1": polygon("4.0,1.0 4.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("50.0,10.0 50.0,40.0 70.0,40.0 70.0,10.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,23.0 12.0")}
-{"id": 21, "point": point("0.0,5.0"), "kwds": "sign ahead", "line1": line("0.0,5.0 0.0,40.0"), "line2": line("5.0,8.0 0.0,1.0"), "poly1": polygon("5.1,5.1 14.0,14.0 22.0,14.0 22.0,10.0"), "poly2": polygon("50.0,10.0 50.0,40.0 70.0,40.0 70.0,10.0"), "rec": rectangle("0.0,0.0 5.1,5.1"), "circle" : circle("1.0,23.0 12.0")}
\ No newline at end of file
+{"id": 1, "kwds": "sign ahead", "line1": line("4.0,7.0 9.0,7.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.2,7.0"), "circle" : circle("1.0,1.0 10.0"), "point": point("4.1,7.0")}
+{"id": 2, "kwds": "factory hosedan", "line1": line("-4.0,2.0 2.0,2.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("2.0,3.0 2.0"), "point": point("40.2152,-75.0449")}
+{"id": 3, "kwds": "enterprisecamp torcamp", "line1": line("3.0,0.0 0.0,4.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("5.5,1.0 10.0"), "point": point("43.5083,-79.3007")}
+{"id": 4, "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0 2.0,1.0 1.0,0.0"), "poly2": polygon("2.0,1.0 2.0,2.0 3.0,2.0 3.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("77.0,4.0 30.0"), "point": point("43.5083,-79.3007")}
+{"id": 5, "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("100.0,100.0 100.0,400.0 300.0,400.0 300.0,100.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("88.0,1.0 10.0"), "point": point("43.5083,-79.3007")}
+{"id": 6, "kwds": "enterprisecamp torcamp", "line1": line("0.0,5.0 1.0,7.0"), "line2": line("4.0,7.0 2.0,-17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("3.1,1.0 2.9,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,1.0 10.0"), "point": point("43.5083,-79.3007")}
+{"id": 7, "kwds": "enterprisecamp torcamp", "line1": line("0.0,5.0 4.0,7.1"), "line2": line("4.0,7.0 2.0,-17.0"), "poly1": polygon("-5.0,-2.0 -4.0,-1.0 -3.0,-1.0 -2.0,-2.0 -4.0,-4.0 -5.0,-3.0"), "poly2": polygon("3.0,1.0 3.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("3.0,6.0 5.0,7.0"), "circle" : circle("13.0,75.0 1.0"), "point": point("43.5083,-79.3007")}
+{"id": 8, "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("-5.0,-2.0 -4.0,-1.0 -3.0,-1.0 -2.0,-2.0 -4.0,-4.0 -5.0,-3.0"), "poly2": polygon("-3.0,-3.0 -1.0,-3.0 -3.0,-5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("76.0,87.0 50.0"), "point": point("43.5083,-79.3007")}
+{"id": 9, "kwds": "sign ahead", "line1": line("5.0,1.0 5.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("11.0,14.0 15.0"), "point": point("5.0,1.0")}
+{"id": 10, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("6.01,1.0 6.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,76.0 17.0"), "point": point("2.0,3.0")}
+{"id": 11, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("4.9,0.1 4.9,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("22.0,35.0 144.0"), "point": point("4.9,0.0")}
+{"id": 12, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("4.0,1.0 4.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,23.0 12.0"), "point": point("6.0,3.0")}
+{"id": 13, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("6.0,1.0 6.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("30.0,11.0 11.0"), "point": point("5.0,5.0")}
+{"id": 14, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 5.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,66.0 17.0"), "point": point("5.1,5.1")}
+{"id": 15, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.1,1.0 5.1,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("12.0,87.0 10.0"), "point": point("-2.0,3.0")}
+{"id": 16, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,35.0 10.0"), "point": point("-2.0,3.0")}
+{"id": 17, "kwds": "sign ahead", "line1": line("4.0,7.0 9.0,7.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("0.0,6.0 0.0,0.0 3.0,0.0 4.0,1.0 6.0,1.0 8.0,0.0 12.0,0.0 13.0,2.0 8.0,2.0 8.0,4.0 11.0,4.0 11.0,6.0 6.0,6.0 4.0,3.0 2.0,6.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,51.0 10.0"), "point": point("4.1,7.0")}
+{"id": 18, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 7.0,1.0 7.0,4.0 6.0,2.0 5.0,4.0"), "poly2": polygon("6.0,3.0 7.0,5.0 6.0,7.0 5.0,5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("43.0,45.0 12.0"), "point": point("-2.0,3.0")}
+{"id": 19, "kwds": "sign ahead", "line1": line("1.0,2.0 3.0,4.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("5.0,1.0 7.0,1.0 7.0,4.0 6.0,2.0 5.0,4.0"), "poly2": polygon("6.0,1.0 7.0,5.0 6.0,7.0 5.0,5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("65.0,2.0 13.0"), "point": point("-2.0,3.0")}
+{"id": 20, "kwds": "sign ahead", "line1": line("20.0,20.0 30.0,40.0"), "line2": line("5.0,8.0 0.0,1.0"), "poly1": polygon("4.0,1.0 4.0,4.0 12.0,4.0 12.0,1.0"), "poly2": polygon("50.0,10.0 50.0,40.0 70.0,40.0 70.0,10.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,23.0 12.0"), "point": point("4.0,3.0")}
+{"id": 21, "kwds": "sign ahead", "line1": line("0.0,5.0 0.0,40.0"), "line2": line("5.0,8.0 0.0,1.0"), "poly1": polygon("5.1,5.1 14.0,14.0 22.0,14.0 22.0,10.0"), "poly2": polygon("50.0,10.0 50.0,40.0 70.0,40.0 70.0,10.0"), "rec": rectangle("0.0,0.0 5.1,5.1"), "circle" : circle("1.0,23.0 12.0"), "point": point("0.0,5.0")}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/data/tinysocial/twm-nested.adm
----------------------------------------------------------------------
diff --git a/asterix-app/data/tinysocial/twm-nested.adm b/asterix-app/data/tinysocial/twm-nested.adm
new file mode 100644
index 0000000..e8aa8ad
--- /dev/null
+++ b/asterix-app/data/tinysocial/twm-nested.adm
@@ -0,0 +1,12 @@
+{"tweetid":1,"user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416,"sender-location":point("47.44,80.65")},"send-time":datetime("2008-04-26T10:10:00"),"referred-topics":{{"t-mobile","customization"}},"message-text":" love t-mobile its customization is good:)"}
+{"tweetid":2,"user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159,"sender-location":point("32.84,67.14")},"send-time":datetime("2010-05-13T10:10:00"),"referred-topics":{{"verizon","shortcut-menu"}},"message-text":" like verizon its shortcut-menu is awesome:)"}
+{"tweetid":3,"user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416,"sender-location":point("29.72,75.8")},"send-time":datetime("2006-11-04T10:10:00"),"referred-topics":{{"motorola","speed"}},"message-text":" like motorola the speed is good:)"}
+{"tweetid":4,"user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416,"sender-location":point("39.28,70.48")},"send-time":datetime("2011-12-26T10:10:00"),"referred-topics":{{"sprint","voice-command"}},"message-text":" like sprint the voice-command is mind-blowing:)"}
+{"tweetid":5,"user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416,"sender-location":point("40.09,92.69")},"send-time":datetime("2006-08-04T10:10:00"),"referred-topics":{{"motorola","speed"}},"message-text":" can't stand motorola its speed is terrible:("}
+{"tweetid":6,"user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159,"sender-location":point("47.51,83.99")},"send-time":datetime("2010-05-07T10:10:00"),"referred-topics":{{"iphone","voice-clarity"}},"message-text":" like iphone the voice-clarity is good:)"}
+{"tweetid":7,"user":{"screen-name":"ChangEwing_573","lang":"en","friends_count":182,"statuses_count":394,"name":"Chang Ewing","followers_count":32136,"sender-location":point("36.21,72.6")},"send-time":datetime("2011-08-25T10:10:00"),"referred-topics":{{"samsung","platform"}},"message-text":" like samsung the platform is good"}
+{"tweetid":8,"user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416,"sender-location":point("46.05,93.34")},"send-time":datetime("2005-10-14T10:10:00"),"referred-topics":{{"t-mobile","shortcut-menu"}},"message-text":" like t-mobile the shortcut-menu is awesome:)"}
+{"tweetid":9,"user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416,"sender-location":point("36.86,74.62")},"send-time":datetime("2012-07-21T10:10:00"),"referred-topics":{{"verizon","voicemail-service"}},"message-text":" love verizon its voicemail-service is awesome"}
+{"tweetid":10,"user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159,"sender-location":point("29.15,76.53")},"send-time":datetime("2008-01-26T10:10:00"),"referred-topics":{{"verizon","voice-clarity"}},"message-text":" hate verizon its voice-clarity is OMG:("}
+{"tweetid":11,"user":{"screen-name":"NilaMilliron_tw","lang":"en","friends_count":445,"statuses_count":164,"name":"Nila Milliron","followers_count":22649,"sender-location":point("37.59,68.42")},"send-time":datetime("2008-03-09T10:10:00"),"referred-topics":{{"iphone","platform"}},"message-text":" can't stand iphone its platform is terrible"}
+{"tweetid":12,"user":{"screen-name":"OliJackson_512","lang":"en","friends_count":445,"statuses_count":164,"name":"Oli Jackson","followers_count":22649,"sender-location":point("24.82,94.63")},"send-time":datetime("2010-02-13T10:10:00"),"referred-topics":{{"samsung","voice-command"}},"message-text":" like samsung the voice-command is amazing:)"}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/data/tpch0.001/lineitem.tbl
----------------------------------------------------------------------
diff --git a/asterix-app/data/tpch0.001/lineitem.tbl b/asterix-app/data/tpch0.001/lineitem.tbl
index 58d47c6..9ce6203 100644
--- a/asterix-app/data/tpch0.001/lineitem.tbl
+++ b/asterix-app/data/tpch0.001/lineitem.tbl
@@ -6002,4 +6002,4 @@
 5987|176|5|2|20|21523.40|0.10|0.06|N|O|1996-11-28|1996-09-17|1996-12-05|TAKE BACK RETURN|RAIL|ing excuses nag quickly always bold|
 5987|92|3|3|43|42659.87|0.08|0.04|N|O|1996-10-30|1996-10-13|1996-11-12|NONE|AIR|theodolites wake above the furiously b|
 5987|97|1|4|37|36892.33|0.08|0.08|N|O|1996-10-15|1996-10-27|1996-11-09|NONE|MAIL|le furiously carefully special |
-5988|172|1|1|41|43958.97|0.08|0.03|R|F|1994-01-20|1994-02-06|1994-02-10|COLLECT COD|AIR|the pending, express reque|
+5988|172|1|1|41|43958.97|0.08|0.03|R|F|1994-01-20|1994-02-06|1994-02-10|COLLECT COD|AIR|the pending, express reque|
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
index 05dd499..a1c5348 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -96,9 +96,12 @@ public class AsterixHyracksIntegrationUtil {
     }
 
     public static void deinit() throws Exception {
-        if (nc2 != null) nc2.stop();
-        if (nc1 != null) nc1.stop();
-        if (cc != null) cc.stop();
+        if (nc2 != null)
+            nc2.stop();
+        if (nc1 != null)
+            nc1.stop();
+        if (cc != null)
+            cc.stop();
     }
 
     public static void runJob(JobSpecification spec) throws Exception {
@@ -110,11 +113,10 @@ public class AsterixHyracksIntegrationUtil {
 
     /**
      * main method to run a simple 2 node cluster in-process
+     * suggested VM arguments: <code>-enableassertions -Xmx2048m -Dfile.encoding=UTF-8</code>
      *
-     * suggested VM arguments:
-     * <code>-enableassertions -Xmx2048m -Dfile.encoding=UTF-8</code>
-     *
-     * @param args unused
+     * @param args
+     *            unused
      */
     public static void main(String[] args) {
         Runtime.getRuntime().addShutdownHook(new Thread() {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index d8a71b8..52fc25e 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -31,6 +31,7 @@ import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.commons.lang3.StringUtils;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -64,14 +65,13 @@ import edu.uci.ics.asterix.aql.expression.InternalDetailsDecl;
 import edu.uci.ics.asterix.aql.expression.LoadStatement;
 import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
 import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
-import edu.uci.ics.asterix.aql.expression.RunStatement;
 import edu.uci.ics.asterix.aql.expression.Query;
 import edu.uci.ics.asterix.aql.expression.RefreshExternalDatasetStatement;
+import edu.uci.ics.asterix.aql.expression.RunStatement;
 import edu.uci.ics.asterix.aql.expression.SetStatement;
 import edu.uci.ics.asterix.aql.expression.TypeDecl;
 import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
-import edu.uci.ics.asterix.aql.expression.VarIdentifier;
-import edu.uci.ics.asterix.aql.expression.VariableExpr;
+import edu.uci.ics.asterix.aql.expression.TypeExpression;
 import edu.uci.ics.asterix.aql.expression.WriteStatement;
 import edu.uci.ics.asterix.aql.util.FunctionUtils;
 import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
@@ -122,6 +122,7 @@ import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.types.TypeSignature;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
 import edu.uci.ics.asterix.result.ResultReader;
 import edu.uci.ics.asterix.result.ResultUtils;
 import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
@@ -152,8 +153,8 @@ import edu.uci.ics.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRunt
 import edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
 import edu.uci.ics.hyracks.api.dataset.ResultSetId;
@@ -514,15 +515,16 @@ public class AqlTranslator extends AbstractAqlTranslator {
                     if (itemType.getTypeTag() != ATypeTag.RECORD) {
                         throw new AlgebricksException("Can only partition ARecord's.");
                     }
-                    List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
+                    List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                             .getPartitioningExprs();
                     boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
                     ARecordType aRecordType = (ARecordType) itemType;
-                    aRecordType.validatePartitioningExpressions(partitioningExprs, autogenerated);
+                    List<IAType> partitioningTypes = aRecordType.validatePartitioningExpressions(partitioningExprs,
+                            autogenerated);
 
                     String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(dd,
                             dataverseName, mdTxnCtx);
-                    String filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
+                    List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
                     if (compactionPolicy == null) {
                         if (filterField != null) {
                             // If the dataset has a filter and the user didn't specify a merge policy, then we will pick the
@@ -541,7 +543,8 @@ public class AqlTranslator extends AbstractAqlTranslator {
                     }
                     datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                             InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
-                            ngName, autogenerated, compactionPolicy, compactionPolicyProperties, filterField);
+                            partitioningTypes, ngName, autogenerated, compactionPolicy, compactionPolicyProperties,
+                            filterField);
                     break;
                 }
                 case EXTERNAL: {
@@ -713,6 +716,7 @@ public class AqlTranslator extends AbstractAqlTranslator {
 
     }
 
+    @SuppressWarnings("unchecked")
     private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
         ProgressState progress = ProgressState.NO_PROGRESS;
@@ -736,7 +740,6 @@ public class AqlTranslator extends AbstractAqlTranslator {
         Index filesIndex = null;
         boolean datasetLocked = false;
         try {
-
             ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                     datasetName);
             if (ds == null) {
@@ -753,7 +756,49 @@ public class AqlTranslator extends AbstractAqlTranslator {
                     itemTypeName);
             IAType itemType = dt.getDatatype();
             ARecordType aRecordType = (ARecordType) itemType;
-            aRecordType.validateKeyFields(stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getIndexType());
+
+            List<List<String>> indexFields = new ArrayList<List<String>>();
+            List<IAType> indexFieldTypes = new ArrayList<IAType>();
+            for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
+                IAType fieldType = null;
+                boolean isOpen = aRecordType.isOpen();
+                ARecordType subType = aRecordType;
+                int i = 0;
+                if (fieldExpr.first.size() > 1 && !isOpen) {
+                    for (; i < fieldExpr.first.size() - 1;) {
+                        subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
+                        i++;
+                        if (subType.isOpen()) {
+                            isOpen = true;
+                            break;
+                        }
+                    }
+                }
+                if (fieldExpr.second == null) {
+                    fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
+                } else {
+                    if (!stmtCreateIndex.isEnforced())
+                        throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
+                                + "\" field without enforcing its type");
+                    if (!isOpen)
+                        throw new AlgebricksException("Typed index on \"" + fieldExpr.first
+                                + "\" field could be created only for open datatype");
+                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
+                            indexName, dataverseName);
+                    TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
+                    fieldType = typeMap.get(typeSignature);
+                }
+                if (fieldType == null)
+                    throw new AlgebricksException("Unknown type " + fieldExpr.second);
+                if (isOpen && fieldType.getTypeTag().isDerivedType())
+                    MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, indexName, fieldType,
+                            false));
+
+                indexFields.add(fieldExpr.first);
+                indexFieldTypes.add(fieldType);
+            }
+
+            aRecordType.validateKeyFields(indexFields, indexFieldTypes, stmtCreateIndex.getIndexType());
 
             if (idx != null) {
                 if (stmtCreateIndex.getIfNotExists()) {
@@ -770,9 +815,9 @@ public class AqlTranslator extends AbstractAqlTranslator {
                     || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
                     || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
                     || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
-                List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
-                for (String partitioningKey : partitioningKeys) {
-                    IAType keyType = aRecordType.getFieldType(partitioningKey);
+                List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
+                for (List<String> partitioningKey : partitioningKeys) {
+                    IAType keyType = aRecordType.getSubFieldType(partitioningKey);
                     ITypeTraits typeTrait = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
 
                     // If it is not a fixed length
@@ -835,7 +880,9 @@ public class AqlTranslator extends AbstractAqlTranslator {
                     // Add an entry for the files index
                     filesIndex = new Index(dataverseName, datasetName,
                             ExternalIndexingOperations.getFilesIndexName(datasetName), IndexType.BTREE,
-                            ExternalIndexingOperations.FILE_INDEX_FIELDS, false, IMetadataEntity.PENDING_ADD_OP);
+                            ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES,
+                            ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false,
+                            IMetadataEntity.PENDING_ADD_OP);
                     MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
                     // Add files to the external files index
                     for (ExternalFile file : externalFilesSnapshot) {
@@ -853,22 +900,42 @@ public class AqlTranslator extends AbstractAqlTranslator {
                 }
             }
 
+            //check whether there exists another enforced index on the same field
+            if (stmtCreateIndex.isEnforced()) {
+                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(
+                        metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
+                for (Index index : indexes) {
+                    if (index.getKeyFieldNames().equals(indexFields)
+                            && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds())
+                        throw new AsterixException("Cannot create index " + indexName + " , enforced index "
+                                + index.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',')
+                                + "\" already exists");
+                }
+            }
+
             //#. add a new index with PendingAddOp
-            Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
-                    stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
+            Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields,
+                    indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(), false,
                     IMetadataEntity.PENDING_ADD_OP);
             MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
 
+            ARecordType enforcedType = null;
+            if (stmtCreateIndex.isEnforced()) {
+                enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType, index);
+            }
+
             //#. prepare to create the index artifact in NC.
             CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
-                    index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
-            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
+                    index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
+                    index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
+            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, aRecordType, enforcedType, metadataProvider);
             if (spec == null) {
                 throw new AsterixException("Failed to create job spec for creating index '"
                         + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
+
             progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
             //#. create the index artifact in NC.
@@ -880,8 +947,9 @@ public class AqlTranslator extends AbstractAqlTranslator {
 
             //#. load data into the index in NC.
             cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
-                    index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
-            spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
+                    index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
+                    index.getGramLength(), index.getIndexType());
+            spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, aRecordType, enforcedType, metadataProvider);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
 
@@ -943,9 +1011,9 @@ public class AqlTranslator extends AbstractAqlTranslator {
                 try {
                     JobSpecification jobSpec = IndexOperations
                             .buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds);
+
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                     bActiveTxn = false;
-
                     runJob(hcc, jobSpec, true);
                 } catch (Exception e2) {
                     e.addSuppressed(e2);
@@ -1027,8 +1095,8 @@ public class AqlTranslator extends AbstractAqlTranslator {
                 if (builtinTypeMap.get(typeName) != null) {
                     throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
                 } else {
-                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, (TypeDecl) stmt,
-                            dataverseName);
+                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx,
+                            stmtCreateType.getTypeDef(), stmtCreateType.getIdent().getValue(), dataverseName);
                     TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
                     IAType type = typeMap.get(typeSignature);
                     MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
@@ -1414,9 +1482,11 @@ public class AqlTranslator extends AbstractAqlTranslator {
 
                 //#. mark PendingDropOp on the existing index
                 MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                MetadataManager.INSTANCE.addIndex(mdTxnCtx,
+                MetadataManager.INSTANCE.addIndex(
+                        mdTxnCtx,
                         new Index(dataverseName, datasetName, indexName, index.getIndexType(),
-                                index.getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+                                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index
+                                        .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
 
                 //#. commit the existing transaction before calling runJob.
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1468,17 +1538,20 @@ public class AqlTranslator extends AbstractAqlTranslator {
                             MetadataManager.INSTANCE.addIndex(
                                     mdTxnCtx,
                                     new Index(dataverseName, datasetName, externalIndex.getIndexName(), externalIndex
-                                            .getIndexType(), externalIndex.getKeyFieldNames(), externalIndex
-                                            .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+                                            .getIndexType(), externalIndex.getKeyFieldNames(),
+                                            index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), externalIndex
+                                                    .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
                         }
                     }
                 }
 
                 //#. mark PendingDropOp on the existing index
                 MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                MetadataManager.INSTANCE.addIndex(mdTxnCtx,
+                MetadataManager.INSTANCE.addIndex(
+                        mdTxnCtx,
                         new Index(dataverseName, datasetName, indexName, index.getIndexType(),
-                                index.getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+                                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index
+                                        .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
 
                 //#. commit the existing transaction before calling runJob.
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -2024,33 +2097,51 @@ public class AqlTranslator extends AbstractAqlTranslator {
                 throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
                         + dataverseName + ".");
             }
+
+            String itemTypeName = ds.getItemTypeName();
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
+                    itemTypeName);
+
+            // Prepare jobs to compact the dataset and its indexes
             List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
             if (indexes.size() == 0) {
                 throw new AlgebricksException("Cannot compact the extrenal dataset " + datasetName
                         + " because it has no indexes");
             }
+
             if (ds.getDatasetType() == DatasetType.INTERNAL) {
                 for (int j = 0; j < indexes.size(); j++) {
                     if (indexes.get(j).isSecondaryIndex()) {
                         CompiledIndexCompactStatement cics = new CompiledIndexCompactStatement(dataverseName,
                                 datasetName, indexes.get(j).getIndexName(), indexes.get(j).getKeyFieldNames(), indexes
-                                        .get(j).getGramLength(), indexes.get(j).getIndexType());
-                        jobsToExecute
-                                .add(IndexOperations.buildSecondaryIndexCompactJobSpec(cics, metadataProvider, ds));
+                                        .get(j).getKeyFieldTypes(), indexes.get(j).isEnforcingKeyFileds(), indexes.get(
+                                        j).getGramLength(), indexes.get(j).getIndexType());
+
+                        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(
+                                metadataProvider.getMetadataTxnContext(), dataverseName);
+                        jobsToExecute.add(DatasetOperations.compactDatasetJobSpec(dataverse, datasetName,
+                                metadataProvider));
+
                     }
                 }
-                Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
-                        dataverseName);
-                jobsToExecute.add(DatasetOperations.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
             } else {
                 for (int j = 0; j < indexes.size(); j++) {
                     if (!ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
                         CompiledIndexCompactStatement cics = new CompiledIndexCompactStatement(dataverseName,
                                 datasetName, indexes.get(j).getIndexName(), indexes.get(j).getKeyFieldNames(), indexes
-                                        .get(j).getGramLength(), indexes.get(j).getIndexType());
-                        jobsToExecute
-                                .add(IndexOperations.buildSecondaryIndexCompactJobSpec(cics, metadataProvider, ds));
+                                        .get(j).getKeyFieldTypes(), indexes.get(j).isEnforcingKeyFileds(), indexes.get(
+                                        j).getGramLength(), indexes.get(j).getIndexType());
+                        ARecordType aRecordType = (ARecordType) dt.getDatatype();
+                        ARecordType enforcedType = null;
+                        if (cics.isEnforced()) {
+                            enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType,
+                                    indexes.get(j));
+                        }
+                        jobsToExecute.add(IndexOperations.buildSecondaryIndexCompactJobSpec(cics, aRecordType,
+                                enforcedType, metadataProvider, ds));
+
                     }
+
                 }
                 jobsToExecute.add(ExternalIndexingOperations.compactFilesIndexJobSpec(ds, metadataProvider));
             }
@@ -2396,8 +2487,8 @@ public class AqlTranslator extends AbstractAqlTranslator {
         }
     }
 
-    private void handleRunStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws AsterixException, Exception {
+    private void handleRunStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc)
+            throws AsterixException, Exception {
         RunStatement runStmt = (RunStatement) stmt;
         switch (runStmt.getSystem()) {
             case "pregel":
@@ -2422,7 +2513,7 @@ public class AqlTranslator extends AbstractAqlTranslator {
         String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue();
         String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue();
 
-        if(dataverseNameFrom != dataverseNameTo) {
+        if (dataverseNameFrom != dataverseNameTo) {
             throw new AlgebricksException("Pregelix statements across different dataverses are not supported.");
         }
 

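The new key-resolution loop in handleCreateIndexStatement walks the record type along each key path and stops as soon as it reaches an open sub-record, because past that point a field's type can only come from the index definition itself (the typed, enforced case). A simplified stand-alone rendering of that walk, under the assumption of a toy RecType class in place of ARecordType:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class OpenPathWalkSketch {
        static class RecType {
            final boolean open;
            final Map<String, RecType> fields = new HashMap<String, RecType>();

            RecType(boolean open) {
                this.open = open;
            }
        }

        // Descends through the closed sub-records on the path prefix (the last
        // component is the indexed field itself). Returns the depth at which an
        // open record is hit, or path.size() if every enclosing record is closed.
        static int firstOpenDepth(RecType root, List<String> path) {
            RecType sub = root;
            int i = 0;
            while (i < path.size() - 1 && !sub.open) {
                sub = sub.fields.get(path.get(i));
                i++;
            }
            return sub.open ? i : path.size();
        }

        public static void main(String[] args) {
            RecType tweet = new RecType(false); // closed top-level type
            tweet.fields.put("user", new RecType(true)); // open sub-record
            // Prints 1: user is open, so user.followers_count must be typed by the index.
            System.out.println(firstOpenDepth(tweet, Arrays.asList("user", "followers_count")));
        }
    }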
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
index da12f14..ba6357b 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
@@ -16,6 +16,7 @@ package edu.uci.ics.asterix.file;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
 import java.util.Iterator;
@@ -64,6 +65,7 @@ import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
 import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
 import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
@@ -99,9 +101,11 @@ import edu.uci.ics.hyracks.storage.common.file.LocalResource;
 
 public class ExternalIndexingOperations {
 
-    public static final ArrayList<String> FILE_INDEX_FIELDS = new ArrayList<String>();
+    public static final List<List<String>> FILE_INDEX_FIELD_NAMES = new ArrayList<List<String>>();
+    public static final ArrayList<IAType> FILE_INDEX_FIELD_TYPES = new ArrayList<IAType>();
     static {
-        FILE_INDEX_FIELDS.add("");
+        FILE_INDEX_FIELD_NAMES.add(new ArrayList<String>(Arrays.asList("")));
+        FILE_INDEX_FIELD_TYPES.add(BuiltinType.ASTRING);
     }
 
     public static boolean isIndexible(ExternalDatasetDetails ds) {
@@ -472,9 +476,9 @@ public class ExternalIndexingOperations {
         }
 
         CompiledCreateIndexStatement ccis = new CompiledCreateIndexStatement(index.getIndexName(),
-                index.getDataverseName(), index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(),
-                index.getIndexType());
-        return IndexOperations.buildSecondaryIndexLoadingJobSpec(ccis, metadataProvider, files);
+                index.getDataverseName(), index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
+                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
+        return IndexOperations.buildSecondaryIndexLoadingJobSpec(ccis, null, null, metadataProvider, files);
     }
 
     public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
@@ -559,7 +563,7 @@ public class ExternalIndexingOperations {
             AsterixStorageProperties storageProperties, AqlMetadataProvider metadataProvider, JobSpecification spec)
             throws AlgebricksException, AsterixException {
         int numPrimaryKeys = getRIDSize(ds);
-        List<String> secondaryKeyFields = index.getKeyFieldNames();
+        List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
         secondaryKeyFields.size();
         ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getDataverseName(), ds.getItemTypeName());
         Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0), itemType);
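FILE_INDEX_FIELDS becomes two parallel lists here, and every index key name is now a path (a List<String>) rather than a bare string, even for the single empty-string key of the external files index. Migrating any flat key list to the new shape is just a matter of wrapping each name in a singleton path; a small sketch of that conversion (the helper name is ours, not the patch's):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class KeyNameMigrationSketch {
        // Wraps each flat key name in a one-element path, matching the new
        // List<List<String>> representation used for possibly-nested index keys.
        static List<List<String>> toNestedNames(List<String> flatNames) {
            List<List<String>> nested = new ArrayList<List<String>>();
            for (String name : flatNames) {
                nested.add(new ArrayList<String>(Arrays.asList(name)));
            }
            return nested;
        }

        public static void main(String[] args) {
            // Prints [[""]] -- the files-index key above in its nested form.
            System.out.println(toNestedNames(Arrays.asList("")));
        }
    }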

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
index 17b0b57..53dfc6e 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
@@ -27,6 +27,7 @@ import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.ExternalFile;
 import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
 import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
 import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
@@ -50,32 +51,38 @@ public class IndexOperations {
             .getPhysicalOptimizationConfig();
 
     public static JobSpecification buildSecondaryIndexCreationJobSpec(CompiledCreateIndexStatement createIndexStmt,
-            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
+            throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
                         createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
-                        createIndexStmt.getKeyFields(), createIndexStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig);
+                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
+                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
+                        physicalOptimizationConfig, recType, enforcedType);
         return secondaryIndexHelper.buildCreationJobSpec();
     }
 
     public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
-            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
+            throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
                         createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
-                        createIndexStmt.getKeyFields(), createIndexStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig);
+                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
+                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
+                        physicalOptimizationConfig, recType, enforcedType);
         return secondaryIndexHelper.buildLoadingJobSpec();
     }
-    
+
     public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
-            AqlMetadataProvider metadataProvider, List<ExternalFile> files) throws AsterixException, AlgebricksException {
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider,
+            List<ExternalFile> files) throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
                         createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
-                        createIndexStmt.getKeyFields(), createIndexStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig);
+                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
+                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
+                        physicalOptimizationConfig, recType, enforcedType);
         secondaryIndexHelper.setExternalFiles(files);
         return secondaryIndexHelper.buildLoadingJobSpec();
     }
@@ -108,12 +115,14 @@ public class IndexOperations {
     }
 
     public static JobSpecification buildSecondaryIndexCompactJobSpec(CompiledIndexCompactStatement indexCompactStmt,
-            AqlMetadataProvider metadataProvider, Dataset dataset) throws AsterixException, AlgebricksException {
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider, Dataset dataset)
+            throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(indexCompactStmt.getIndexType(), indexCompactStmt.getDataverseName(),
                         indexCompactStmt.getDatasetName(), indexCompactStmt.getIndexName(),
-                        indexCompactStmt.getKeyFields(), indexCompactStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig);
+                        indexCompactStmt.getKeyFields(), indexCompactStmt.getKeyTypes(), indexCompactStmt.isEnforced(),
+                        indexCompactStmt.getGramLength(), metadataProvider, physicalOptimizationConfig, recType,
+                        enforcedType);
         return secondaryIndexHelper.buildCompactJobSpec();
     }
 }
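All three job-spec builders now thread recType and enforcedType down to SecondaryIndexOperationsHelper (external file indexes pass null for both, as in the ExternalIndexingOperations hunk above). Conceptually, the enforced type is the open record type with each enforced index field closed at its declared type; the real construction lives in IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType, but a rough map-based sketch of the idea:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class EnforcedTypeSketch {
        // Toy record type: field name -> type name; open fields are simply absent.
        // Closing an open field means pinning it to the type declared in the index DDL.
        static Map<String, String> enforce(Map<String, String> recType, String field, String type) {
            Map<String, String> enforced = new LinkedHashMap<String, String>(recType);
            enforced.put(field, type);
            return enforced;
        }

        public static void main(String[] args) {
            Map<String, String> tweet = new LinkedHashMap<String, String>();
            tweet.put("tweetid", "int64");
            // Prints {tweetid=int64, send-time=datetime}
            System.out.println(enforce(tweet, "send-time", "datetime"));
        }
    }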

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
index 9b799d5..a061451 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
@@ -14,17 +14,24 @@
  */
 package edu.uci.ics.asterix.file;
 
+import java.util.List;
+
 import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
 import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
 import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.external.IndexingConstants;
 import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
 import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
+import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
 import edu.uci.ics.asterix.transaction.management.resource.ExternalBTreeWithBuddyLocalResourceMetadata;
 import edu.uci.ics.asterix.transaction.management.resource.LSMBTreeLocalResourceMetadata;
@@ -32,12 +39,20 @@ import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResour
 import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
 import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
 import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
 import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
 import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
@@ -124,11 +139,16 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             ExternalDataScanOperatorDescriptor primaryScanOp = createExternalIndexingOp(spec);
 
             // Assign op.
+            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            if (isEnforcingKeyTypes) {
+                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+            }
             AlgebricksMetaOperatorDescriptor asterixAssignOp = createExternalAssignOp(spec, numSecondaryKeys);
 
             // If any secondary field is nullable, or key types are being enforced, add a select op that filters nulls.
             AlgebricksMetaOperatorDescriptor selectOp = null;
-            if (anySecondaryKeyIsNullable) {
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
                 selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
             }
 
@@ -160,8 +180,8 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
                 spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
                 root = metaOp;
             }
-            spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, asterixAssignOp, 0);
-            if (anySecondaryKeyIsNullable) {
+            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
                 spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
                 spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
             } else {
@@ -179,11 +199,16 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
 
             // Assign op.
-            AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, primaryScanOp, numSecondaryKeys);
+            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            if (isEnforcingKeyTypes) {
+                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+            }
+            AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp, numSecondaryKeys);
 
             // If any secondary field is nullable, or key types are being enforced, add a select op that filters nulls.
             AlgebricksMetaOperatorDescriptor selectOp = null;
-            if (anySecondaryKeyIsNullable) {
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
                 selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
             }
 
@@ -207,8 +232,8 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
                     new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] { secondaryRecDesc });
             // Connect the operators.
             spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
-            spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, asterixAssignOp, 0);
-            if (anySecondaryKeyIsNullable) {
+            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
                 spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
                 spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
             } else {
@@ -264,4 +289,71 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
         spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
         return spec;
     }
+
+    @Override
+    @SuppressWarnings("rawtypes")
+    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
+        secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
+        secondaryBloomFilterKeyFields = new int[numSecondaryKeys];
+        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys
+                + numFilterFields];
+        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
+        secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
+        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
+        ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
+        ITypeTraitProvider typeTraitProvider = metadataProvider.getFormat().getTypeTraitProvider();
+        IBinaryComparatorFactoryProvider comparatorFactoryProvider = metadataProvider.getFormat()
+                .getBinaryComparatorFactoryProvider();
+        // Record column is 0 for external datasets, numPrimaryKeys for internal ones
+        int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
+        for (int i = 0; i < numSecondaryKeys; i++) {
+            secondaryFieldAccessEvalFactories[i] = metadataProvider.getFormat().getFieldAccessEvaluatorFactory(
+                    isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(i), recordColumn);
+            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(i),
+                    secondaryKeyFields.get(i), itemType);
+            IAType keyType = keyTypePair.first;
+            anySecondaryKeyIsNullable = anySecondaryKeyIsNullable || keyTypePair.second;
+            ISerializerDeserializer keySerde = serdeProvider.getSerializerDeserializer(keyType);
+            secondaryRecFields[i] = keySerde;
+            secondaryComparatorFactories[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
+            secondaryTypeTraits[i] = typeTraitProvider.getTypeTrait(keyType);
+            secondaryBloomFilterKeyFields[i] = i;
+        }
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            // Add serializers and comparators for primary index fields.
+            for (int i = 0; i < numPrimaryKeys; i++) {
+                secondaryRecFields[numSecondaryKeys + i] = primaryRecDesc.getFields()[i];
+                enforcedRecFields[i] = primaryRecDesc.getFields()[i];
+                secondaryTypeTraits[numSecondaryKeys + i] = primaryRecDesc.getTypeTraits()[i];
+                enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
+                secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
+            }
+        } else {
+            // Add serializers and comparators for RID fields.
+            for (int i = 0; i < numPrimaryKeys; i++) {
+                secondaryRecFields[numSecondaryKeys + i] = IndexingConstants.getSerializerDeserializer(i);
+                enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
+                secondaryTypeTraits[numSecondaryKeys + i] = IndexingConstants.getTypeTraits(i);
+                enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
+                secondaryComparatorFactories[numSecondaryKeys + i] = IndexingConstants.getComparatorFactory(i);
+            }
+        }
+        enforcedRecFields[numPrimaryKeys] = serdeProvider.getSerializerDeserializer(itemType);
+
+        if (numFilterFields > 0) {
+            secondaryFieldAccessEvalFactories[numSecondaryKeys] = metadataProvider.getFormat()
+                    .getFieldAccessEvaluatorFactory(itemType, filterFieldName, numPrimaryKeys);
+            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
+            IAType type = keyTypePair.first;
+            ISerializerDeserializer serde = serdeProvider.getSerializerDeserializer(type);
+            secondaryRecFields[numPrimaryKeys + numSecondaryKeys] = serde;
+        }
+
+        secondaryRecDesc = new RecordDescriptor(secondaryRecFields, secondaryTypeTraits);
+        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
+
+    }
 }
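
The override above pins down the secondary B-tree's physical layout: secondary key columns come first, then the primary keys (RID columns for external datasets), then the optional filter field; the enforced descriptor holds the primary keys followed by the single cast record column. A sketch of the column arithmetic implied by the arrays built there (the names restate fields of the class; nothing new is computed):

    // Offsets implied by setSecondaryRecDescAndComparators above.
    int secondaryRecWidth = numPrimaryKeys + numSecondaryKeys + numFilterFields;
    int firstPrimaryKeyColumn = numSecondaryKeys;               // keys sort first
    int filterFieldColumn = numPrimaryKeys + numSecondaryKeys;  // used when numFilterFields > 0
    int enforcedRecWidth = 1 + numPrimaryKeys + numFilterFields; // primary keys, then the cast record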

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/04b2b77a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
index 63c7411..cccb461 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
@@ -41,7 +41,6 @@ import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.ExternalFile;
-import edu.uci.ics.asterix.metadata.entities.Index;
 import edu.uci.ics.asterix.metadata.external.IndexingConstants;
 import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
 import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
@@ -49,6 +48,7 @@ import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
 import edu.uci.ics.asterix.runtime.evaluators.functions.AndDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.CastRecordDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.IsNullDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.NotDescriptor;
 import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
@@ -62,9 +62,7 @@ import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
 import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
 import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
 import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
@@ -114,6 +112,7 @@ public abstract class SecondaryIndexOperationsHelper {
     protected AlgebricksPartitionConstraint secondaryPartitionConstraint;
     protected String secondaryIndexName;
     protected boolean anySecondaryKeyIsNullable = false;
+    protected boolean isEnforcingKeyTypes = false;
 
     protected long numElementsHint;
     protected IBinaryComparatorFactory[] primaryComparatorFactories;
@@ -128,9 +127,11 @@ public abstract class SecondaryIndexOperationsHelper {
     protected IAsterixPropertiesProvider propertiesProvider;
     protected ILSMMergePolicyFactory mergePolicyFactory;
     protected Map<String, String> mergePolicyFactoryProperties;
+    protected RecordDescriptor enforcedRecDesc;
+    protected ARecordType enforcedItemType;
 
     protected int numFilterFields;
-    protected String filterFieldName;
+    protected List<String> filterFieldName;
     protected ITypeTraits[] filterTypeTraits;
     protected IBinaryComparatorFactory[] filterCmpFactories;
     protected int[] secondaryFilterFields;
@@ -147,9 +148,10 @@ public abstract class SecondaryIndexOperationsHelper {
     }
 
     public static SecondaryIndexOperationsHelper createIndexOperationsHelper(IndexType indexType, String dataverseName,
-            String datasetName, String indexName, List<String> secondaryKeyFields, int gramLength,
-            AqlMetadataProvider metadataProvider, PhysicalOptimizationConfig physOptConf) throws AsterixException,
-            AlgebricksException {
+            String datasetName, String indexName, List<List<String>> secondaryKeyFields, List<IAType> secondaryKeyTypes,
+            boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
+            PhysicalOptimizationConfig physOptConf, ARecordType recType, ARecordType enforcedType)
+            throws AsterixException, AlgebricksException {
         IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
         SecondaryIndexOperationsHelper indexOperationsHelper = null;
         switch (indexType) {
@@ -173,8 +175,8 @@ public abstract class SecondaryIndexOperationsHelper {
                 throw new AsterixException("Unknown Index Type: " + indexType);
             }
         }
-        indexOperationsHelper.init(indexType, dataverseName, datasetName, indexName, secondaryKeyFields, gramLength,
-                metadataProvider);
+        indexOperationsHelper.init(indexType, dataverseName, datasetName, indexName, secondaryKeyFields,
+                secondaryKeyTypes, isEnforced, gramLength, metadataProvider, recType, enforcedType);
         return indexOperationsHelper;
     }
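
A consequence of the widened factory signature is that each key is now a field path rather than a flat name: every inner List<String> spells out one key's components, which is how indexes over nested fields are expressed. A hypothetical example of building the key arguments for a B-tree over two nested fields (the field names and types are invented for illustration; uses java.util.Arrays and edu.uci.ics.asterix.om.types.BuiltinType):

    List<List<String>> secondaryKeyFields = Arrays.asList(
            Arrays.asList("location", "lat"),   // the nested field location.lat
            Arrays.asList("location", "lon"));  // the nested field location.lon
    List<IAType> secondaryKeyTypes = Arrays.<IAType> asList(
            BuiltinType.ADOUBLE, BuiltinType.ADOUBLE);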
 
@@ -184,18 +186,20 @@ public abstract class SecondaryIndexOperationsHelper {
 
     public abstract JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException;
 
-    protected void init(IndexType indexType, String dvn, String dsn, String in, List<String> secondaryKeyFields,
-            int gramLength, AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+    protected void init(IndexType indexType, String dvn, String dsn, String in, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
+            ARecordType aRecType, ARecordType enforcedType) throws AsterixException, AlgebricksException {
         this.metadataProvider = metadataProvider;
         dataverseName = dvn == null ? metadataProvider.getDefaultDataverseName() : dvn;
         datasetName = dsn;
         secondaryIndexName = in;
+        isEnforcingKeyTypes = isEnforced;
         dataset = metadataProvider.findDataset(dataverseName, datasetName);
         if (dataset == null) {
             throw new AsterixException("Unknown dataset " + datasetName);
         }
-
-        itemType = (ARecordType) metadataProvider.findType(dataset.getDataverseName(), dataset.getItemTypeName());
+        itemType = aRecType;
+        enforcedItemType = enforcedType;
         payloadSerde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(itemType);
         numSecondaryKeys = secondaryKeyFields.size();
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
@@ -220,7 +224,8 @@ public abstract class SecondaryIndexOperationsHelper {
             primaryPartitionConstraint = primarySplitsAndConstraint.second;
             setPrimaryRecDescAndComparators();
         }
-        setSecondaryRecDescAndComparators(indexType, secondaryKeyFields, gramLength, metadataProvider);
+        setSecondaryRecDescAndComparators(indexType, secondaryKeyFields, secondaryKeyTypes, gramLength,
+                metadataProvider);
         numElementsHint = metadataProvider.getCardinalityPerPartitionHint(dataset);
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
                 metadataProvider.getMetadataTxnContext());
@@ -248,7 +253,7 @@ public abstract class SecondaryIndexOperationsHelper {
 
         IAType type;
         try {
-            type = itemType.getFieldType(filterFieldName);
+            type = itemType.getSubFieldType(filterFieldName);
         } catch (IOException e) {
             throw new AlgebricksException(e);
         }
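
getSubFieldType replaces getFieldType here because a filter field, like a key field, may now live inside a nested record: the method walks the component path instead of looking up a single top-level name. A small illustration under invented field names:

    // Hypothetical: resolve the type of a nested filter field "nested.timestamp".
    // getSubFieldType throws IOException in this code base, caught as above.
    List<String> filterFieldName = Arrays.asList("nested", "timestamp");
    IAType type = itemType.getSubFieldType(filterFieldName);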
@@ -261,7 +266,7 @@ public abstract class SecondaryIndexOperationsHelper {
     protected abstract int getNumSecondaryKeys();
 
     protected void setPrimaryRecDescAndComparators() throws AlgebricksException {
-        List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
         int numPrimaryKeys = partitioningKeys.size();
         ISerializerDeserializer[] primaryRecFields = new ISerializerDeserializer[numPrimaryKeys + 1];
         ITypeTraits[] primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1];
@@ -271,7 +276,7 @@ public abstract class SecondaryIndexOperationsHelper {
         for (int i = 0; i < numPrimaryKeys; i++) {
             IAType keyType;
             try {
-                keyType = itemType.getFieldType(partitioningKeys.get(i));
+                keyType = itemType.getSubFieldType(partitioningKeys.get(i));
             } catch (IOException e) {
                 throw new AlgebricksException(e);
             }
@@ -286,65 +291,9 @@ public abstract class SecondaryIndexOperationsHelper {
         primaryRecDesc = new RecordDescriptor(primaryRecFields, primaryTypeTraits);
     }
 
-    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<String> secondaryKeyFields,
-            int gramLength, AqlMetadataProvider metadataProvider) throws AlgebricksException, AsterixException {
-        secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
-        if (indexType == IndexType.RTREE) {
-            secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys];
-        } else {
-            secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
-        }
-        secondaryBloomFilterKeyFields = new int[numSecondaryKeys];
-        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys
-                + numFilterFields];
-        secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
-        ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
-        ITypeTraitProvider typeTraitProvider = metadataProvider.getFormat().getTypeTraitProvider();
-        IBinaryComparatorFactoryProvider comparatorFactoryProvider = metadataProvider.getFormat()
-                .getBinaryComparatorFactoryProvider();
-        // Record column is 0 for external datasets, numPrimaryKeys for internal ones
-        int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
-        for (int i = 0; i < numSecondaryKeys; i++) {
-            secondaryFieldAccessEvalFactories[i] = metadataProvider.getFormat().getFieldAccessEvaluatorFactory(
-                    itemType, secondaryKeyFields.get(i), recordColumn);
-            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(i), itemType);
-            IAType keyType = keyTypePair.first;
-            anySecondaryKeyIsNullable = anySecondaryKeyIsNullable || keyTypePair.second;
-            ISerializerDeserializer keySerde = serdeProvider.getSerializerDeserializer(keyType);
-            secondaryRecFields[i] = keySerde;
-            secondaryComparatorFactories[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
-            secondaryTypeTraits[i] = typeTraitProvider.getTypeTrait(keyType);
-            secondaryBloomFilterKeyFields[i] = i;
-        }
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            // Add serializers and comparators for primary index fields.
-            for (int i = 0; i < numPrimaryKeys; i++) {
-                secondaryRecFields[numSecondaryKeys + i] = primaryRecDesc.getFields()[i];
-                secondaryTypeTraits[numSecondaryKeys + i] = primaryRecDesc.getTypeTraits()[i];
-                if (indexType != IndexType.RTREE) {
-                    secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
-                }
-            }
-        } else {
-            // Add serializers and comparators for RID fields.
-            for (int i = 0; i < numPrimaryKeys; i++) {
-                secondaryRecFields[numSecondaryKeys + i] = IndexingConstants.getSerializerDeserializer(i);
-                secondaryTypeTraits[numSecondaryKeys + i] = IndexingConstants.getTypeTraits(i);
-                if (indexType != IndexType.RTREE) {
-                    secondaryComparatorFactories[numSecondaryKeys + i] = IndexingConstants.getComparatorFactory(i);
-                }
-            }
-        }
-        if (numFilterFields > 0) {
-            secondaryFieldAccessEvalFactories[numSecondaryKeys] = metadataProvider.getFormat()
-                    .getFieldAccessEvaluatorFactory(itemType, filterFieldName, numPrimaryKeys);
-            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
-            IAType type = keyTypePair.first;
-            ISerializerDeserializer serde = serdeProvider.getSerializerDeserializer(type);
-            secondaryRecFields[numPrimaryKeys + numSecondaryKeys] = serde;
-        }
-        secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
-    }
+    protected abstract void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException;
 
     protected AbstractOperatorDescriptor createDummyKeyProviderOp(JobSpecification spec) throws AsterixException,
             AlgebricksException {
@@ -403,7 +352,7 @@ public abstract class SecondaryIndexOperationsHelper {
     }
 
     protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec,
-            BTreeSearchOperatorDescriptor primaryScanOp, int numSecondaryKeyFields) throws AlgebricksException {
+            AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields) throws AlgebricksException {
         int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
         int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
         for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
@@ -433,6 +382,37 @@ public abstract class SecondaryIndexOperationsHelper {
         return asterixAssignOp;
     }
 
+    protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec,
+            AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields, DatasetType dsType) {
+        CastRecordDescriptor castFuncDesc = (CastRecordDescriptor) CastRecordDescriptor.FACTORY
+                .createFunctionDescriptor();
+        castFuncDesc.reset(enforcedItemType, itemType);
+
+        int[] outColumns = new int[1];
+        int[] projectionList = new int[1 + numPrimaryKeys];
+        int recordIdx;
+        // The external data-scan operator returns the record as the first field, whereas an internal dataset's scan returns it last.
+        if (dsType == DatasetType.EXTERNAL) {
+            recordIdx = 0;
+            outColumns[0] = 0;
+        } else {
+            recordIdx = numPrimaryKeys;
+            outColumns[0] = numPrimaryKeys;
+        }
+        for (int i = 0; i <= numPrimaryKeys; i++) {
+            projectionList[i] = i;
+        }
+        ICopyEvaluatorFactory[] castEvalFact = new ICopyEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
+        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
+        sefs[0] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
+                castFuncDesc.createEvaluatorFactory(castEvalFact));
+        AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
+        AlgebricksMetaOperatorDescriptor castRecAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
+                new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
+
+        return castRecAssignOp;
+    }
+
     protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
             IBinaryComparatorFactory[] secondaryComparatorFactories, RecordDescriptor secondaryRecDesc) {
         int[] sortFields = new int[secondaryComparatorFactories.length];

