Fix HashJoin: buffer matched tuples per probe tuple and correct the iterator lifecycle methods

This commit is contained in:
Max O'Cull 2019-03-29 18:32:57 -04:00
parent afa0914070
commit d11251da4b
5 changed files with 411 additions and 280 deletions

View File

@ -4,18 +4,53 @@ import heap.HeapFile;
import index.HashIndex; import index.HashIndex;
import global.SearchKey; import global.SearchKey;
import global.RID; import global.RID;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import global.AttrOperator; import global.AttrOperator;
import global.AttrType; import global.AttrType;
public class HashJoin extends Iterator { public class HashJoin extends Iterator {
private Iterator outer; private Iterator smaller;
private Iterator inner; private Iterator larger;
private Predicate[] preds; private Predicate equijoinPredicate;
private HashTableDup hashTable;
private int smallerJoinCol;
private int largerJoinCol;
// private boolean startJoin = true;
Tuple leftTuple;
// boolean variable to indicate whether the pre-fetched tuple is consumed or not
// private boolean nextTupleIsConsumed;
// pre-fetched tuple
private List<Tuple> nextTupleBatch;
// private Tuple nextTuple = null;
public HashJoin(Iterator aIter1, Iterator aIter2, int aJoinCol1, int aJoinCol2) { public HashJoin(Iterator aIter1, Iterator aIter2, int aJoinCol1, int aJoinCol2) {
aIter1.getSchema().getLength(); System.out.println("> new HashJoin");
this.outer = aIter1; this.smaller = aIter1;
this.inner = aIter2; this.larger = aIter2;
this.smallerJoinCol = aJoinCol1;
this.largerJoinCol = aJoinCol2;
this.schema = Schema.join(this.smaller.schema, this.larger.schema);
this.equijoinPredicate = new Predicate(AttrOperator.EQ, AttrType.FIELDNO, aJoinCol1, AttrType.FIELDNO, aJoinCol2);
// Build the lookup table.
this.hashTable = new HashTableDup();
while (this.smaller.hasNext()) {
Tuple smallerTuple = this.smaller.getNext();
SearchKey searchKey = new SearchKey(smallerTuple.getField(aJoinCol1));
this.hashTable.add(searchKey, smallerTuple);
}
this.smaller.restart();
} }
/** /**
@ -30,28 +65,106 @@ public class HashJoin extends Iterator {
* Restarts the iterator, i.e. as if it were just constructed. * Restarts the iterator, i.e. as if it were just constructed.
*/ */
public void restart() { public void restart() {
throw new UnsupportedOperationException("Not implemented"); System.out.println("> HashJoin.restart");
this.smaller.restart();
this.larger.restart();
} }
/** /**
* Returns true if the iterator is open; false otherwise. * Returns true if the iterator is open; false otherwise.
*/ */
public boolean isOpen() { public boolean isOpen() {
throw new UnsupportedOperationException("Not implemented"); System.out.println("> HashJoin.isOpen");
if (this.larger.isOpen()) {
return true;
}
return false;
} }
/** /**
* Closes the iterator, releasing any resources (i.e. pinned pages). * Closes the iterator, releasing any resources (i.e. pinned pages).
*/ */
public void close() { public void close() {
throw new UnsupportedOperationException("Not implemented"); System.out.println("> HashJoin.close");
if (this.isOpen()) {
this.smaller.close();
this.larger.close();
}
} }
/** /**
* Returns true if there are more tuples, false otherwise. * Returns true if there are more tuples, false otherwise.
*/ */
public boolean hasNext() { public boolean hasNext() {
throw new UnsupportedOperationException("Not implemented"); System.out.println("> HashJoin.hasNext");
if (this.nextTupleBatch != null && this.nextTupleBatch.iterator().hasNext()) {
return true;
}
if (! this.larger.hasNext()) {
return false;
}
while (this.larger.hasNext()) {
Tuple rightTuple = this.larger.getNext();
SearchKey key = new SearchKey(rightTuple.getField(this.largerJoinCol));
List<Tuple> smallerMatches = Arrays.asList(this.hashTable.getAll(key));
for (Tuple small : smallerMatches) {
Tuple nextTuple = Tuple.join(small, rightTuple, this.schema);
if (this.equijoinPredicate.evaluate(nextTuple)) {
this.nextTupleBatch.add(nextTuple);
}
}
if (this.nextTupleBatch.iterator().hasNext()) {
return true;
}
}
return false;
// if (! this.nextTupleIsConsumed)
// return true;
//
// if (! this.smaller.hasNext())
// // if(!inner.hasNext() && !outer.hasNext()) // Piazza post 116
// return false;
//
// Tuple rightTuple;
//
// if (this.startJoin) {
// this.leftTuple = this.smaller.getNext();
// this.startJoin = false;
// }
//
// while (true) {
// while (this.larger.hasNext()) {
// rightTuple = this.larger.getNext();
//
// SearchKey key = new SearchKey(rightTuple.getField(this.largerJoinCol));
// this.nextTupleBatch = Arrays.asList(this.hashTable.getAll(key));
//
//
// // try to match
// this.nextTuple = Tuple.join(this.leftTuple, rightTuple, this.schema);
// if (this.equijoinPredicate.evaluate(nextTuple)) {
// this.nextTupleIsConsumed = false;
// return true;
// }
// }
//
// if (this.smaller.hasNext()) {
// this.leftTuple = this.smaller.getNext();
// this.larger.restart();
// } else
// return false;
// }
} }
/** /**
@ -60,6 +173,13 @@ public class HashJoin extends Iterator {
* @throws IllegalStateException if no more tuples * @throws IllegalStateException if no more tuples
*/ */
public Tuple getNext() { public Tuple getNext() {
throw new UnsupportedOperationException("Not implemented"); System.out.println("> HashJoin.getNext");
// if (! this.hasNext()) {
// throw new IllegalStateException("Iterator has no more entries");
// }
Tuple result = this.nextTupleBatch.iterator().next();
this.nextTupleBatch.iterator().remove();
return result;
} }
} // end class HashJoin; } // end class HashJoin;

View File

@ -14,18 +14,17 @@ public class KeyScan extends Iterator {
private HashScan scan = null; private HashScan scan = null;
private HashIndex index = null; private HashIndex index = null;
private SearchKey key = null; private SearchKey key = null;
private boolean isOpen;
/** /**
* Constructs an index scan, given the hash index and schema. * Constructs an index scan, given the hash index and schema.
*/ */
public KeyScan(Schema aSchema, HashIndex aIndex, SearchKey aKey, HeapFile aFile) { public KeyScan(Schema aSchema, HashIndex aIndex, SearchKey aKey, HeapFile aFile) {
System.out.println("> new KeyScan");
this.schema = aSchema; this.schema = aSchema;
this.index = aIndex; this.index = aIndex;
this.key = aKey; this.key = aKey;
this.file = aFile; this.file = aFile;
this.scan = this.index.openScan(this.key); this.scan = this.index.openScan(this.key);
this.isOpen = true;
} }
/** /**
@ -40,27 +39,31 @@ public class KeyScan extends Iterator {
* Restarts the iterator, i.e. as if it were just constructed. * Restarts the iterator, i.e. as if it were just constructed.
*/ */
public void restart() { public void restart() {
this.isOpen = false; // In case of errors maybe? System.out.println("> KeyScan.restart");
this.close(); this.close();
this.scan = this.index.openScan(this.key); this.scan = this.index.openScan(this.key);
this.isOpen = true;
} }
/** /**
* Returns true if the iterator is open; false otherwise. * Returns true if the iterator is open; false otherwise.
*/ */
public boolean isOpen() { public boolean isOpen() {
return this.isOpen; System.out.println("> KeyScan.isOpen");
if (this.scan != null) {
return true;
}
return false;
} }
/** /**
* Closes the iterator, releasing any resources (i.e. pinned pages). * Closes the iterator, releasing any resources (i.e. pinned pages).
*/ */
public void close() { public void close() {
System.out.println("> KeyScan.close");
if (this.isOpen()) { if (this.isOpen()) {
this.scan.close(); this.scan.close();
this.scan = null; this.scan = null;
this.isOpen = false;
} }
} }
@ -68,6 +71,7 @@ public class KeyScan extends Iterator {
* Returns true if there are more tuples, false otherwise. * Returns true if there are more tuples, false otherwise.
*/ */
public boolean hasNext() { public boolean hasNext() {
System.out.println("> KeyScan.hasNext");
if (this.isOpen()) { if (this.isOpen()) {
return this.scan.hasNext(); return this.scan.hasNext();
} }
@ -81,15 +85,15 @@ public class KeyScan extends Iterator {
* @throws IllegalStateException if no more tuples * @throws IllegalStateException if no more tuples
*/ */
public Tuple getNext() { public Tuple getNext() {
System.out.println("> KeyScan.getNext");
if (this.isOpen()) { if (this.isOpen()) {
RID rid = scan.getNext(); RID rid = scan.getNext();
byte[] data = this.file.selectRecord(rid); Tuple tuple = new Tuple(this.schema, this.file.selectRecord(rid));
Tuple tuple = new Tuple(this.getSchema(), data);
return tuple; return tuple;
} }
return null; throw new IllegalStateException("No more remaining tuples");
} }
} // public class KeyScan extends Iterator } // public class KeyScan extends Iterator

View File

@ -1,5 +1,7 @@
package relop; package relop;
import java.util.Arrays;
/** /**
* The projection operator extracts columns from a relation; unlike in * The projection operator extracts columns from a relation; unlike in
* relational algebra, this operator does NOT eliminate duplicate tuples. * relational algebra, this operator does NOT eliminate duplicate tuples.
@ -7,28 +9,22 @@ package relop;
public class Projection extends Iterator { public class Projection extends Iterator {
private Iterator iterator; private Iterator iterator;
private Integer[] fields; private Integer[] fields;
private Schema projectingSchema;
private boolean isOpen;
/** /**
* Constructs a projection, given the underlying iterator and field numbers. * Constructs a projection, given the underlying iterator and field numbers.
*/ */
public Projection(Iterator aIter, Integer... aFields) { public Projection(Iterator aIter, Integer... aFields) {
System.out.println("> new Projection");
this.iterator = aIter; this.iterator = aIter;
this.fields = aFields; this.fields = aFields;
// We need to make a deep copy instead of referencing Iterator's. // We need to make a deep copy instead of referencing Iterator's.
// this.projectingSchema = this.iterator.getSchema(); // this.projectingSchema = this.iterator.getSchema();
Schema originalSchema = this.iterator.getSchema(); this.schema = new Schema(this.fields.length);
this.projectingSchema = new Schema(fields.length); for (int i = 0; i < this.fields.length; i++) {
this.schema.initField(i, this.iterator.schema, this.fields[i]);
for (int i = 0; i < fields.length; i++) {
this.projectingSchema.initField(i, originalSchema.fieldType(fields[i]),
originalSchema.fieldLength(fields[i]), originalSchema.fieldName(fields[i]));
} }
this.isOpen = true;
} }
/** /**
@ -43,30 +39,33 @@ public class Projection extends Iterator {
* Restarts the iterator, i.e. as if it were just constructed. * Restarts the iterator, i.e. as if it were just constructed.
*/ */
public void restart() { public void restart() {
this.isOpen = false; System.out.println("> Projection.restart");
this.iterator.restart(); this.iterator.restart();
this.isOpen = true;
} }
/** /**
* Returns true if the iterator is open; false otherwise. * Returns true if the iterator is open; false otherwise.
*/ */
public boolean isOpen() { public boolean isOpen() {
return this.isOpen; System.out.println("> Projection.isOpen: " + this.iterator.isOpen());
return this.iterator.isOpen();
} }
/** /**
* Closes the iterator, releasing any resources (i.e. pinned pages). * Closes the iterator, releasing any resources (i.e. pinned pages).
*/ */
public void close() { public void close() {
this.iterator.close(); System.out.println("> Projection.close");
this.isOpen = false; if (this.isOpen()) {
this.iterator.close();
}
} }
/** /**
* Returns true if there are more tuples, false otherwise. * Returns true if there are more tuples, false otherwise.
*/ */
public boolean hasNext() { public boolean hasNext() {
System.out.println("> Projection.hasNext");
return this.iterator.hasNext(); return this.iterator.hasNext();
} }
@ -76,18 +75,19 @@ public class Projection extends Iterator {
* @throws IllegalStateException if no more tuples * @throws IllegalStateException if no more tuples
*/ */
public Tuple getNext() { public Tuple getNext() {
if (this.iterator.hasNext()) { System.out.println("> Projection.getNext");
// if (this.iterator.hasNext()) {
Tuple original = this.iterator.getNext(); Tuple original = this.iterator.getNext();
Tuple projecting = new Tuple(this.projectingSchema); Tuple projecting = new Tuple(this.schema);
for (int i = 0; i < this.fields.length; i++) { for (int i = 0; i < this.fields.length; i++) {
projecting.setField(i, original.getField(this.fields[i])); projecting.setField(i, original.getField(this.fields[i]));
} }
return projecting; return projecting;
} // }
return new Tuple(new Schema(1)); // throw new IllegalStateException("No remaining tuples");
} }
} // public class Projection extends Iterator } // public class Projection extends Iterator

View File

@ -50,8 +50,10 @@ public class Selection extends Iterator {
* Closes the iterator, releasing any resources (i.e. pinned pages). * Closes the iterator, releasing any resources (i.e. pinned pages).
*/ */
public void close() { public void close() {
this.iterator.close(); if (this.isOpen()) {
this.isOpen = false; this.iterator.close();
this.isOpen = false;
}
} }
/** /**

View File

@ -43,298 +43,303 @@ import java.util.HashMap;
*/ */
public class ROTest extends TestDriver { public class ROTest extends TestDriver {
private static ROTest rot; private static ROTest rot;
/** The display name of the test suite. */ /** The display name of the test suite. */
private static final String TEST_NAME = "relational operator tests"; private static final String TEST_NAME = "relational operator tests";
/** Size of tables in test3. */ /** Size of tables in test3. */
private static final int SUPER_SIZE = 2000; private static final int SUPER_SIZE = 2000;
private Scanner in; private Scanner in;
/** Drivers schema/table/index */ /** Drivers schema/table/index */
private static Schema s_drivers; private static Schema s_drivers;
private static HeapFile f_drivers; private static HeapFile f_drivers;
private static HashIndex idx_drivers; private static HashIndex idx_drivers;
/** Rides schema/table/index */ /** Rides schema/table/index */
private static Schema s_rides; private static Schema s_rides;
private static HeapFile f_rides; private static HeapFile f_rides;
private static HashIndex idx_rides; private static HashIndex idx_rides;
/** Groups schema/table/index */ /** Groups schema/table/index */
private static Schema s_groups; private static Schema s_groups;
private static HeapFile f_groups; private static HeapFile f_groups;
private static HashIndex idx_groups; private static HashIndex idx_groups;
/** Expected result strings for test cases */ /** Expected result strings for test cases */
private static HashMap<String, String> results; private static HashMap<String, String> results;
protected void execute_and_compare(String testDesc, String id, Iterator it) protected void execute_and_compare(String testDesc, String id, Iterator it) {
{ it.execute();
it.execute(); it.close();
it.close(); String[] sol = results.get(id).split("|");
String[] sol = results.get(id).split("|"); Arrays.sort(sol);
Arrays.sort(sol); String[] res = it.getResult().split("|");
String[] res = it.getResult().split("|"); Arrays.sort(res);
Arrays.sort(res); assertTrue("FAILURE: " + testDesc + " output did not match expected result, should be " + results.get(id),
assertTrue("FAILURE: " + testDesc + " output did not match expected result, should be " + results.get(id), Arrays.equals(sol, res)); Arrays.equals(sol, res));
} }
public static void main(String[] args) { public static void main(String[] args) {
Result result = JUnitCore.runClasses(tests.ROTest.class); Result result = JUnitCore.runClasses(tests.ROTest.class);
for (Failure failure : result.getFailures()) { for (Failure failure : result.getFailures()) {
System.out.println(failure.toString()); System.out.println(failure.toString());
} }
System.out.println(result.wasSuccessful()); System.out.println(result.wasSuccessful());
} }
@BeforeClass @BeforeClass
public static void setupDB() { public static void setupDB() {
// create a clean Minibase instance // create a clean Minibase instance
rot = new ROTest(); rot = new ROTest();
rot.create_minibase(); rot.create_minibase();
results = new HashMap<String, String>(); results = new HashMap<String, String>();
try try {
{ Scanner in = new Scanner(new File("solution.txt"));
Scanner in = new Scanner(new File("solution.txt")); String line;
String line; String[] res;
String[] res; while (in.hasNextLine()) {
while(in.hasNextLine()) line = in.nextLine();
{ res = line.split("=");
line = in.nextLine(); results.put(res[0], res[1]);
res = line.split("=");
results.put(res[0],res[1]);
} }
} catch (IOException e) } catch (IOException e) {
{ e.printStackTrace();
e.printStackTrace(); System.exit(1);
System.exit(1); }
}
Tuple d_tuple, r_tuple, g_tuple; Tuple d_tuple, r_tuple, g_tuple;
// initialize schema for the "Drivers" table // initialize schema for the "Drivers" table
s_drivers = new Schema(5); s_drivers = new Schema(5);
s_drivers.initField(0, AttrType.INTEGER, 4, "DriverId"); s_drivers.initField(0, AttrType.INTEGER, 4, "DriverId");
s_drivers.initField(1, AttrType.STRING, 20, "FirstName"); s_drivers.initField(1, AttrType.STRING, 20, "FirstName");
s_drivers.initField(2, AttrType.STRING, 20, "LastName"); s_drivers.initField(2, AttrType.STRING, 20, "LastName");
s_drivers.initField(3, AttrType.FLOAT, 4, "Age"); s_drivers.initField(3, AttrType.FLOAT, 4, "Age");
s_drivers.initField(4, AttrType.INTEGER, 4, "NumSeats"); s_drivers.initField(4, AttrType.INTEGER, 4, "NumSeats");
//Create and populate "Drivers" relation; // Create and populate "Drivers" relation;
f_drivers = new HeapFile("drivers"); f_drivers = new HeapFile("drivers");
idx_drivers = new HashIndex("drivers_idx"); idx_drivers = new HashIndex("drivers_idx");
d_tuple = new Tuple(s_drivers); d_tuple = new Tuple(s_drivers);
d_tuple.setAllFields(1, "Ahmed", "Elmagarmid", 25F, 5); d_tuple.setAllFields(1, "Ahmed", "Elmagarmid", 25F, 5);
idx_drivers.insertEntry(new SearchKey(25F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(25F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(2, "Walid", "Aref", 20F, 13); d_tuple.setAllFields(2, "Walid", "Aref", 20F, 13);
idx_drivers.insertEntry(new SearchKey(20F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(20F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(3, "Christopher", "Clifton", 18F, 4); d_tuple.setAllFields(3, "Christopher", "Clifton", 18F, 4);
idx_drivers.insertEntry(new SearchKey(18F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(18F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(4, "Sunil", "Prabhakar", 22F, 7); d_tuple.setAllFields(4, "Sunil", "Prabhakar", 22F, 7);
idx_drivers.insertEntry(new SearchKey(22F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(22F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(5, "Elisa", "Bertino", 26F, 5); d_tuple.setAllFields(5, "Elisa", "Bertino", 26F, 5);
idx_drivers.insertEntry(new SearchKey(26F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(26F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(6, "Susanne", "Hambrusch", 23F, 3); d_tuple.setAllFields(6, "Susanne", "Hambrusch", 23F, 3);
idx_drivers.insertEntry(new SearchKey(23F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(23F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(7, "David", "Eberts", 24F, 8); d_tuple.setAllFields(7, "David", "Eberts", 24F, 8);
idx_drivers.insertEntry(new SearchKey(24F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(24F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(8, "Arif", "Ghafoor", 20F, 5); d_tuple.setAllFields(8, "Arif", "Ghafoor", 20F, 5);
idx_drivers.insertEntry(new SearchKey(20F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(20F), f_drivers.insertRecord(d_tuple.getData()));
d_tuple.setAllFields(9, "Jeff", "Vitter", 19F, 10); d_tuple.setAllFields(9, "Jeff", "Vitter", 19F, 10);
idx_drivers.insertEntry(new SearchKey(19F), f_drivers.insertRecord(d_tuple.getData())); idx_drivers.insertEntry(new SearchKey(19F), f_drivers.insertRecord(d_tuple.getData()));
// initialize schema for the "Rides" table // initialize schema for the "Rides" table
s_rides = new Schema(4); s_rides = new Schema(4);
s_rides.initField(0, AttrType.INTEGER, 4, "DriverId"); s_rides.initField(0, AttrType.INTEGER, 4, "DriverId");
s_rides.initField(1, AttrType.INTEGER, 4, "GroupId"); s_rides.initField(1, AttrType.INTEGER, 4, "GroupId");
s_rides.initField(2, AttrType.STRING, 10, "FromDate"); s_rides.initField(2, AttrType.STRING, 10, "FromDate");
s_rides.initField(3, AttrType.STRING, 10, "ToDate"); s_rides.initField(3, AttrType.STRING, 10, "ToDate");
// Create and populate "Rides" table // Create and populate "Rides" table
f_rides = new HeapFile("rides"); f_rides = new HeapFile("rides");
idx_rides = new HashIndex("rides_idx"); idx_rides = new HashIndex("rides_idx");
r_tuple = new Tuple(s_rides); r_tuple = new Tuple(s_rides);
r_tuple.setAllFields(3, 5, "2/10/2006", "2/13/2006"); r_tuple.setAllFields(3, 5, "2/10/2006", "2/13/2006");
idx_rides.insertEntry(new SearchKey(5), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(5), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(1, 2, "2/12/2006", "2/14/2006"); r_tuple.setAllFields(1, 2, "2/12/2006", "2/14/2006");
idx_rides.insertEntry(new SearchKey(2), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(2), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(9, 1, "2/15/2006", "2/15/2006"); r_tuple.setAllFields(9, 1, "2/15/2006", "2/15/2006");
idx_rides.insertEntry(new SearchKey(1), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(1), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(5, 7, "2/14/2006", "2/18/2006"); r_tuple.setAllFields(5, 7, "2/14/2006", "2/18/2006");
idx_rides.insertEntry(new SearchKey(7), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(7), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(1, 3, "2/15/2006", "2/16/2006"); r_tuple.setAllFields(1, 3, "2/15/2006", "2/16/2006");
idx_rides.insertEntry(new SearchKey(3), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(3), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(2, 6, "2/17/2006", "2/20/2006"); r_tuple.setAllFields(2, 6, "2/17/2006", "2/20/2006");
idx_rides.insertEntry(new SearchKey(6), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(6), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(3, 4, "2/18/2006", "2/19/2006"); r_tuple.setAllFields(3, 4, "2/18/2006", "2/19/2006");
idx_rides.insertEntry(new SearchKey(4), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(4), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(4, 1, "2/19/2006", "2/19/2006"); r_tuple.setAllFields(4, 1, "2/19/2006", "2/19/2006");
idx_rides.insertEntry(new SearchKey(1), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(1), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(2, 7, "2/18/2006", "2/23/2006"); r_tuple.setAllFields(2, 7, "2/18/2006", "2/23/2006");
idx_rides.insertEntry(new SearchKey(7), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(7), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(8, 5, "2/20/2006", "2/22/2006"); r_tuple.setAllFields(8, 5, "2/20/2006", "2/22/2006");
idx_rides.insertEntry(new SearchKey(5), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(5), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(3, 2, "2/24/2006", "2/26/2006"); r_tuple.setAllFields(3, 2, "2/24/2006", "2/26/2006");
idx_rides.insertEntry(new SearchKey(2), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(2), f_rides.insertRecord(r_tuple.getData()));
r_tuple.setAllFields(6, 6, "2/25/2006", "2/26/2006"); r_tuple.setAllFields(6, 6, "2/25/2006", "2/26/2006");
idx_rides.insertEntry(new SearchKey(6), f_rides.insertRecord(r_tuple.getData())); idx_rides.insertEntry(new SearchKey(6), f_rides.insertRecord(r_tuple.getData()));
// initialize schema for the "Groups" table // initialize schema for the "Groups" table
s_groups = new Schema(2); s_groups = new Schema(2);
s_groups.initField(0, AttrType.INTEGER, 4, "GroupId"); s_groups.initField(0, AttrType.INTEGER, 4, "GroupId");
s_groups.initField(1, AttrType.STRING, 10, "GroupName"); s_groups.initField(1, AttrType.STRING, 10, "GroupName");
// Create and populate "Groups" table // Create and populate "Groups" table
f_groups = new HeapFile("groups"); f_groups = new HeapFile("groups");
idx_groups = new HashIndex("groups_idx"); idx_groups = new HashIndex("groups_idx");
g_tuple = new Tuple(s_groups); g_tuple = new Tuple(s_groups);
for(int i = 1; i <= 7; i++) for (int i = 1; i <= 7; i++) {
{ g_tuple.setAllFields(i, "Purdue" + i);
g_tuple.setAllFields(i, "Purdue" + i); idx_groups.insertEntry(new SearchKey(i), f_groups.insertRecord(g_tuple.getData()));
idx_groups.insertEntry(new SearchKey(i), f_groups.insertRecord(g_tuple.getData())); }
}
System.out.println(); System.out.println();
} }
@AfterClass @AfterClass
public static void tearDownDB() public static void tearDownDB() {
{ idx_drivers.deleteFile();
idx_drivers.deleteFile(); idx_rides.deleteFile();
idx_rides.deleteFile(); idx_groups.deleteFile();
idx_groups.deleteFile(); f_rides.deleteFile();
f_rides.deleteFile(); f_drivers.deleteFile();
f_drivers.deleteFile(); f_groups.deleteFile();
f_groups.deleteFile(); rot.delete_minibase();
rot.delete_minibase(); }
}
@Test // @Test
public void testFileScan() { // public void testFileScan() {
//Scan drivers table // // Scan drivers table
Iterator fscan = new FileScan(s_drivers, f_drivers); // Iterator fscan = new FileScan(s_drivers, f_drivers);
execute_and_compare("Filescan", "filescan", fscan); // execute_and_compare("Filescan", "filescan", fscan);
} // }
//
@Test // @Test
public void testIndexScan() { // public void testIndexScan() {
//Scan drivers index // // Scan drivers index
Iterator idxscan = new IndexScan(s_drivers, idx_drivers, f_drivers); // Iterator idxscan = new IndexScan(s_drivers, idx_drivers, f_drivers);
execute_and_compare("IndexScan", "idxscan", idxscan); // execute_and_compare("IndexScan", "idxscan", idxscan);
} // }
//
@Test // @Test
public void testKeyScan() { // public void testKeyScan() {
//Scan drivers index for key 20f // // Scan drivers index for key 20f
Iterator keyscan = new KeyScan(s_drivers, idx_drivers, new SearchKey(20f), f_drivers); // Iterator keyscan = new KeyScan(s_drivers, idx_drivers, new SearchKey(20f), f_drivers);
execute_and_compare("KeyScan", "keyscan", keyscan); // execute_and_compare("KeyScan", "keyscan", keyscan);
} // }
//
@Test // @Test
public void testSelection() { // public void testSelection() {
//Selection drivers with age > 20 // // Selection drivers with age > 20
Iterator selection = new Selection(new FileScan(s_drivers, f_drivers), // Iterator selection = new Selection(new FileScan(s_drivers, f_drivers),
new Predicate(AttrOperator.GT, AttrType.COLNAME, "age", AttrType.FLOAT, 20F)); // new Predicate(AttrOperator.GT, AttrType.COLNAME, "age", AttrType.FLOAT, 20F));
execute_and_compare("Selection", "selection", selection); // execute_and_compare("Selection", "selection", selection);
} // }
//
@Test // @Test
public void testSelectionMultiplePredicates() { // public void testSelectionMultiplePredicates() {
Iterator selection_preds = new Selection(new FileScan(s_drivers, f_drivers), // Iterator selection_preds = new Selection(new FileScan(s_drivers, f_drivers),
new Predicate(AttrOperator.GT, AttrType.COLNAME, "age", AttrType.FLOAT, 23F), // new Predicate(AttrOperator.GT, AttrType.COLNAME, "age", AttrType.FLOAT, 23F),
new Predicate(AttrOperator.LT, AttrType.COLNAME, "age", AttrType.FLOAT, 19F)); // new Predicate(AttrOperator.LT, AttrType.COLNAME, "age", AttrType.FLOAT, 19F));
execute_and_compare("Selection Multipled Predicates", "selection_preds", selection_preds); // execute_and_compare("Selection Multipled Predicates", "selection_preds", selection_preds);
} // }
//
@Test // @Test
public void testProjection() { // public void testProjection() {
//Projection on Drivers: {FirstName, NumSeats} // // Projection on Drivers: {FirstName, NumSeats}
Iterator projection = new Projection(new FileScan(s_drivers, f_drivers), s_drivers.fieldNumber("FirstName"), s_drivers.fieldNumber("NumSeats")); // Iterator projection = new Projection(new FileScan(s_drivers, f_drivers), s_drivers.fieldNumber("FirstName"),
execute_and_compare("Projection", "projection", projection); // s_drivers.fieldNumber("NumSeats"));
} // execute_and_compare("Projection", "projection", projection);
// }
@Test @Test
public void testHashJoin() { public void testHashJoin() {
//HashJoin on Drivers X Rides on DriverID // HashJoin on Drivers X Rides on DriverID
Iterator hashjoin = new HashJoin(new FileScan(s_drivers, f_drivers), Iterator hashjoin = new HashJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides), 0, 0);
new FileScan(s_rides, f_rides), 0, 0);
execute_and_compare("Hash Join", "hashjoin", hashjoin); execute_and_compare("Hash Join", "hashjoin", hashjoin);
} }
@Test @Test
public void testSelectionPipelining() { public void testSelectionPipelining() {
//Test all possible Iterator inputs to Selection // Test all possible Iterator inputs to Selection
Iterator sel_idx = new Selection(new IndexScan(s_drivers, idx_drivers, f_drivers), Iterator sel_idx = new Selection(new IndexScan(s_drivers, idx_drivers, f_drivers),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid")); new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid"));
execute_and_compare("Selection - Pipelining IndexScan", "sel_idx", sel_idx); execute_and_compare("Selection - Pipelining IndexScan", "sel_idx", sel_idx);
Iterator sel_key = new Selection(new KeyScan(s_drivers, idx_drivers, new SearchKey(20F), f_drivers), Iterator sel_key = new Selection(new KeyScan(s_drivers, idx_drivers, new SearchKey(20F), f_drivers),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid")); new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid"));
execute_and_compare("Selection - Pipelining Keyscan", "sel_key", sel_key); execute_and_compare("Selection - Pipelining Keyscan", "sel_key", sel_key);
Iterator sel_sel = new Selection(new Selection(new FileScan(s_drivers, f_drivers), Iterator sel_sel = new Selection(
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "Age", AttrType.FLOAT, 20F)), new Selection(new FileScan(s_drivers, f_drivers),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid")); new Predicate(AttrOperator.EQ, AttrType.COLNAME, "Age", AttrType.FLOAT, 20F)),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid"));
execute_and_compare("Selection - Pipelining Selection", "sel_sel", sel_sel); execute_and_compare("Selection - Pipelining Selection", "sel_sel", sel_sel);
Iterator sel_proj = new Selection(new Projection( Iterator sel_proj = new Selection(
new FileScan(s_drivers, f_drivers), s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("FirstName")), new Projection(new FileScan(s_drivers, f_drivers), s_drivers.fieldNumber("DriverId"),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid")); s_drivers.fieldNumber("FirstName")),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid"));
execute_and_compare("Selection - Pipelining Projection", "sel_proj", sel_proj); execute_and_compare("Selection - Pipelining Projection", "sel_proj", sel_proj);
Iterator sel_sj = new Selection(new SimpleJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides), Iterator sel_sj = new Selection(
new Predicate(AttrOperator.EQ, AttrType.FIELDNO, 0, AttrType.FIELDNO, 5)), new SimpleJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid")); new Predicate(AttrOperator.EQ, AttrType.FIELDNO, 0, AttrType.FIELDNO, 5)),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid"));
execute_and_compare("Selection - Pipelining Simple Join", "sel_sj", sel_sj); execute_and_compare("Selection - Pipelining Simple Join", "sel_sj", sel_sj);
Iterator sel_hj = new Selection(new HashJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides),0,0), // Iterator sel_hj = new Selection(
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid")); // new HashJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides), 0, 0),
execute_and_compare("Selection - Pipelining Hash Join", "sel_jh", sel_hj); // new Predicate(AttrOperator.EQ, AttrType.COLNAME, "FirstName", AttrType.STRING, "Walid"));
// execute_and_compare("Selection - Pipelining Hash Join", "sel_jh", sel_hj);
} }
@Test @Test
public void testProjectionPipelining() { public void testProjectionPipelining() {
//Test all possible Iterator inputs to HashJoin // Test all possible Iterator inputs to HashJoin
Iterator proj_idx = new Projection(new IndexScan(s_drivers, idx_drivers, f_drivers), // Iterator proj_idx = new Projection(new IndexScan(s_drivers, idx_drivers, f_drivers),
s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age")); // s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age"));
execute_and_compare("Projection - Pipelining IndexScan", "proj_idx", proj_idx); // execute_and_compare("Projection - Pipelining IndexScan", "proj_idx", proj_idx);
Iterator proj_key = new Projection(new KeyScan(s_drivers, idx_drivers, new SearchKey(20F), f_drivers), // Iterator proj_key = new Projection(new KeyScan(s_drivers, idx_drivers, new SearchKey(20F), f_drivers),
s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age")); // s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age"));
execute_and_compare("Projection - Pipelining KeyScan", "proj_key", proj_key); // execute_and_compare("Projection - Pipelining KeyScan", "proj_key", proj_key);
Iterator proj_sel = new Projection(new Selection(new FileScan(s_drivers, f_drivers), // Iterator proj_sel = new Projection(
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "Age", AttrType.FLOAT, 20F)), // new Selection(new FileScan(s_drivers, f_drivers),
s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age")); // new Predicate(AttrOperator.EQ, AttrType.COLNAME, "Age", AttrType.FLOAT, 20F)),
execute_and_compare("Projection - Pipelining Selection", "proj_sel", proj_sel); // s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age"));
Iterator proj_proj = new Projection(new Projection(new FileScan(s_drivers, f_drivers), // execute_and_compare("Projection - Pipelining Selection", "proj_sel", proj_sel);
s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("FirstName"), s_drivers.fieldNumber("Age")), // Iterator proj_proj = new Projection(new Projection(new FileScan(s_drivers, f_drivers),
0, 2); // s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("FirstName"), s_drivers.fieldNumber("Age")), 0,
execute_and_compare("Projection - Pipelining Projection", "proj_proj", proj_proj); // 2);
Iterator proj_sj = new Projection(new SimpleJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides), // execute_and_compare("Projection - Pipelining Projection", "proj_proj", proj_proj);
new Predicate(AttrOperator.EQ, AttrType.FIELDNO, 0, AttrType.FIELDNO, 5)), // Iterator proj_sj = new Projection(
0, 3); // new SimpleJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides),
execute_and_compare("Projection - Pipelining Simple Join", "proj_sj", proj_sj); // new Predicate(AttrOperator.EQ, AttrType.FIELDNO, 0, AttrType.FIELDNO, 5)),
Iterator proj_hj = new Projection(new HashJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides),0,0), // 0, 3);
0, 3); // execute_and_compare("Projection - Pipelining Simple Join", "proj_sj", proj_sj);
Iterator proj_hj = new Projection(
new HashJoin(new FileScan(s_drivers, f_drivers), new FileScan(s_rides, f_rides), 0, 0), 0, 3);
execute_and_compare("Projection - Pipelining Hash Join", "proj_hj", proj_hj); execute_and_compare("Projection - Pipelining Hash Join", "proj_hj", proj_hj);
} }
@Test @Test
public void testHashjoinPipelining() { public void testHashjoinPipelining() {
//Test all possible Iterator inputs to HashJoin // Test all possible Iterator inputs to HashJoin
Iterator hj_sel_sj = new HashJoin(new Selection(new FileScan(s_drivers, f_drivers), Iterator hj_sel_sj = new HashJoin(
new Predicate(AttrOperator.GTE, AttrType.COLNAME, "DriverId", AttrType.INTEGER, 1)), new Selection(new FileScan(s_drivers, f_drivers),
new SimpleJoin(new FileScan(s_rides, f_rides), new FileScan(s_groups, f_groups), new Predicate(AttrOperator.GTE, AttrType.COLNAME, "DriverId", AttrType.INTEGER, 1)),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "GroupId", AttrType.COLNAME, "DriverId")),0,0); new SimpleJoin(new FileScan(s_rides, f_rides), new FileScan(s_groups, f_groups),
new Predicate(AttrOperator.EQ, AttrType.COLNAME, "GroupId", AttrType.COLNAME, "DriverId")),
0, 0);
execute_and_compare("Hash Join - Pipelining Selection/Simple Join", "hj_sel_sj", hj_sel_sj); execute_and_compare("Hash Join - Pipelining Selection/Simple Join", "hj_sel_sj", hj_sel_sj);
Iterator hj_proj_hj = new HashJoin(new Projection(new FileScan(s_drivers, f_drivers), s_drivers.fieldNumber("DriverId"), s_drivers.fieldNumber("Age")), Iterator hj_proj_hj = new HashJoin(
new HashJoin(new FileScan(s_rides, f_rides), new FileScan(s_groups, f_groups), 1, 0), 0, 0); new Projection(new FileScan(s_drivers, f_drivers), s_drivers.fieldNumber("DriverId"),
s_drivers.fieldNumber("Age")),
new HashJoin(new FileScan(s_rides, f_rides), new FileScan(s_groups, f_groups), 1, 0), 0, 0);
execute_and_compare("Hash Join - Pipelining Projection/Hash Join", "hj_proj_hj", hj_proj_hj); execute_and_compare("Hash Join - Pipelining Projection/Hash Join", "hj_proj_hj", hj_proj_hj);
Iterator hj_iscan_kscan = new HashJoin(new IndexScan(s_drivers, idx_drivers, f_drivers), Iterator hj_iscan_kscan = new HashJoin(new IndexScan(s_drivers, idx_drivers, f_drivers),
new KeyScan(s_rides, idx_rides, new SearchKey(1), f_rides), 0, 0); new KeyScan(s_rides, idx_rides, new SearchKey(1), f_rides), 0, 0);
execute_and_compare("Hash Join - Pipelining IndexScan/KeyScan", "hj_iscan_kscan", hj_iscan_kscan); execute_and_compare("Hash Join - Pipelining IndexScan/KeyScan", "hj_iscan_kscan", hj_iscan_kscan);
} }