Added bloom filter implementation. More extensive testing is still needed.

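A sketch of the intended usage, mirroring BloomFilterTest (bufferCache, fileMapProvider,
file, numElements, keyFields, numHashes, and tuple are supplied by the caller):

    BloomFilter bf = new BloomFilter(bufferCache, fileMapProvider, file,
            numElements, keyFields, numHashes);
    bf.create();      // writes the metadata page
    bf.activate();    // pins the bit-vector pages
    bf.add(tuple);    // sets numHashes bits derived from the tuple's key fields
    boolean mayContain = bf.contains(tuple);
    bf.deactivate();
    bf.destroy();
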
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_lsm_tree_bloom_filter@2699 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-storage-am-bloomfilter/pom.xml b/hyracks-storage-am-bloomfilter/pom.xml
new file mode 100644
index 0000000..dab96f9
--- /dev/null
+++ b/hyracks-storage-am-bloomfilter/pom.xml
@@ -0,0 +1,42 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-bloomfilter</artifactId>
+  <version>0.2.2-SNAPSHOT</version>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.2-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>0.2.2-SNAPSHOT</version>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.8.1</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/api/IFilter.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/api/IFilter.java
new file mode 100644
index 0000000..04c9f0b
--- /dev/null
+++ b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/api/IFilter.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter.api;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
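+/**
+ * Lifecycle (create/activate/deactivate/destroy) and membership (add/contains) operations for an
+ * approximate-membership filter over tuples, such as a Bloom filter: contains() may report false
+ * positives, but must never report a false negative for an added tuple.
+ */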
+public interface IFilter {
+
+    public void create() throws HyracksDataException;
+
+    public void add(ITupleReference tuple);
+
+    public boolean contains(ITupleReference tuple);
+
+    public void destroy() throws HyracksDataException;
+
+    public void activate() throws HyracksDataException;
+
+    public void deactivate() throws HyracksDataException;
+
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilter.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
new file mode 100644
index 0000000..217269f
--- /dev/null
+++ b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.api.IFilter;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
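+/**
+ * An on-disk Bloom filter. Page 0 of the backing file holds the metadata (the number of bit-vector
+ * pages as a long, followed by the number of hash functions as an int); pages 1 through numPages
+ * hold the bit vector itself. Probes hash the given key fields with MurmurHash128Bit and derive
+ * numHashes bit positions from the two 64-bit halves. The filter is sized at roughly
+ * NUM_BITS_PER_ELEMENT bits per expected element.
+ */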
+public class BloomFilter implements IFilter {
+
+    private final static int METADATA_PAGE_ID = 0;
+    private final static int NUM_PAGES_OFFSET = 0;
+    private final static int NUM_HASHES_USED_OFFSET = NUM_PAGES_OFFSET + 8; // the page count is stored as a long (8 bytes)
+
+    private final static int NUM_BITS_PER_ELEMENT = 10;
+
+    private final IBufferCache bufferCache;
+    private final IFileMapProvider fileMapProvider;
+    private final FileReference file;
+    private final long numElements;
+    private final int[] keyFields;
+    private final int numHashes;
+    private int fileId = -1;
+    private boolean isActivated = false;
+    private final long numPages;
+    private final ArrayList<ICachedPage> bloomFilterPages = new ArrayList<ICachedPage>();
+    private final static long SEED = 0L;
+    private final int numBitsPerPage;
+
+    public BloomFilter(IBufferCache bufferCache, IFileMapProvider fileMapProvider, FileReference file,
+            long numElements, int[] keyFields, int numHashes) {
+        this.bufferCache = bufferCache;
+        this.fileMapProvider = fileMapProvider;
+        this.file = file;
+        this.numElements = numElements;
+        this.keyFields = keyFields;
+        this.numHashes = numHashes;
+        numBitsPerPage = bufferCache.getPageSize() * Byte.SIZE;
+        numPages = (long) Math.ceil((numElements * NUM_BITS_PER_ELEMENT) / (double) numBitsPerPage);
+    }
+
+    public int getFileId() {
+        return fileId;
+    }
+
+    public FileReference getFileReference() {
+        return file;
+    }
+
+    @Override
+    public void add(ITupleReference tuple) {
+        long[] hashes = new long[2];
+        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, SEED, hashes);
+        for (int i = 0; i < numHashes; ++i) {
+            // Double hashing: the i-th probe position is (h1 + i * h2), reduced modulo the
+            // total number of bits in the filter (numPages * numBitsPerPage).
+            long hash = Math.abs((hashes[0] + (long) i * hashes[1]) % (numPages * numBitsPerPage));
+
+            ByteBuffer buffer = bloomFilterPages.get((int) (hash / numBitsPerPage)).getBuffer();
+            int byteIndex = (int) (hash % numBitsPerPage) / Byte.SIZE;
+            byte b = buffer.get(byteIndex);
+            int bitIndex = (int) (hash % numBitsPerPage) % Byte.SIZE;
+            b = (byte) (b | (1 << bitIndex));
+
+            buffer.put(byteIndex, b);
+        }
+    }
+
+    @Override
+    public boolean contains(ITupleReference tuple) {
+        long[] hashes = new long[2];
+        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, SEED, hashes);
+        for (int i = 0; i < numHashes; ++i) {
+            long hash = Math.abs((hashes[0] + (long) i * hashes[1]) % (numPages * numBitsPerPage));
+
+            ByteBuffer buffer = bloomFilterPages.get((int) (hash / numBitsPerPage)).getBuffer();
+            int byteIndex = (int) (hash % numBitsPerPage) / Byte.SIZE;
+            byte b = buffer.get(byteIndex);
+            int bitIndex = (int) (hash % numBitsPerPage) % Byte.SIZE;
+
+            if ((b & (1 << bitIndex)) == 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public synchronized void create() throws HyracksDataException {
+        if (isActivated) {
+            throw new HyracksDataException("Failed to create the bloom filter since it is activated.");
+        }
+
+        boolean fileIsMapped = false;
+        synchronized (fileMapProvider) {
+            fileIsMapped = fileMapProvider.isMapped(file);
+            if (!fileIsMapped) {
+                bufferCache.createFile(file);
+            }
+            fileId = fileMapProvider.lookupFileId(file);
+            try {
+                // Also creates the file if it doesn't exist yet.
+                bufferCache.openFile(fileId);
+            } catch (HyracksDataException e) {
+                // Revert state of buffer cache since file failed to open.
+                if (!fileIsMapped) {
+                    bufferCache.deleteFile(fileId, false);
+                }
+                throw e;
+            }
+        }
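+        // Persist the filter metadata (number of bit-vector pages and number of hash functions) on page 0.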
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, METADATA_PAGE_ID), false);
+        page.acquireWriteLatch();
+        page.getBuffer().putLong(NUM_PAGES_OFFSET, numPages);
+        page.getBuffer().putInt(NUM_HASHES_USED_OFFSET, numHashes);
+        page.releaseWriteLatch();
+        bufferCache.unpin(page);
+        bufferCache.closeFile(fileId);
+    }
+
+    @Override
+    public void activate() throws HyracksDataException {
+        if (isActivated) {
+            return;
+        }
+
+        boolean fileIsMapped = false;
+        synchronized (fileMapProvider) {
+            fileIsMapped = fileMapProvider.isMapped(file);
+            if (!fileIsMapped) {
+                bufferCache.createFile(file);
+            }
+            fileId = fileMapProvider.lookupFileId(file);
+            try {
+                // Also creates the file if it doesn't exist yet.
+                bufferCache.openFile(fileId);
+            } catch (HyracksDataException e) {
+                // Revert state of buffer cache since file failed to open.
+                if (!fileIsMapped) {
+                    bufferCache.deleteFile(fileId, false);
+                }
+                throw e;
+            }
+        }
+        isActivated = true;
+
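+        // Pin and write-latch every bit-vector page (pages 1 through numPages) up front so that
+        // add() and contains() can work directly against the cached pages.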
+        int currentPageId = 1;
+        while (currentPageId <= numPages) {
+            ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+            page.acquireWriteLatch();
+            bloomFilterPages.add(page);
+            ++currentPageId;
+        }
+    }
+
+    @Override
+    public void deactivate() throws HyracksDataException {
+        if (!isActivated) {
+            return;
+        }
+
+        for (int i = 0; i < numPages; ++i) {
+            ICachedPage page = bloomFilterPages.get(i);
+            page.releaseWriteLatch();
+            bufferCache.unpin(page);
+        }
+        bufferCache.closeFile(fileId);
+        isActivated = false;
+    }
+
+    @Override
+    public void destroy() throws HyracksDataException {
+        if (isActivated) {
+            throw new HyracksDataException("Failed to destroy the bloom filter since it is activated.");
+        }
+
+        file.delete();
+        if (fileId == -1) {
+            return;
+        }
+        bufferCache.deleteFile(fileId, false);
+        fileId = -1;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/MurmurHash128Bit.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/MurmurHash128Bit.java
new file mode 100644
index 0000000..25c3967
--- /dev/null
+++ b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/MurmurHash128Bit.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
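+/**
+ * 128-bit MurmurHash3 (x64 variant) adapted to hash the key fields of an ITupleReference directly,
+ * without first copying them into a contiguous buffer. Parity with a plain ByteBuffer-based
+ * implementation is checked by MurmurHashForITupleReferenceTest.
+ */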
+public class MurmurHash128Bit {
+
+    private final static int DUMMY_FIELD = 0;
+
+    public static long rotl64(long v, int n) {
+        return ((v << n) | (v >>> (64 - n)));
+    }
+
+    public static long fmix(long k) {
+        k ^= k >>> 33;
+        k *= 0xff51afd7ed558ccdL;
+        k ^= k >>> 33;
+        k *= 0xc4ceb9fe1a85ec53L;
+        k ^= k >>> 33;
+
+        return k;
+    }
+
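+    /**
+     * Computes the 128-bit hash of the concatenated bytes of the given key fields (in the order
+     * given by keyFields) and stores the two 64-bit halves in hashes[0] and hashes[1].
+     */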
+    public static void hash3_x64_128(ITupleReference tuple, int[] keyFields, long seed, long[] hashes) {
+        int length = 0;
+        for (int i = 0; i < keyFields.length; ++i) {
+            length += tuple.getFieldLength(keyFields[i]);
+        }
+        final int nblocks = length >> 4; // Process as 128-bit blocks.
+
+        long h1 = seed;
+        long h2 = seed;
+
+        long c1 = 0x87c37b91114253d5L;
+        long c2 = 0x4cf5ad432745937fL;
+
+        //----------
+        // body
+
+        int currentFieldIndex = 0;
+        int bytePos = 0;
+        for (int i = 0; i < nblocks; ++i) {
+
+            long k1 = 0L;
+            for (int j = 0; j < 8; ++j) {
+                k1 += (((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos] & 0xff) << (j << 3));
+                ++bytePos;
+                if (tuple.getFieldLength(keyFields[currentFieldIndex]) == bytePos) {
+                    ++currentFieldIndex;
+                    bytePos = 0;
+                }
+            }
+            long k2 = 0L;
+            for (int j = 0; j < 8; ++j) {
+                k2 += (((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos] & 0xff) << (j << 3));
+                ++bytePos;
+                if (tuple.getFieldLength(keyFields[currentFieldIndex]) == bytePos) {
+                    ++currentFieldIndex;
+                    bytePos = 0;
+                }
+            }
+
+            k1 *= c1;
+            k1 = rotl64(k1, 31);
+            k1 *= c2;
+            h1 ^= k1;
+
+            h1 = rotl64(h1, 27);
+            h1 += h2;
+            h1 = h1 * 5 + 0x52dce729;
+
+            k2 *= c2;
+            k2 = rotl64(k2, 33);
+            k2 *= c1;
+            h2 ^= k2;
+
+            h2 = rotl64(h2, 31);
+            h2 += h1;
+            h2 = h2 * 5 + 0x38495ab5;
+        }
+
+        //----------
+        // tail
+
+        long k1 = 0L;
+        long k2 = 0L;
+
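+        // Consume the tail (length % 16 bytes) from the last byte of the last key field backwards,
+        // crossing field boundaries as needed, mirroring how the reference implementation indexes
+        // tail[14] down to tail[0].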
+        currentFieldIndex = keyFields.length - 1;
+        bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+        switch (length & 15) {
+            case 15:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 48;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 14:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 40;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 13:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 32;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 12:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 24;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 11:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 16;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 10:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 8;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 9:
+                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]);
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+                k2 *= c2;
+                k2 = rotl64(k2, 33);
+                k2 *= c1;
+                h2 ^= k2;
+
+            case 8:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 56;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 7:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 48;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 6:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 40;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 5:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 32;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 4:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 24;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 3:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 16;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 2:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]) << 8;
+                --bytePos;
+                if (bytePos == -1) {
+                    --currentFieldIndex;
+                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
+                }
+            case 1:
+                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
+                        + bytePos]);
+                k1 *= c1;
+                k1 = rotl64(k1, 31);
+                k1 *= c2;
+                h1 ^= k1;
+        }
+
+        //----------
+        // finalization
+
+        h1 ^= length;
+        h2 ^= length;
+
+        h1 += h2;
+        h2 += h1;
+
+        h1 = fmix(h1);
+        h2 = fmix(h2);
+
+        h1 += h2;
+        h2 += h1;
+
+        hashes[0] = h1;
+        hashes[1] = h2;
+    }
+
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
index fb2fb38..1c555f9 100644
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
@@ -90,7 +90,16 @@
     // Allocate a generous size to make sure we have enough elements for all tests.
     public static final int LSM_INVINDEX_SCAN_COUNT_ARRAY_SIZE = 1000000;
     public static final int LSM_INVINDEX_MULTITHREAD_NUM_OPERATIONS = 200;
-    
+
+    // Test params for BloomFilter
+    public static final int BLOOM_FILTER_NUM_TUPLES_TO_INSERT = 100;
+
+    // Mem configuration for BloomFilter.
+    public static final int BLOOM_FILTER_PAGE_SIZE = 256;
+    public static final int BLOOM_FILTER_NUM_PAGES = 1000;
+    public static final int BLOOM_FILTER_MAX_OPEN_FILES = 10;
+    public static final int BLOOM_FILTER_HYRACKS_FRAME_SIZE = 128;
+
 }
 
 /* ORIGINAL TEST PARAMETERS: DO NOT EDIT!
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
new file mode 100644
index 0000000..3b15677
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
@@ -0,0 +1,49 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-bloomfilter-test</artifactId>
+  <version>0.2.2-SNAPSHOT</version>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-tests</artifactId>
+    <version>0.2.2-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.8.1</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-storage-am-bloomfilter</artifactId>
+      <version>0.2.2-SNAPSHOT</version>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-test-support</artifactId>
+      <version>0.2.2-SNAPSHOT</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/BloomFilterTest.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/BloomFilterTest.java
new file mode 100644
index 0000000..b9f5bf2
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/BloomFilterTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter;
+
+import java.util.ArrayList;
+import java.util.Random;
+import java.util.TreeSet;
+import java.util.logging.Level;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.util.AbstractBloomFilterTest;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class BloomFilterTest extends AbstractBloomFilterTest {
+    private final int fieldCount = 2;
+    private final Random rnd = new Random(50);
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        super.setUp();
+    }
+
+    @Test
+    public void basicTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING BLOOM FILTER");
+        }
+
+        IBufferCache bufferCache = harness.getBufferCache();
+
+        long numElements = 100L;
+        int[] keyFields = { 0 };
+        int numHashes = 10;
+
+        BloomFilter bf = new BloomFilter(bufferCache, harness.getFileMapProvider(), harness.getFileReference(),
+                numElements, keyFields, numHashes);
+
+        bf.create();
+        bf.activate();
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        // generate keys
+        int numKeys = 50;
+        int maxKey = 1000;
+        TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        while (uniqueKeys.size() < numKeys) {
+            int key = rnd.nextInt(maxKey);
+            uniqueKeys.add(key);
+        }
+        for (Integer i : uniqueKeys) {
+            keys.add(i);
+        }
+
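+        // Insert each key and immediately verify membership: a Bloom filter may report false
+        // positives, but it must never report a false negative for an inserted key.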
+        for (int i = 0; i < keys.size(); ++i) {
+
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+
+            bf.add(tuple);
+
+            Assert.assertTrue(bf.contains(tuple));
+        }
+
+        bf.deactivate();
+        bf.destroy();
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
new file mode 100644
index 0000000..4c694a0
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
@@ -0,0 +1,303 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter;
+
+import java.nio.ByteBuffer;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.MurmurHash128Bit;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.util.AbstractBloomFilterTest;
+
+@SuppressWarnings("rawtypes")
+public class MurmurHashForITupleReferenceTest extends AbstractBloomFilterTest {
+    private final static int NUM_LONG_VARS_FOR_128_BIT_HASH = 2;
+    private final static int DUMMY_FIELD = 0;
+    private final Random rnd = new Random(50);
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        super.setUp();
+    }
+
+    @Test
+    public void murmurhashONEIntegerFieldTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING MURMUR HASH ONE INTEGER FIELD");
+        }
+
+        int fieldCount = 2;
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(tupleBuilder, tuple, rnd.nextInt());
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+
+        int keyFields[] = { 0 };
+        int length = getTupleSize(tuple, keyFields);
+
+        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
+        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
+
+        ByteBuffer buffer;
+        byte[] array = new byte[length];
+        fillArrayWithData(array, keyFields, tuple, length);
+        buffer = ByteBuffer.wrap(array);
+
+        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
+        Assert.assertArrayEquals(expecteds, actuals);
+    }
+
+    @Test
+    public void murmurhashTwoIntegerFieldsTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING MURMUR HASH TWO INTEGER FIELDS");
+        }
+
+        int fieldCount = 2;
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(tupleBuilder, tuple, rnd.nextInt(), rnd.nextInt());
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+
+        int keyFields[] = { 0, 1 };
+        int length = getTupleSize(tuple, keyFields);
+
+        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
+        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
+
+        ByteBuffer buffer;
+        byte[] array = new byte[length];
+        fillArrayWithData(array, keyFields, tuple, length);
+        buffer = ByteBuffer.wrap(array);
+
+        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
+        Assert.assertArrayEquals(expecteds, actuals);
+    }
+
+    @Test
+    public void murmurhashOneStringFieldTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING MURMUR HASH ONE STRING FIELD");
+        }
+
+        int fieldCount = 2;
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE };
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        String s = randomString(100, rnd);
+        TupleUtils.createTuple(tupleBuilder, tuple, fieldSerdes, s);
+
+        int keyFields[] = { 0 };
+        int length = getTupleSize(tuple, keyFields);
+
+        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
+        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
+
+        byte[] array = new byte[length];
+        ByteBuffer buffer;
+        fillArrayWithData(array, keyFields, tuple, length);
+        buffer = ByteBuffer.wrap(array);
+
+        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
+        Assert.assertArrayEquals(expecteds, actuals);
+    }
+
+    @Test
+    public void murmurhashThreeStringFieldsTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING MURMUR HASH THREE STRING FIELDS");
+        }
+
+        int fieldCount = 3;
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        String s1 = randomString(40, rnd);
+        String s2 = randomString(60, rnd);
+        String s3 = randomString(20, rnd);
+        TupleUtils.createTuple(tupleBuilder, tuple, fieldSerdes, s1, s2, s3);
+
+        int keyFields[] = { 2, 0, 1 };
+        int length = getTupleSize(tuple, keyFields);
+
+        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
+        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
+
+        byte[] array = new byte[length];
+        ByteBuffer buffer;
+        fillArrayWithData(array, keyFields, tuple, length);
+        buffer = ByteBuffer.wrap(array);
+
+        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
+        Assert.assertArrayEquals(expecteds, actuals);
+    }
+
+    private void fillArrayWithData(byte[] array, int[] keyFields, ITupleReference tuple, int length) {
+        int currentFieldIndex = 0;
+        int bytePos = 0;
+        for (int i = 0; i < length; ++i) {
+            array[i] = tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex]) + bytePos];
+            ++bytePos;
+            if (tuple.getFieldLength(keyFields[currentFieldIndex]) == bytePos) {
+                ++currentFieldIndex;
+                bytePos = 0;
+            }
+        }
+    }
+
+    private int getTupleSize(ITupleReference tuple, int[] keyFields) {
+        int length = 0;
+        for (int i = 0; i < keyFields.length; ++i) {
+            length += tuple.getFieldLength(keyFields[i]);
+        }
+        return length;
+    }
+
+    public static String randomString(int length, Random random) {
+        char[] chars = "abcdefghijklmnopqrstuvwxyz".toCharArray();
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < length; ++i) {
+            char c = chars[random.nextInt(chars.length)];
+            strBuilder.append(c);
+        }
+        return strBuilder.toString();
+    }
+
+    protected static long getblock(ByteBuffer key, int offset, int index) {
+        int i_8 = index << 3;
+        int blockOffset = offset + i_8;
+        return ((long) key.get(blockOffset + 0) & 0xff) + (((long) key.get(blockOffset + 1) & 0xff) << 8)
+                + (((long) key.get(blockOffset + 2) & 0xff) << 16) + (((long) key.get(blockOffset + 3) & 0xff) << 24)
+                + (((long) key.get(blockOffset + 4) & 0xff) << 32) + (((long) key.get(blockOffset + 5) & 0xff) << 40)
+                + (((long) key.get(blockOffset + 6) & 0xff) << 48) + (((long) key.get(blockOffset + 7) & 0xff) << 56);
+    }
+
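+    // Reference MurmurHash3 x64 128-bit implementation over a contiguous ByteBuffer, used as the
+    // oracle that the tuple-based MurmurHash128Bit is compared against.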
+    public static long[] hash3_x64_128(ByteBuffer key, int offset, int length, long seed) {
+        final int nblocks = length >> 4; // Process as 128-bit blocks.
+
+        long h1 = seed;
+        long h2 = seed;
+
+        long c1 = 0x87c37b91114253d5L;
+        long c2 = 0x4cf5ad432745937fL;
+
+        //----------
+        // body
+
+        for (int i = 0; i < nblocks; i++) {
+            long k1 = getblock(key, offset, i * 2 + 0);
+            long k2 = getblock(key, offset, i * 2 + 1);
+
+            k1 *= c1;
+            k1 = MurmurHash128Bit.rotl64(k1, 31);
+            k1 *= c2;
+            h1 ^= k1;
+
+            h1 = MurmurHash128Bit.rotl64(h1, 27);
+            h1 += h2;
+            h1 = h1 * 5 + 0x52dce729;
+
+            k2 *= c2;
+            k2 = MurmurHash128Bit.rotl64(k2, 33);
+            k2 *= c1;
+            h2 ^= k2;
+
+            h2 = MurmurHash128Bit.rotl64(h2, 31);
+            h2 += h1;
+            h2 = h2 * 5 + 0x38495ab5;
+        }
+
+        //----------
+        // tail
+
+        // Advance offset to the unprocessed tail of the data.
+        offset += nblocks * 16;
+
+        long k1 = 0;
+        long k2 = 0;
+
+        switch (length & 15) {
+            case 15:
+                k2 ^= ((long) key.get(offset + 14)) << 48;
+            case 14:
+                k2 ^= ((long) key.get(offset + 13)) << 40;
+            case 13:
+                k2 ^= ((long) key.get(offset + 12)) << 32;
+            case 12:
+                k2 ^= ((long) key.get(offset + 11)) << 24;
+            case 11:
+                k2 ^= ((long) key.get(offset + 10)) << 16;
+            case 10:
+                k2 ^= ((long) key.get(offset + 9)) << 8;
+            case 9:
+                k2 ^= ((long) key.get(offset + 8)) << 0;
+                k2 *= c2;
+                k2 = MurmurHash128Bit.rotl64(k2, 33);
+                k2 *= c1;
+                h2 ^= k2;
+
+            case 8:
+                k1 ^= ((long) key.get(offset + 7)) << 56;
+            case 7:
+                k1 ^= ((long) key.get(offset + 6)) << 48;
+            case 6:
+                k1 ^= ((long) key.get(offset + 5)) << 40;
+            case 5:
+                k1 ^= ((long) key.get(offset + 4)) << 32;
+            case 4:
+                k1 ^= ((long) key.get(offset + 3)) << 24;
+            case 3:
+                k1 ^= ((long) key.get(offset + 2)) << 16;
+            case 2:
+                k1 ^= ((long) key.get(offset + 1)) << 8;
+            case 1:
+                k1 ^= ((long) key.get(offset));
+                k1 *= c1;
+                k1 = MurmurHash128Bit.rotl64(k1, 31);
+                k1 *= c2;
+                h1 ^= k1;
+        }
+
+        //----------
+        // finalization
+
+        h1 ^= length;
+        h2 ^= length;
+
+        h1 += h2;
+        h2 += h1;
+
+        h1 = MurmurHash128Bit.fmix(h1);
+        h2 = MurmurHash128Bit.fmix(h2);
+
+        h1 += h2;
+        h2 += h1;
+
+        return (new long[] { h1, h2 });
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
new file mode 100644
index 0000000..9712da9
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter.util;
+
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class AbstractBloomFilterTest {
+    protected final Logger LOGGER = Logger.getLogger(BloomFilterTestHarness.class.getName());
+
+    protected final BloomFilterTestHarness harness;
+
+    public AbstractBloomFilterTest() {
+        harness = new BloomFilterTestHarness();
+    }
+
+    public AbstractBloomFilterTest(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+        harness = new BloomFilterTestHarness(pageSize, numPages, maxOpenFiles, hyracksFrameSize);
+    }
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
new file mode 100644
index 0000000..8fac122
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.bloomfilter.util;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BloomFilterTestHarness {
+
+    private static final long RANDOM_SEED = 50;
+
+    protected final int pageSize;
+    protected final int numPages;
+    protected final int maxOpenFiles;
+    protected final int hyracksFrameSize;
+
+    protected IHyracksTaskContext ctx;
+    protected IBufferCache bufferCache;
+    protected IFileMapProvider fileMapProvider;
+    protected FileReference file;
+
+    protected final Random rnd = new Random();
+    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final String sep = System.getProperty("file.separator");
+    protected String fileName;
+
+    public BloomFilterTestHarness() {
+        this.pageSize = AccessMethodTestsConfig.BLOOM_FILTER_PAGE_SIZE;
+        this.numPages = AccessMethodTestsConfig.BLOOM_FILTER_NUM_PAGES;
+        this.maxOpenFiles = AccessMethodTestsConfig.BLOOM_FILTER_MAX_OPEN_FILES;
+        this.hyracksFrameSize = AccessMethodTestsConfig.BLOOM_FILTER_HYRACKS_FRAME_SIZE;
+    }
+
+    public BloomFilterTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+        this.pageSize = pageSize;
+        this.numPages = numPages;
+        this.maxOpenFiles = maxOpenFiles;
+        this.hyracksFrameSize = hyracksFrameSize;
+    }
+
+    public void setUp() throws HyracksDataException {
+        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+        ctx = TestUtils.create(getHyracksFrameSize());
+        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        file = new FileReference(new File(fileName));
+        rnd.setSeed(RANDOM_SEED);
+    }
+
+    public void tearDown() throws HyracksDataException {
+        bufferCache.close();
+        file.delete();
+    }
+
+    public IHyracksTaskContext getHyracksTaskContext() {
+        return ctx;
+    }
+
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    public IFileMapProvider getFileMapProvider() {
+        return fileMapProvider;
+    }
+
+    public FileReference getFileReference() {
+        return file;
+    }
+
+    public String getFileName() {
+        return fileName;
+    }
+
+    public Random getRandom() {
+        return rnd;
+    }
+
+    public int getPageSize() {
+        return pageSize;
+    }
+
+    public int getNumPages() {
+        return numPages;
+    }
+
+    public int getHyracksFrameSize() {
+        return hyracksFrameSize;
+    }
+
+    public int getMaxOpenFiles() {
+        return maxOpenFiles;
+    }
+}
diff --git a/hyracks-tests/pom.xml b/hyracks-tests/pom.xml
index b79295a..4011339 100644
--- a/hyracks-tests/pom.xml
+++ b/hyracks-tests/pom.xml
@@ -19,5 +19,6 @@
     <module>hyracks-storage-am-lsm-btree-test</module>
     <module>hyracks-storage-am-lsm-rtree-test</module>
     <module>hyracks-storage-am-lsm-invertedindex-test</module>
+    <module>hyracks-storage-am-bloomfilter-test</module>
   </modules>
 </project>
diff --git a/pom.xml b/pom.xml
index ce15c7f..9942e8e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -95,6 +95,7 @@
     <module>hyracks-cli</module>
     <module>hyracks-storage-common</module>
     <module>hyracks-storage-am-common</module>
+    <module>hyracks-storage-am-bloomfilter</module>
     <module>hyracks-storage-am-btree</module>
     <module>hyracks-storage-am-lsm-invertedindex</module>
     <module>hyracks-storage-am-lsm-common</module>