Commit 0e780b7
Migrated p/c queries from id cache to field data. Changed p/c queries to use paging data structures (BytesRefHash, BigFloatArray, BigIntArray) instead of hppc maps / sets.

Also removed the id cache.

Closes #4930
martijnvg committed Feb 26, 2014
1 parent 5429019 commit 0e780b7
Showing 71 changed files with 2,184 additions and 1,700 deletions.
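
For context before the diffs: the core of the change is to key per-parent values by a dense ordinal from a BytesRefHash and keep them in paged primitive arrays instead of hppc object maps. A rough, self-contained sketch of that pattern follows; it is not code from this commit and only assumes Lucene's BytesRefHash API (add/size) of this era.

import java.util.Arrays;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;

public class ParentOrdinalsSketch {

    public static void main(String[] args) {
        // BytesRefHash assigns a dense int ordinal to each distinct parent uid.
        BytesRefHash parentIds = new BytesRefHash();
        // Per-parent scores live in a primitive array indexed by ordinal; in the
        // real code BigFloatArray plays this role, paged so it can grow past 2B
        // entries and reuse recycled pages.
        float[] scores = new float[8];

        String[] childHits = {"parent-1", "parent-2", "parent-1"};
        float[] childScores = {1.0f, 0.5f, 2.0f};

        for (int i = 0; i < childHits.length; i++) {
            int ord = parentIds.add(new BytesRef(childHits[i]));
            if (ord < 0) {
                ord = -ord - 1; // add() returns -(ord + 1) for an existing entry
            }
            if (ord >= scores.length) {
                scores = Arrays.copyOf(scores, Math.max(ord + 1, scores.length * 2));
            }
            scores[ord] += childScores[i]; // accumulate per parent, no boxed map entries
        }

        System.out.println("distinct parents: " + parentIds.size());
    }
}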
@@ -33,6 +33,7 @@
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.cache.filter.terms.IndicesTermsFilterCache;
@@ -149,7 +150,7 @@ protected ShardClearIndicesCacheResponse shardOperation(ShardClearIndicesCacheRe
}
if (request.idCache()) {
clearedAtLeastOne = true;
service.cache().idCache().clear();
service.fieldData().clearField(ParentFieldMapper.NAME);
}
if (!clearedAtLeastOne) {
if (request.fields() != null && request.fields().length > 0) {
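
With the dedicated id cache removed, clearing parent/child data becomes an ordinary per-field eviction on the field data service, keyed by the _parent field name. A toy model of that kind of per-field cache, with hypothetical names rather than the actual IndexFieldDataService API:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Toy per-field cache: entries are grouped by field name so one field
// (e.g. "_parent") can be evicted without touching the others.
class FieldDataCacheSketch {
    private final Map<String, Map<Object, Object>> perField = new ConcurrentHashMap<>();

    Object getOrLoad(String field, Object segmentKey) {
        return perField
                .computeIfAbsent(field, f -> new ConcurrentHashMap<>())
                .computeIfAbsent(segmentKey, k -> loadFieldData(field, k));
    }

    // Clearing one field drops every per-segment entry for it, which is what
    // the clear-cache action above now does for ParentFieldMapper.NAME.
    void clearField(String field) {
        perField.remove(field);
    }

    private Object loadFieldData(String field, Object segmentKey) {
        return new Object(); // stand-in for building field data for one segment
    }
}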
@@ -46,13 +46,15 @@ public class PageCacheRecycler extends AbstractComponent {
private final Recycler<byte[]> bytePage;
private final Recycler<int[]> intPage;
private final Recycler<long[]> longPage;
private final Recycler<float[]> floatPage;
private final Recycler<double[]> doublePage;
private final Recycler<Object[]> objectPage;

public void close() {
bytePage.close();
intPage.close();
longPage.close();
floatPage.close();
doublePage.close();
objectPage.close();
}
@@ -102,6 +104,7 @@ public PageCacheRecycler(Settings settings, ThreadPool threadPool) {
final double bytesWeight = componentSettings.getAsDouble(WEIGHT + ".bytes", 1d);
final double intsWeight = componentSettings.getAsDouble(WEIGHT + ".ints", 1d);
final double longsWeight = componentSettings.getAsDouble(WEIGHT + ".longs", 1d);
final double floatsWeight = componentSettings.getAsDouble(WEIGHT + ".floats", 1d);
final double doublesWeight = componentSettings.getAsDouble(WEIGHT + ".doubles", 1d);
// object pages are less useful to us so we give them a lower weight by default
final double objectsWeight = componentSettings.getAsDouble(WEIGHT + ".objects", 0.1d);
@@ -138,6 +141,16 @@ public void recycle(long[] value) {
// nothing to do
}
});
floatPage = build(type, maxCount(limit, BigArrays.FLOAT_PAGE_SIZE, floatsWeight, totalWeight), searchThreadPoolSize, availableProcessors, new AbstractRecyclerC<float[]>() {
@Override
public float[] newInstance(int sizing) {
return new float[BigArrays.FLOAT_PAGE_SIZE];
}
@Override
public void recycle(float[] value) {
// nothing to do
}
});
doublePage = build(type, maxCount(limit, BigArrays.DOUBLE_PAGE_SIZE, doublesWeight, totalWeight), searchThreadPoolSize, availableProcessors, new AbstractRecyclerC<double[]>() {
@Override
public double[] newInstance(int sizing) {
@@ -184,6 +197,14 @@ public Recycler.V<long[]> longPage(boolean clear) {
return v;
}

public Recycler.V<float[]> floatPage(boolean clear) {
final Recycler.V<float[]> v = floatPage.obtain();
if (v.isRecycled() && clear) {
Arrays.fill(v.v(), 0f);
}
return v;
}

public Recycler.V<double[]> doublePage(boolean clear) {
final Recycler.V<double[]> v = doublePage.obtain();
if (v.isRecycled() && clear) {
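
The float pages added here follow the same contract as the existing byte/int/long/double pages: pages are pooled, and a recycled page is only zero-filled when the caller asks for a cleared one, since a freshly allocated array is already zeroed. A minimal stand-alone model of that obtain/clear pattern (simplified; the real Recycler also has bounded, per-thread pooling strategies and explicit release):

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;

// Simplified page pool: newly allocated pages are zeroed by the JVM, so only
// pages coming back out of the pool need an explicit Arrays.fill.
class FloatPagePoolSketch {
    static final int PAGE_SIZE = 16 * 1024 / Float.BYTES; // mirrors FLOAT_PAGE_SIZE

    private final Deque<float[]> pool = new ArrayDeque<>();

    float[] obtain(boolean clear) {
        float[] page = pool.poll();
        if (page == null) {
            return new float[PAGE_SIZE]; // fresh allocation, already zeroed
        }
        if (clear) {
            Arrays.fill(page, 0f);       // recycled page may hold stale values
        }
        return page;
    }

    void recycle(float[] page) {
        pool.offer(page);
    }
}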
@@ -124,6 +124,15 @@ protected final long[] newLongPage(int page) {
}
}

protected final float[] newFloatPage(int page) {
if (recycler != null) {
final Recycler.V<float[]> v = recycler.floatPage(clearOnResize);
return registerNewPage(v, page, BigArrays.FLOAT_PAGE_SIZE);
} else {
return new float[BigArrays.FLOAT_PAGE_SIZE];
}
}

protected final double[] newDoublePage(int page) {
if (recycler != null) {
final Recycler.V<double[]> v = recycler.doublePage(clearOnResize);
82 changes: 82 additions & 0 deletions src/main/java/org/elasticsearch/common/util/BigArrays.java
@@ -35,6 +35,7 @@ public enum BigArrays {
public static final int PAGE_SIZE_IN_BYTES = 1 << 14;
public static final int BYTE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_BYTE;
public static final int INT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_INT;
public static final int FLOAT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_FLOAT;
public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_LONG;
public static final int DOUBLE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_DOUBLE;
public static final int OBJECT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_OBJECT_REF;
@@ -236,6 +237,49 @@ public void fill(long fromIndex, long toIndex, double value) {

}

private static class FloatArrayWrapper extends AbstractArray implements FloatArray {

private final float[] array;

FloatArrayWrapper(float[] array, PageCacheRecycler recycler, boolean clearOnResize) {
super(recycler, clearOnResize);
this.array = array;
}

@Override
public long size() {
return array.length;
}

@Override
public float get(long index) {
assert indexIsInt(index);
return array[(int) index];
}

@Override
public float set(long index, float value) {
assert indexIsInt(index);
float ret = array[(int) index];
array[(int) index] = value;
return ret;
}

@Override
public float increment(long index, float inc) {
assert indexIsInt(index);
return array[(int) index] += inc;
}

@Override
public void fill(long fromIndex, long toIndex, float value) {
assert indexIsInt(fromIndex);
assert indexIsInt(toIndex);
Arrays.fill(array, (int) fromIndex, (int) toIndex, value);
}

}

private static class ObjectArrayWrapper<T> extends AbstractArray implements ObjectArray<T> {

private final Object[] array;
@@ -419,6 +463,44 @@ public static DoubleArray grow(DoubleArray array, long minSize) {
return resize(array, newSize);
}

/** Allocate a new {@link FloatArray} of the given capacity. */
public static FloatArray newFloatArray(long size, PageCacheRecycler recycler, boolean clearOnResize) {
if (size <= FLOAT_PAGE_SIZE) {
return new FloatArrayWrapper(new float[(int) size], recycler, clearOnResize);
} else {
return new BigFloatArray(size, recycler, clearOnResize);
}
}

/** Allocate a new {@link FloatArray} of the given capacity. */
public static FloatArray newFloatArray(long size) {
return newFloatArray(size, null, true);
}

/** Resize the array to the exact provided size. */
public static FloatArray resize(FloatArray array, long size) {
if (array instanceof BigFloatArray) {
((BigFloatArray) array).resize(size);
return array;
} else {
AbstractArray arr = (AbstractArray) array;
final FloatArray newArray = newFloatArray(size, arr.recycler, arr.clearOnResize);
for (long i = 0, end = Math.min(size, array.size()); i < end; ++i) {
newArray.set(i, array.get(i));
}
return newArray;
}
}

/** Grow an array to a size that is larger than <code>minSize</code>, preserving content, and potentially reusing part of the provided array. */
public static FloatArray grow(FloatArray array, long minSize) {
if (minSize <= array.size()) {
return array;
}
final long newSize = overSize(minSize, FLOAT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_FLOAT);
return resize(array, newSize);
}

/** Allocate a new {@link ObjectArray} of the given capacity. */
public static <T> ObjectArray<T> newObjectArray(long size, PageCacheRecycler recycler) {
if (size <= OBJECT_PAGE_SIZE) {
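
Taken together, the new entry points hand callers a FloatArray that starts out as a plain float[] wrapper and switches to the paged BigFloatArray once it outgrows FLOAT_PAGE_SIZE. A usage sketch against the methods added above; it is not from the commit and is only expected to compile against this commit's tree (the sizes are arbitrary):

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;

public class FloatArrayUsageSketch {
    public static void main(String[] args) {
        // Small sizes get a single-page float[] wrapper; anything above
        // FLOAT_PAGE_SIZE is backed by BigFloatArray pages.
        FloatArray scores = BigArrays.newFloatArray(1024);

        scores.set(0, 1.5f);
        scores.increment(0, 0.5f);            // now 2.0f
        scores.fill(1, 1024, Float.NEGATIVE_INFINITY);

        // grow() over-allocates in page-sized steps and copies existing content.
        scores = BigArrays.grow(scores, 3_000_000L);
        scores.set(2_999_999L, 42f);

        System.out.println(scores.get(0) + " " + scores.size());
    }
}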
109 changes: 109 additions & 0 deletions src/main/java/org/elasticsearch/common/util/BigFloatArray.java
@@ -0,0 +1,109 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.util;

import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.cache.recycler.PageCacheRecycler;

import java.util.Arrays;

import static org.elasticsearch.common.util.BigArrays.FLOAT_PAGE_SIZE;

/**
* Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
* configurable length.
*/
final class BigFloatArray extends AbstractBigArray implements FloatArray {

private float[][] pages;

/** Constructor. */
public BigFloatArray(long size, PageCacheRecycler recycler, boolean clearOnResize) {
super(FLOAT_PAGE_SIZE, recycler, clearOnResize);
this.size = size;
pages = new float[numPages(size)][];
for (int i = 0; i < pages.length; ++i) {
pages[i] = newFloatPage(i);
}
}

@Override
public float set(long index, float value) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
final float[] page = pages[pageIndex];
final float ret = page[indexInPage];
page[indexInPage] = value;
return ret;
}

@Override
public float increment(long index, float inc) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage] += inc;
}

public float get(long index) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage];
}

@Override
protected int numBytesPerElement() {
return RamUsageEstimator.NUM_BYTES_FLOAT;
}

/** Change the size of this array. Content between indexes <code>0</code> and <code>min(size(), newSize)</code> will be preserved. */
public void resize(long newSize) {
final int numPages = numPages(newSize);
if (numPages > pages.length) {
pages = Arrays.copyOf(pages, ArrayUtil.oversize(numPages, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
}
for (int i = numPages - 1; i >= 0 && pages[i] == null; --i) {
pages[i] = newFloatPage(i);
}
for (int i = numPages; i < pages.length && pages[i] != null; ++i) {
pages[i] = null;
releasePage(i);
}
this.size = newSize;
}

@Override
public void fill(long fromIndex, long toIndex, float value) {
Preconditions.checkArgument(fromIndex <= toIndex);
final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1);
if (fromPage == toPage) {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value);
} else {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), pages[fromPage].length, value);
for (int i = fromPage + 1; i < toPage; ++i) {
Arrays.fill(pages[i], value);
}
Arrays.fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, value);
}
}

}
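
BigFloatArray leaves the index arithmetic to AbstractBigArray, which is not part of this diff; with a power-of-two page size it reduces to a shift and a mask. A self-contained illustration of that paged addressing (the constants and method names here are assumptions, not the AbstractBigArray internals):

// Paged addressing for a "big" float array: the high bits of the index pick
// the page, the low bits pick the slot inside that page.
public class PagedIndexSketch {
    static final int PAGE_SIZE = 1 << 12;          // 4096 floats = 16 KB, like FLOAT_PAGE_SIZE
    static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(PAGE_SIZE);
    static final int PAGE_MASK = PAGE_SIZE - 1;

    static int pageIndex(long index) {
        return (int) (index >>> PAGE_SHIFT);
    }

    static int indexInPage(long index) {
        return (int) (index & PAGE_MASK);
    }

    public static void main(String[] args) {
        long index = 10_000_000L;                  // far beyond a single page
        System.out.println(pageIndex(index) + " / " + indexInPage(index));

        // numPages for a given size, rounding up:
        long size = 10_000_001L;
        int numPages = (int) ((size + PAGE_SIZE - 1) / PAGE_SIZE);
        System.out.println(numPages + " pages");
    }
}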
@@ -16,35 +16,32 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.id;

import org.elasticsearch.common.bytes.HashedBytesArray;
package org.elasticsearch.common.util;

/**
*
* Abstraction of an array of double values.
*/
public interface IdReaderTypeCache {
public interface FloatArray extends BigArray {

/**
* @param docId The Lucene docId of the child document to return the parent _uid for.
* @return The parent _uid for the specified docId (which is a child document)
* Get an element given its index.
*/
HashedBytesArray parentIdByDoc(int docId);
public abstract float get(long index);

/**
* @param uid The uid of the document to return the lucene docId for
* @return The lucene docId for the specified uid
* Set a value at the given index and return the previous value.
*/
int docById(HashedBytesArray uid);
public abstract float set(long index, float value);

/**
* @param docId The lucene docId of the document to return _uid for
* @return The _uid of the specified docId
* Increment value at the given index by <code>inc</code> and return the value.
*/
HashedBytesArray idByDoc(int docId);
public abstract float increment(long index, float inc);

/**
* @return The size in bytes for this particular instance
* Fill slots between <code>fromIndex</code> inclusive to <code>toIndex</code> exclusive with <code>value</code>.
*/
long sizeInBytes();
public abstract void fill(long fromIndex, long toIndex, float value);

}