Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

(refactor) some opportunities to use diamond operator #25585

Merged
merged 2 commits into from
Aug 15, 2017
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ public void testGeoDistance() {

public void testGeoPolygon() {
// tag::geo_polygon
List<GeoPoint> points = new ArrayList<GeoPoint>(); // <1>
List<GeoPoint> points = new ArrayList<>(); // <1>
points.add(new GeoPoint(40, -70));
points.add(new GeoPoint(30, -80));
points.add(new GeoPoint(20, -90));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public static <Response> TaskListener<Response> instance() {
return (TaskListener<Response>) INSTANCE;
}

private static final LoggingTaskListener<Object> INSTANCE = new LoggingTaskListener<Object>();
private static final LoggingTaskListener<Object> INSTANCE = new LoggingTaskListener<>();

private LoggingTaskListener() {
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ public static DateTimeFormatter forFields(
if (fields == null || fields.size() == 0) {
throw new IllegalArgumentException("The fields must not be null or empty");
}
Set<DateTimeFieldType> workingFields = new HashSet<DateTimeFieldType>(fields);
Set<DateTimeFieldType> workingFields = new HashSet<>(fields);
int inputSize = workingFields.size();
boolean reducedPrec = false;
DateTimeFormatterBuilder bld = new DateTimeFormatterBuilder();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ public void testSimpleSettings() {

public void testSimpleMixedFeatures() {
int numFeatures = randomIntBetween(1, Feature.values().length);
List<Feature> features = new ArrayList<Feature>(numFeatures);
List<Feature> features = new ArrayList<>(numFeatures);
for (int i = 0; i < numFeatures; i++) {
features.add(randomFrom(Feature.values()));
}
Expand Down Expand Up @@ -156,7 +156,7 @@ public void testSimpleMixedFeatures() {

public void testEmptyMixedFeatures() {
int numFeatures = randomIntBetween(1, Feature.values().length);
List<Feature> features = new ArrayList<Feature>(numFeatures);
List<Feature> features = new ArrayList<>(numFeatures);
for (int i = 0; i < numFeatures; i++) {
features.add(randomFrom(Feature.values()));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ public class KeyedLockTests extends ESTestCase {
public void testIfMapEmptyAfterLotsOfAcquireAndReleases() throws InterruptedException {
ConcurrentHashMap<String, Integer> counter = new ConcurrentHashMap<>();
ConcurrentHashMap<String, AtomicInteger> safeCounter = new ConcurrentHashMap<>();
KeyedLock<String> connectionLock = new KeyedLock<String>(randomBoolean());
KeyedLock<String> connectionLock = new KeyedLock<>(randomBoolean());
String[] names = new String[randomIntBetween(1, 40)];
for (int i = 0; i < names.length; i++) {
names[i] = randomRealisticUnicodeOfLengthBetween(10, 20);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ protected void sendRequest(Connection connection, long requestId, String action,
refresh();
SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get();

Set<String> uniqueIds = new HashSet();
Set<String> uniqueIds = new HashSet<>();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It looks like you have a typo here.

It's very helpful if you can get Gradle to pass before raising a PR, so that these sorts of things get picked up by tooling rather than people needing to look at them.

long dupCounter = 0;
boolean found_duplicate_already = false;
for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ private SyncedFlushUtil() {
*/
public static ShardsSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, ShardId shardId) {
SyncedFlushService service = cluster.getInstance(SyncedFlushService.class);
LatchedListener<ShardsSyncedFlushResult> listener = new LatchedListener();
LatchedListener<ShardsSyncedFlushResult> listener = new LatchedListener<>();
service.attemptSyncedFlush(shardId, listener);
try {
listener.latch.await();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ protected ScriptedMetricAggregationBuilder createTestAggregatorBuilder() {
factory.reduceScript(randomScript("reduceScript"));
}
if (randomBoolean()) {
Map<String, Object> params = new HashMap<String, Object>();
Map<String, Object> params = new HashMap<>();
params.put("foo", "bar");
factory.params(params);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,7 @@ public void testSingleValueAggDerivative() throws Exception {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)deriv).getProperty("_count");
Object[] propertiesSumCounts = (Object[]) ((InternalAggregation)deriv).getProperty("sum.value");

List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
List<Bucket> buckets = new ArrayList<>(deriv.getBuckets());
Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets
// overwritten
for (int i = 0; i < numValueBuckets; ++i) {
Expand Down Expand Up @@ -311,7 +311,7 @@ public void testMultiValueAggDerivative() throws Exception {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)deriv).getProperty("_count");
Object[] propertiesSumCounts = (Object[]) ((InternalAggregation)deriv).getProperty("stats.sum");

List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
List<Bucket> buckets = new ArrayList<>(deriv.getBuckets());
Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets
// overwritten
for (int i = 0; i < numValueBuckets; ++i) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ public void testExists() throws Exception {
// empty doc
emptyMap()
};
List<IndexRequestBuilder> reqs = new ArrayList<IndexRequestBuilder>();
List<IndexRequestBuilder> reqs = new ArrayList<>();
for (Map<String, Object> source : sources) {
reqs.add(client().prepareIndex("idx", "type").setSource(source));
}
Expand All @@ -106,7 +106,7 @@ public void testExists() throws Exception {
// confuse the exists/missing parser at query time
indexRandom(true, false, reqs);

final Map<String, Integer> expected = new LinkedHashMap<String, Integer>();
final Map<String, Integer> expected = new LinkedHashMap<>();
expected.put("foo", 1);
expected.put("f*", 1);
expected.put("bar", 2);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception {

public void testSingleContextFiltering() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<String, ContextMapping>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;
Expand All @@ -209,7 +209,7 @@ public void testSingleContextFiltering() throws Exception {

public void testSingleContextBoosting() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<String, ContextMapping>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;
Expand Down Expand Up @@ -237,7 +237,7 @@ public void testSingleContextBoosting() throws Exception {

public void testSingleContextMultipleContexts() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<String, ContextMapping>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ public void testMegamorphic() throws Throwable {
assertEquals(3, (int)handle.invokeExact((Object) Arrays.asList("x", "y", "z")));
assertEquals(2, (int)handle.invokeExact((Object) Arrays.asList("u", "v")));

final HashMap<String,String> map = new HashMap<String,String>();
final HashMap<String,String> map = new HashMap<>();
map.put("x", "y");
map.put("a", "b");
assertEquals(2, (int)handle.invokeExact((Object) map));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,7 @@ public void onFailure(Exception e) {
*/
void onBulkResponse(TimeValue thisBatchStartTime, BulkResponse response) {
try {
List<Failure> failures = new ArrayList<Failure>();
List<Failure> failures = new ArrayList<>();
Set<String> destinationIndicesThisBatch = new HashSet<>();
for (BulkItemResponse item : response) {
if (item.isFailed()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ public void testResponseOnSearchFailure() throws Exception {
}

private void indexDocs(int count) throws Exception {
List<IndexRequestBuilder> docs = new ArrayList<IndexRequestBuilder>(count);
List<IndexRequestBuilder> docs = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("test", "words words"));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@

public class KuromojiPartOfSpeechFilterFactory extends AbstractTokenFilterFactory {

private final Set<String> stopTags = new HashSet<String>();
private final Set<String> stopTags = new HashSet<>();

public KuromojiPartOfSpeechFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ public void delete(BlobPath path) {
//we can do at most 1K objects per delete
//We don't know the bucket name until first object listing
DeleteObjectsRequest multiObjectDeleteRequest = null;
ArrayList<KeyVersion> keys = new ArrayList<KeyVersion>();
ArrayList<KeyVersion> keys = new ArrayList<>();
while (true) {
ObjectListing list;
if (prevListing != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -452,7 +452,7 @@ public void cleanRepositoryFiles(String basePath) {
//we can do at most 1K objects per delete
//We don't know the bucket name until first object listing
DeleteObjectsRequest multiObjectDeleteRequest = null;
ArrayList<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<DeleteObjectsRequest.KeyVersion>();
ArrayList<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<>();
while (true) {
ObjectListing list;
if (prevListing != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ public class TestAmazonS3 extends AmazonS3Wrapper {

private String randomPrefix;

ConcurrentMap<String, AtomicLong> accessCounts = new ConcurrentHashMap<String, AtomicLong>();
ConcurrentMap<String, AtomicLong> accessCounts = new ConcurrentHashMap<>();

private long incrementAndGet(String path) {
AtomicLong value = accessCounts.get(path);
Expand Down