Skip to content

Commit

Permalink
Adapt to the latest version & clean up code
Browse files Browse the repository at this point in the history
also fix the sec alert
  • Loading branch information
imbajin committed Nov 7, 2022
1 parent 36f7cd0 commit 97748a4
Show file tree
Hide file tree
Showing 21 changed files with 75 additions and 92 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,6 @@

import java.util.Map;

import javax.inject.Singleton;
import javax.ws.rs.Consumes;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;

import org.slf4j.Logger;

import com.baidu.hugegraph.HugeGraph;
Expand All @@ -46,6 +37,15 @@
import com.codahale.metrics.annotation.Timed;
import com.google.common.collect.ImmutableMap;

import jakarta.inject.Singleton;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.Context;

@Path("graphs/{graph}/jobs/algorithm")
@Singleton
public class AlgorithmAPI extends API {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,7 @@
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.JsonUtil;
import com.baidu.hugegraph.util.ParameterUtil;

import jersey.repackaged.com.google.common.base.Objects;
import com.google.common.base.Objects;

@SuppressWarnings("deprecation") // StringEscapeUtils
public abstract class AbstractAlgorithm implements Algorithm {
Expand Down Expand Up @@ -382,9 +381,7 @@ protected Iterator<Vertex> vertices(Object label, long limit) {
ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
query.capacity(Query.NO_CAPACITY);
query.limit(limit);
if (label != null) {
query.eq(HugeKeys.LABEL, this.getVertexLabelId(label));
}
query.eq(HugeKeys.LABEL, this.getVertexLabelId(label));
return this.graph().vertices(query);
}

Expand Down Expand Up @@ -544,8 +541,8 @@ public JsonMap() {
this(4 * (int) Bytes.KB);
}

public JsonMap(int initCapaticy) {
this.json = new StringBuilder(initCapaticy);
public JsonMap(int initCapacity) {
this.json = new StringBuilder(initCapacity);
}

public void startObject() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,11 @@

public interface Algorithm {

public String name();
String name();

public String category();
String category();

public Object call(UserJob<Object> job, Map<String, Object> parameters);
Object call(UserJob<Object> job, Map<String, Object> parameters);

public void checkParameters(Map<String, Object> parameters);
void checkParameters(Map<String, Object> parameters);
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import com.baidu.hugegraph.job.algorithm.cent.EigenvectorCentralityAlgorithm;
import com.baidu.hugegraph.job.algorithm.cent.StressCentralityAlgorithm;
import com.baidu.hugegraph.job.algorithm.cent.StressCentralityAlgorithmV2;
import com.baidu.hugegraph.job.algorithm.comm.ClusterCoeffcientAlgorithm;
import com.baidu.hugegraph.job.algorithm.comm.ClusterCoefficientAlgorithm;
import com.baidu.hugegraph.job.algorithm.comm.KCoreAlgorithm;
import com.baidu.hugegraph.job.algorithm.comm.LouvainAlgorithm;
import com.baidu.hugegraph.job.algorithm.comm.LpaAlgorithm;
Expand All @@ -56,7 +56,7 @@ public class AlgorithmPool {
INSTANCE.register(new EigenvectorCentralityAlgorithm());

INSTANCE.register(new TriangleCountAlgorithm());
INSTANCE.register(new ClusterCoeffcientAlgorithm());
INSTANCE.register(new ClusterCoefficientAlgorithm());
INSTANCE.register(new LpaAlgorithm());
INSTANCE.register(new LouvainAlgorithm());
INSTANCE.register(new WeakConnectedComponent());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ public abstract class BfsTraverser<T extends BfsTraverser.Node>
extends AbstractAlgorithm.AlgoTraverser
implements AutoCloseable {

private Stack<Id> traversedVertices = new Stack<>();
private final Stack<Id> traversedVertices = new Stack<>();

public BfsTraverser(UserJob<Object> job) {
super(job);
Expand Down Expand Up @@ -113,7 +113,7 @@ public static class Node {

private Id[] parents;
private int pathCount;
private int distance;
private final int distance;

public Node(Node parentNode) {
this(0, parentNode.distance + 1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ private Void runAndDone() {
this.run();
this.done();
} catch (Throwable e) {
// Only the first exception of one thread can be stored
// Only the first exception raised in each thread can be stored
this.exception = e;
if (!(e instanceof StopExecution)) {
LOG.error("Error when running task", e);
Expand All @@ -110,7 +110,8 @@ private void run() {
this.consume();
}
assert this.ending;
while (this.consume());
while (this.consume()) {
}

LOG.debug("Worker finished");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
import java.util.Iterator;
import java.util.Map;

import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.slf4j.Logger;
Expand Down Expand Up @@ -90,7 +90,6 @@ private HugeGraph createTempGraph(UserJob<Object> job) {
PropertiesConfiguration config = new PropertiesConfiguration();
config.setProperty(CoreOptions.BACKEND.name(), "memory");
config.setProperty(CoreOptions.STORE.name(), name);
config.setDelimiterParsingDisabled(true);
/*
 * NOTE: this temp graph doesn't need to init its backend because no task
 * info is required; it is also not marked as started because no task will
 * be scheduled on it.
Expand Down Expand Up @@ -129,12 +128,11 @@ protected static boolean copySchema(Map<String, Object> parameters) {

private static class Traverser extends AlgoTraverser {

private static Map<String, Object> PARAMS = ImmutableMap.of(
"depth", 10L,
"degree", -1L,
"sample", -1L,
"top", -1L /* sorted */,
"workers", 0);
private static final Map<String, Object> PARAMS = ImmutableMap.of("depth", 10L,
"degree", -1L,
"sample", -1L,
"top", -1L /* sorted */,
"workers", 0);

public Traverser(UserJob<Object> job) {
super(job);
Expand Down Expand Up @@ -166,8 +164,8 @@ public Object subgraphStat(UserJob<Object> job) {

results.put("page_ranks", pageRanks(job));

algo = pool.get("cluster_coeffcient");
results.put("cluster_coeffcient", algo.call(job, parameters));
algo = pool.get("cluster_coefficient");
results.put("cluster_coefficient", algo.call(job, parameters));

algo = pool.get("rings");
parameters = ImmutableMap.<String, Object>builder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,7 @@ protected GraphTraversal<Vertex, Vertex> constructSource(

t = t.filter(it -> {
this.updateProgress(++this.progress);
return sourceCLabel == null ? true :
match(it.get(), sourceCLabel);
return sourceCLabel == null || match(it.get(), sourceCLabel);
});

if (sourceSample > 0L) {
Expand Down Expand Up @@ -164,9 +163,7 @@ protected <V> GraphTraversal<V, V> filterNonShortestPath(
triples.put(key, len);
} else {
assert len == shortest;
if (keepOneShortestPath) {
return false;
}
return !keepOneShortestPath;
}
return true;
});
Expand All @@ -182,7 +179,7 @@ protected GraphTraversal<Vertex, Id> substractPath(
@SuppressWarnings("unchecked")
Iterator<HugeVertex> items = (Iterator<HugeVertex>)
path.iterator();
return new MapperIterator<>(items, v -> v.id());
return new MapperIterator<>(items, HugeVertex::id);
}
int len = path.size();
if (len < 3) {
Expand All @@ -195,7 +192,7 @@ protected GraphTraversal<Vertex, Id> substractPath(
@SuppressWarnings("unchecked")
Iterator<HugeVertex> items = (Iterator<HugeVertex>)
path.iterator();
return new MapperIterator<>(items, v -> v.id());
return new MapperIterator<>(items, HugeVertex::id);
});
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ public Object betweennessCentrality(Directions direction,
tg = this.computeBetweenness(tg);
GraphTraversal<Vertex, ?> tLimit = topN(tg, topN);

return this.execute(tLimit, () -> tLimit.next());
return this.execute(tLimit, tLimit::next);
}

protected GraphTraversal<Vertex, ?> groupPathByEndpoints(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ public Object closenessCentrality(Directions direction,
.math("_-1").sack(Operator.div).sack().sum());
GraphTraversal<Vertex, ?> tLimit = topN(tg, topN);

return this.execute(tLimit, () -> tLimit.next());
return this.execute(tLimit, tLimit::next);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ public Object call(UserJob<Object> job, Map<String, Object> parameters) {

private static class Traverser extends BfsTraverser<BfsTraverser.Node> {

private Map<Id, Float> globalCloseness;
private final Map<Id, Float> globalCloseness;

private float startVertexCloseness;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

package com.baidu.hugegraph.job.algorithm.cent;

import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -155,7 +155,7 @@ protected Object degreeCentralityForBothDir(String label, long topN) {
}

private long degree(Id source, String label) {
List<String> labels = label == null ? null : Arrays.asList(label);
List<String> labels = label == null ? null : Collections.singletonList(label);
EdgeStep step = new EdgeStep(this.graph(), Directions.BOTH,
labels, null, NO_LIMIT, 0);
return this.edgesCount(source, step);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ public Object eigenvectorCentrality(Directions direction,
GraphTraversal<Vertex, Object> tCap = t.cap("m");
GraphTraversal<Vertex, ?> tLimit = topN(tCap, topN);

return this.execute(tLimit, () -> tLimit.next());
return this.execute(tLimit, tLimit::next);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ public Object stressCentrality(Directions direction,
.groupCount();
GraphTraversal<Vertex, ?> tLimit = topN(tg, topN);

return this.execute(tLimit, () -> tLimit.next());
return this.execute(tLimit, tLimit::next);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.InsertionOrderUtil;

public class ClusterCoeffcientAlgorithm extends AbstractCommAlgorithm {
public class ClusterCoefficientAlgorithm extends AbstractCommAlgorithm {

public static final String ALGO_NAME = "cluster_coeffcient";
public static final String ALGO_NAME = "cluster_coefficient";

@Override
public String name() {
Expand All @@ -46,8 +46,7 @@ public void checkParameters(Map<String, Object> parameters) {
public Object call(UserJob<Object> job, Map<String, Object> parameters) {
int workers = workersWhenBoth(parameters);
try (Traverser traverser = new Traverser(job, workers)) {
return traverser.clusterCoeffcient(direction(parameters),
degree(parameters));
return traverser.clusterCoefficient(direction(parameters), degree(parameters));
}
}

Expand All @@ -67,18 +66,18 @@ public Traverser(UserJob<Object> job, int workers) {
super(job, ALGO_NAME, workers);
}

public Object clusterCoeffcient(Directions direction, long degree) {
public Object clusterCoefficient(Directions direction, long degree) {
Map<String, Long> results = this.triangles(direction, degree);
results = InsertionOrderUtil.newMap(results);

long triangles = results.remove(KEY_TRIANGLES);
long triads = results.remove(KEY_TRIADS);
assert triangles <= triads;
double coeffcient = triads == 0L ? 0d : 1d * triangles / triads;
double coefficient = triads == 0L ? 0d : 1d * triangles / triads;

@SuppressWarnings({ "unchecked", "rawtypes" })
Map<String, Double> converted = (Map) results;
converted.put("cluster_coeffcient", coeffcient);
converted.put("cluster_coefficient", coefficient);

return results;
}
Expand Down
Loading

0 comments on commit 97748a4

Please sign in to comment.