refact: use standard UTF-8 charset & enhance CI configs #2095

Merged: 18 commits, Mar 7, 2023
1 change: 1 addition & 0 deletions .asf.yaml
@@ -40,6 +40,7 @@ github:
contexts:
- Analyze (java)
- CodeQL
- check-license
- build (memory, 8)
- build (memory, 11)
required_pull_request_reviews:
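The `contexts` list in `.asf.yaml` names the status checks that must pass before the protected branch can be merged; the new `check-license` entry presumably matches the check name reported by the license workflow changed below. A minimal sketch of the surrounding structure (the branch name and nesting are assumed from the usual ASF layout, not shown in this hunk):

github:
  protected_branches:
    master:
      required_status_checks:
        contexts:
          - CodeQL
          - check-license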
27 changes: 22 additions & 5 deletions .github/workflows/check-dependencies.yml
@@ -1,31 +1,48 @@
name: third-party dependencies check
name: "3rd-party"

on:
push:
branches:
- master
- /^release-.*$/
pull_request:

permissions:
contents: read

jobs:
build:
dependency-check:
runs-on: ubuntu-latest
env:
SCRIPT_DEPENDENCY: hugegraph-dist/scripts/dependency
steps:
- name: Checkout source
uses: actions/checkout@v3
- name: Set up JDK 8
- name: Set up JDK 11
uses: actions/setup-java@v3
with:
java-version: '8'
java-version: '11'
distribution: 'adopt'
- name: mvn install
run: |
mvn install -DskipTests=true
mvn install -DskipTests=true -ntp
- name: generate current dependencies
run: |
bash $SCRIPT_DEPENDENCY/regenerate_known_dependencies.sh current-dependencies.txt
- name: check third dependencies
run: |
bash $SCRIPT_DEPENDENCY/check_dependencies.sh

dependency-review:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v3
- name: 'Dependency Review'
uses: actions/dependency-review-action@v3
# Refer: https://github.com/actions/dependency-review-action
with:
fail-on-severity: low
# Action will fail if dependencies don't match the list
#allow-licenses: Apache-2.0, MIT
#deny-licenses: GPL-3.0, AGPL-1.0, AGPL-3.0, LGPL-2.0, CC-BY-3.0
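If the commented license gate above were switched on later, the step could look like the sketch below; only the allow list is shown, and the identifiers are simply the ones already listed in the comments:

      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v3
        with:
          fail-on-severity: low
          # Fail the check when a newly added dependency carries a license outside this list
          allow-licenses: Apache-2.0, MIT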
4 changes: 1 addition & 3 deletions .github/workflows/ci.yml
@@ -1,4 +1,4 @@
name: hugegraph-ci
name: "hugegraph-ci"

on:
push:
@@ -7,8 +7,6 @@ on:
- 'release-*'
- 'test-*'
pull_request:
branches:
- '**'

jobs:
build:
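The two lines dropped from `pull_request` above were redundant: when no `branches` filter is given, GitHub Actions already runs the workflow for pull requests against any target branch. A minimal equivalent trigger (push branch list abbreviated) looks like:

on:
  push:
    branches:
      - 'release-*'
      - 'test-*'
  pull_request:    # no branches filter: runs for PRs targeting any branch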
6 changes: 2 additions & 4 deletions .github/workflows/codeql-analysis.yml
@@ -1,10 +1,8 @@
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
name: "CodeQL"

on:
push:
branches: [ master, release-*, v*.* ]
branches: [ master, release-* ]
pull_request:
# The branches below must be a subset of the branches above
# branches: [ master ] # enable in all PR
@@ -33,7 +31,7 @@ jobs:
uses: actions/setup-java@v3
with:
distribution: 'zulu'
java-version: '8'
java-version: '11'

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
4 changes: 2 additions & 2 deletions .github/workflows/licence-checker.yml
@@ -1,10 +1,10 @@
name: License checker
name: "License checker"

on:
push:
branches:
- master
- /^v[0-9]\..*$/
- 'release-*'
pull_request:

jobs:
2 changes: 1 addition & 1 deletion .github/workflows/stale.yml
@@ -1,4 +1,4 @@
name: Mark stale issues and pull requests
name: "Mark stale issues and pull requests"

on:
schedule:
@@ -56,12 +56,13 @@
* CassandraShard is used for cassandra scanning operations.
* Each shard represents a range of tokens for a node.
* Reading data from a given shard does not cross multiple nodes.
* <p>
* Refer to AbstractColumnFamilyInputFormat from:
* <a href="https://github.com/2013Commons/hive-cassandra/">...</a>
*/
public class CassandraShard {

/* The minimal shard size should >= 1M to prevent too many number of shards */
/** The minimal shard size should >= 1M to prevent too many number of shards */
private static final int MIN_SHARD_SIZE = (int) Bytes.MB;

private CassandraSessionPool.Session session;
@@ -228,7 +229,7 @@ private static Map<TokenRange, Long> describeSplits(
tokenRange.getEnd().toString());
Row row = resultSet.one();

long meanPartitionSize = 0L;
long meanPartitionSize;
long partitionsCount = 0L;
long splitCount = 0L;

@@ -56,7 +56,7 @@ public AnsjAnalyzer(String mode) {

@Override
public Set<String> segment(String text) {
Result terms = null;
Result terms;
switch (this.analysis) {
case "BaseAnalysis":
terms = BaseAnalysis.parse(text);
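Dropping the dummy `= null` initializer (here and for `meanPartitionSize` above) lets the compiler enforce definite assignment: every path through the `switch` must either assign the variable or throw before it is read. A standalone sketch with illustrative names:

// Compiles only because every branch assigns "result" or throws before the return.
static String segmentMode(String mode) {
    String result;                      // no redundant "= null"
    switch (mode) {
        case "upper":
            result = mode.toUpperCase();
            break;
        case "lower":
            result = mode.toLowerCase();
            break;
        default:
            throw new IllegalArgumentException("Unknown mode: " + mode);
    }
    return result;
}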
@@ -165,11 +165,11 @@ private void require(int size) {
E.checkState(this.resize, "Can't resize for wrapped buffer");

// Extra capacity as buffer
int newcapacity = size + this.buffer.limit() + DEFAULT_CAPACITY;
E.checkArgument(newcapacity <= MAX_BUFFER_CAPACITY,
int newCapacity = size + this.buffer.limit() + DEFAULT_CAPACITY;
E.checkArgument(newCapacity <= MAX_BUFFER_CAPACITY,
"Capacity exceeds max buffer capacity: %s",
MAX_BUFFER_CAPACITY);
ByteBuffer newBuffer = ByteBuffer.allocate(newcapacity);
ByteBuffer newBuffer = ByteBuffer.allocate(newCapacity);
((Buffer) this.buffer).flip();
newBuffer.put(this.buffer);
this.buffer = newBuffer;
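The resize logic above follows the usual NIO grow-and-copy pattern: allocate a larger buffer, flip the old one into read mode, and copy the written bytes across. A self-contained sketch (class name, method name, and capacities are illustrative):

import java.nio.ByteBuffer;

public class BufferGrowSketch {

    static ByteBuffer grow(ByteBuffer old, int extraCapacity) {
        ByteBuffer grown = ByteBuffer.allocate(old.capacity() + extraCapacity);
        old.flip();            // limit = bytes written so far, position = 0
        grown.put(old);        // copy the written bytes into the new buffer
        return grown;          // position now sits after the copied bytes
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.put(new byte[]{1, 2, 3, 4});
        buf = grow(buf, 8);
        buf.put((byte) 5);     // would have overflowed the original 4-byte buffer
        System.out.println("capacity=" + buf.capacity() + ", position=" + buf.position());
    }
}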
@@ -53,10 +53,8 @@ public final class HugeScriptTraversal<S, E> extends DefaultTraversal<S, E> {

private Object result;

public HugeScriptTraversal(TraversalSource traversalSource,
String language, String script,
Map<String, Object> bindings,
Map<String, String> aliases) {
public HugeScriptTraversal(TraversalSource traversalSource, String language, String script,
Map<String, Object> bindings, Map<String, String> aliases) {
this.graph = traversalSource.getGraph();
this.language = language;
this.script = script;
@@ -75,8 +73,7 @@ public String script() {

@Override
public void applyStrategies() throws IllegalStateException {
ScriptEngine engine =
SingleGremlinScriptEngineManager.get(this.language);
ScriptEngine engine = SingleGremlinScriptEngineManager.get(this.language);

Bindings bindings = engine.createBindings();
bindings.putAll(this.bindings);
@@ -94,9 +91,8 @@ public void applyStrategies() throws IllegalStateException {
for (Map.Entry<String, String> entry : this.aliases.entrySet()) {
Object value = bindings.get(entry.getValue());
if (value == null) {
throw new IllegalArgumentException(String.format(
"Invalid aliase '%s':'%s'",
entry.getKey(), entry.getValue()));
throw new IllegalArgumentException(String.format("Invalid alias '%s':'%s'",
entry.getKey(), entry.getValue()));
}
bindings.put(entry.getKey(), value);
}
@@ -105,7 +101,7 @@ public void applyStrategies() throws IllegalStateException {
Object result = engine.eval(this.script, bindings);

if (result instanceof Admin) {
@SuppressWarnings({ "unchecked", "resource" })
@SuppressWarnings({ "unchecked"})
Admin<S, E> traversal = (Admin<S, E>) result;
traversal.getSideEffects().mergeInto(this.sideEffects);
traversal.getSteps().forEach(this::addStep);
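The alias handling above simply makes a second binding name point at an already-bound value before the script is evaluated. A rough standalone sketch using the standard `javax.script` API (engine name and values are illustrative; this is not HugeGraph's Gremlin engine):

import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

public class AliasBindingSketch {

    public static void main(String[] args) throws Exception {
        ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");
        Bindings bindings = engine.createBindings();
        bindings.put("graph", "graph-handle");        // the original binding
        Object value = bindings.get("graph");
        if (value == null) {
            throw new IllegalArgumentException("Invalid alias 'g':'graph'");
        }
        bindings.put("g", value);                     // alias "g" -> same value as "graph"
        System.out.println(engine.eval("g", bindings));
    }
}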
@@ -39,8 +39,8 @@ public enum Cardinality implements SerialEnum {
*/
SET(3, "set");

private byte code = 0;
private String name = null;
private final byte code;
private final String name;

static {
SerialEnum.register(Cardinality.class);
@@ -78,8 +78,8 @@ public static Cardinality convert(VertexProperty.Cardinality cardinality) {
case set:
return SET;
default:
throw new AssertionError(String.format(
"Unrecognized cardinality: '%s'", cardinality));
throw new AssertionError(String.format("Unrecognized cardinality: '%s'",
cardinality));
}
}
}
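Making `code` and `name` final, as above, is a small hardening: the compiler then guarantees each enum constant assigns them exactly once in its constructor, so the old dummy defaults are unnecessary. A minimal sketch with illustrative names:

public enum Mode {

    FAST(1, "fast"),
    SAFE(2, "safe");

    private final byte code;      // final: must be assigned in the constructor
    private final String name;

    Mode(int code, String name) {
        this.code = (byte) code;
        this.name = name;
    }

    public byte code() {
        return this.code;
    }
}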
@@ -37,43 +37,36 @@ public static BytesBuffer compress(byte[] bytes, int blockSize) {
return compress(bytes, blockSize, DEFAULT_BUFFER_RATIO);
}

public static BytesBuffer compress(byte[] bytes, int blockSize,
float bufferRatio) {
public static BytesBuffer compress(byte[] bytes, int blockSize, float bufferRatio) {
float ratio = bufferRatio <= 0.0F ? DEFAULT_BUFFER_RATIO : bufferRatio;
LZ4Factory factory = LZ4Factory.fastestInstance();
LZ4Compressor compressor = factory.fastCompressor();
int initBufferSize = Math.round(bytes.length / ratio);
BytesBuffer buf = new BytesBuffer(initBufferSize);
LZ4BlockOutputStream lz4Output = new LZ4BlockOutputStream(
buf, blockSize, compressor);
LZ4BlockOutputStream lz4Output = new LZ4BlockOutputStream(buf, blockSize, compressor);
try {
lz4Output.write(bytes);
lz4Output.close();
} catch (IOException e) {
throw new BackendException("Failed to compress", e);
}
/*
* If need to perform reading outside the method,
* remember to call forReadWritten()
*/
// If we need to perform reading outside the method, remember to call forReadWritten()
return buf;
}

public static BytesBuffer decompress(byte[] bytes, int blockSize) {
return decompress(bytes, blockSize, DEFAULT_BUFFER_RATIO);
}

public static BytesBuffer decompress(byte[] bytes, int blockSize,
float bufferRatio) {
public static BytesBuffer decompress(byte[] bytes, int blockSize, float bufferRatio) {
float ratio = bufferRatio <= 0.0F ? DEFAULT_BUFFER_RATIO : bufferRatio;
LZ4Factory factory = LZ4Factory.fastestInstance();
LZ4FastDecompressor decompressor = factory.fastDecompressor();
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
int initBufferSize = Math.min(Math.round(bytes.length * ratio),
BytesBuffer.MAX_BUFFER_CAPACITY);
BytesBuffer buf = new BytesBuffer(initBufferSize);
LZ4BlockInputStream lzInput = new LZ4BlockInputStream(bais,
decompressor);
LZ4BlockInputStream lzInput = new LZ4BlockInputStream(bais, decompressor);
int count;
byte[] buffer = new byte[blockSize];
try {
@@ -84,10 +77,7 @@ public static BytesBuffer decompress(byte[] bytes, int blockSize,
} catch (IOException e) {
throw new BackendException("Failed to decompress", e);
}
/*
* If need to perform reading outside the method,
* remember to call forReadWritten()
*/
// If we need to perform reading outside the method, remember to call forReadWritten()
return buf;
}
}
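For reference, the lz4-java block-stream API used above can be exercised end to end with a small round trip. The sketch below (payload, block size, and class name are illustrative) compresses into a byte array and reads it back:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

import net.jpountz.lz4.LZ4BlockInputStream;
import net.jpountz.lz4.LZ4BlockOutputStream;
import net.jpountz.lz4.LZ4Factory;

public class Lz4RoundTripSketch {

    public static void main(String[] args) throws Exception {
        byte[] original = "hello hugegraph, hello lz4".getBytes(StandardCharsets.UTF_8);
        LZ4Factory factory = LZ4Factory.fastestInstance();

        // Compress: wrap any OutputStream in an LZ4BlockOutputStream
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        try (LZ4BlockOutputStream out =
                 new LZ4BlockOutputStream(compressed, 1 << 16, factory.fastCompressor())) {
            out.write(original);
        }

        // Decompress: wrap the compressed bytes in an LZ4BlockInputStream
        ByteArrayOutputStream restored = new ByteArrayOutputStream();
        try (LZ4BlockInputStream in = new LZ4BlockInputStream(
                 new ByteArrayInputStream(compressed.toByteArray()),
                 factory.fastDecompressor())) {
            byte[] block = new byte[1024];
            int n;
            while ((n = in.read(block)) != -1) {
                restored.write(block, 0, n);
            }
        }

        System.out.println(new String(restored.toByteArray(), StandardCharsets.UTF_8));
    }
}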