enhance: Enable linux code checker #35084

Merged
12 changes: 6 additions & 6 deletions .github/mergify.yml
@@ -50,7 +50,7 @@ pull_request_rules:
- 'status-success=UT for Cpp'
- 'status-success=UT for Go'
- 'status-success=Integration Test'
# - 'status-success=Code Checker AMD64 Ubuntu 22.04'
- 'status-success=Code Checker AMD64 Ubuntu 22.04'
# - 'status-success=Code Checker MacOS 12'
# - 'status-success=Code Checker Amazonlinux 2023'
- 'status-success=cpu-e2e'
@@ -63,7 +63,7 @@ pull_request_rules:
- name: Test passed for code changed -2.*.*
conditions:
- base~=^2(\.\d+){2}$
# - 'status-success=Code Checker AMD64 Ubuntu 22.04'
- 'status-success=Code Checker AMD64 Ubuntu 22.04'
- or: *Build_AND_TEST_STATUS_SUCESS_ON_UBUNTU_20_OR_UBUNTU_22
- 'status-success=UT for Cpp'
- 'status-success=UT for Go'
@@ -123,7 +123,7 @@ pull_request_rules:
- base=sql_beta
- base~=^2(\.\d+){1,2}$
- or: *Build_AND_TEST_STATUS_SUCESS_ON_UBUNTU_20_OR_UBUNTU_22
# - 'status-success=Code Checker AMD64 Ubuntu 22.04'
- 'status-success=Code Checker AMD64 Ubuntu 22.04'
# - 'status-success=Code Checker MacOS 12'
# - 'status-success=Code Checker Amazonlinux 2023'
- 'status-success=UT for Go'
@@ -139,7 +139,7 @@ pull_request_rules:
conditions:
- or: *Build_AND_TEST_STATUS_SUCESS_ON_UBUNTU_20_OR_UBUNTU_22
- base~=^2\.2\.\d+$
# - 'status-success=Code Checker AMD64 Ubuntu 22.04'
- 'status-success=Code Checker AMD64 Ubuntu 22.04'
# - 'status-success=Code Checker MacOS 12'
- -files~=^(?!internal\/.*_test\.go).*$
actions:
@@ -267,7 +267,7 @@ pull_request_rules:
- base~=^2(\.\d+){1,2}$
- or: *Build_AND_TEST_STATUS_SUCESS_ON_UBUNTU_20_OR_UBUNTU_22
- title~=\[skip e2e\]
# - 'status-success=Code Checker AMD64 Ubuntu 22.04'
- 'status-success=Code Checker AMD64 Ubuntu 22.04'
- 'status-success=UT for Cpp'
- 'status-success=UT for Go'
- 'status-success=Integration Test'
@@ -320,7 +320,7 @@ pull_request_rules:
- or:
- *failed_on_ubuntu_20
- *failed_on_ubuntu_22
# - 'status-success!=Code Checker AMD64 Ubuntu 22.04'
- 'status-success!=Code Checker AMD64 Ubuntu 22.04'
- 'status-success!=UT for Cpp'
- 'status-success!=UT for Go'
- 'status-success!=Integration Test'
100 changes: 50 additions & 50 deletions .github/workflows/code-checker.yaml
@@ -61,54 +61,54 @@ jobs:
run: |
./build/builder.sh /bin/bash -c "make check-proto-product && make verifiers"

amazonlinux:
name: Code Checker Amazonlinux 2023
# Run in amazonlinux docker
runs-on: ubuntu-latest
timeout-minutes: 180
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
if: ${{ ! startsWith(runner.name, 'self') }} # skip this step if it is self-hosted runner
with:
root-reserve-mb: 20480
swap-size-mb: 1024
remove-dotnet: 'true'
remove-android: 'true'
remove-haskell: 'true'
- name: Checkout
uses: actions/checkout@v2
- name: Download Caches
uses: ./.github/actions/cache
with:
os: 'amazonlinux2023'
- name: Code Check
run: |
sed -i 's/ubuntu22.04/amazonlinux2023/g' .env
./build/builder.sh /bin/bash -c "make install"
# amazonlinux:
# name: Code Checker Amazonlinux 2023
# # Run in amazonlinux docker
# runs-on: ubuntu-latest
# timeout-minutes: 180
# steps:
# - name: Maximize build space
# uses: easimon/maximize-build-space@master
# if: ${{ ! startsWith(runner.name, 'self') }} # skip this step if it is self-hosted runner
# with:
# root-reserve-mb: 20480
# swap-size-mb: 1024
# remove-dotnet: 'true'
# remove-android: 'true'
# remove-haskell: 'true'
# - name: Checkout
# uses: actions/checkout@v2
# - name: Download Caches
# uses: ./.github/actions/cache
# with:
# os: 'amazonlinux2023'
# - name: Code Check
# run: |
# sed -i 's/ubuntu22.04/amazonlinux2023/g' .env
# ./build/builder.sh /bin/bash -c "make install"

rockylinux:
name: Code Checker rockylinux8
# Run in amazonlinux docker
runs-on: ubuntu-latest
timeout-minutes: 180
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
if: ${{ ! startsWith(runner.name, 'self') }} # skip this step if it is self-hosted runner
with:
root-reserve-mb: 20480
swap-size-mb: 1024
remove-dotnet: 'true'
remove-android: 'true'
remove-haskell: 'true'
- name: Checkout
uses: actions/checkout@v2
- name: Download Caches
uses: ./.github/actions/cache
with:
os: 'rockylinux8'
- name: Code Check
run: |
sed -i 's/ubuntu22.04/rockylinux8/g' .env
./build/builder.sh /bin/bash -c "make install"
# rockylinux:
# name: Code Checker rockylinux8
# # Run in amazonlinux docker
# runs-on: ubuntu-latest
# timeout-minutes: 180
# steps:
# - name: Maximize build space
# uses: easimon/maximize-build-space@master
# if: ${{ ! startsWith(runner.name, 'self') }} # skip this step if it is self-hosted runner
# with:
# root-reserve-mb: 20480
# swap-size-mb: 1024
# remove-dotnet: 'true'
# remove-android: 'true'
# remove-haskell: 'true'
# - name: Checkout
# uses: actions/checkout@v2
# - name: Download Caches
# uses: ./.github/actions/cache
# with:
# os: 'rockylinux8'
# - name: Code Check
# run: |
# sed -i 's/ubuntu22.04/rockylinux8/g' .env
# ./build/builder.sh /bin/bash -c "make install"
8 changes: 4 additions & 4 deletions Makefile
@@ -182,13 +182,13 @@ lint-fix: getdeps
static-check: getdeps
@echo "Running $@ check"
@echo "Start check core packages"
@source $(PWD)/scripts/setenv.sh && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --build-tags dynamic,test --timeout=30m --config $(PWD)/.golangci.yml
@source $(PWD)/scripts/setenv.sh && GO111MODULE=on GOFLAGS=-buildvcs=false $(INSTALL_PATH)/golangci-lint run --build-tags dynamic,test --timeout=30m --config $(PWD)/.golangci.yml
@echo "Start check pkg package"
@source $(PWD)/scripts/setenv.sh && cd pkg && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --build-tags dynamic,test --timeout=30m --config $(PWD)/.golangci.yml
@source $(PWD)/scripts/setenv.sh && cd pkg && GO111MODULE=on GOFLAGS=-buildvcs=false $(INSTALL_PATH)/golangci-lint run --build-tags dynamic,test --timeout=30m --config $(PWD)/.golangci.yml
@echo "Start check client package"
@source $(PWD)/scripts/setenv.sh && cd client && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config $(PWD)/client/.golangci.yml
@source $(PWD)/scripts/setenv.sh && cd client && GO111MODULE=on GOFLAGS=-buildvcs=false $(INSTALL_PATH)/golangci-lint run --timeout=30m --config $(PWD)/client/.golangci.yml
@echo "Start check go_client e2e package"
@source $(PWD)/scripts/setenv.sh && cd tests/go_client && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config $(PWD)/client/.golangci.yml
@source $(PWD)/scripts/setenv.sh && cd tests/go_client && GO111MODULE=on GOFLAGS=-buildvcs=false $(INSTALL_PATH)/golangci-lint run --timeout=30m --config $(PWD)/client/.golangci.yml

verifiers: build-cpp getdeps cppcheck fmt static-check
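
The only functional change to `static-check` is the added `GOFLAGS=-buildvcs=false` on each golangci-lint invocation. That flag stops the Go toolchain from stamping VCS metadata into build artifacts, which is a common fix for "error obtaining VCS status" failures when the lint runs inside the builder container and the checkout's git metadata is not readable there. As a rough, standalone illustration (not part of this PR), the sketch below prints the `vcs.*` settings that get embedded when VCS stamping is enabled; with `-buildvcs=false` they are simply absent:

```go
// vcsinfo.go: standalone sketch, not part of this PR.
// Build it twice, once normally and once with GOFLAGS=-buildvcs=false,
// to see which settings the toolchain embeds.
package main

import (
	"fmt"
	"runtime/debug"
	"strings"
)

func main() {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		fmt.Println("no build info embedded")
		return
	}
	for _, s := range info.Settings {
		// Keys like vcs, vcs.revision, vcs.time and vcs.modified only show up
		// when the binary was built with VCS stamping enabled.
		if s.Key == "vcs" || strings.HasPrefix(s.Key, "vcs.") {
			fmt.Printf("%s = %s\n", s.Key, s.Value)
		}
	}
}
```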

36 changes: 18 additions & 18 deletions cmd/milvus/mck.go
@@ -528,40 +528,40 @@ func (c *mck) unmarshalTask(taskID int64, t string) (string, []int64, []int64, e

switch header.Base.MsgType {
case commonpb.MsgType_LoadCollection:
loadReq := querypb.LoadCollectionRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.LoadCollectionRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "LoadCollectionRequest", err)
}
log.Info("LoadCollection", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "LoadCollection", emptyInt64(), emptyInt64(), nil
case commonpb.MsgType_LoadPartitions:
loadReq := querypb.LoadPartitionsRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.LoadPartitionsRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "LoadPartitionsRequest", err)
}
log.Info("LoadPartitions", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "LoadPartitions", loadReq.PartitionIDs, emptyInt64(), nil
case commonpb.MsgType_ReleaseCollection:
loadReq := querypb.ReleaseCollectionRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.ReleaseCollectionRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "ReleaseCollectionRequest", err)
}
log.Info("ReleaseCollection", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "ReleaseCollection", emptyInt64(), emptyInt64(), nil
case commonpb.MsgType_ReleasePartitions:
loadReq := querypb.ReleasePartitionsRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.ReleasePartitionsRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "ReleasePartitionsRequest", err)
}
log.Info("ReleasePartitions", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "ReleasePartitions", loadReq.PartitionIDs, emptyInt64(), nil
case commonpb.MsgType_LoadSegments:
loadReq := querypb.LoadSegmentsRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.LoadSegmentsRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "LoadSegmentsRequest", err)
}
@@ -584,16 +584,16 @@ func (c *mck) unmarshalTask(taskID int64, t string) (string, []int64, []int64, e
log.Info("LoadSegments", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "LoadSegments", removeRepeatElement(partitionIDs), removeRepeatElement(segmentIDs), nil
case commonpb.MsgType_ReleaseSegments:
loadReq := querypb.ReleaseSegmentsRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.ReleaseSegmentsRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "ReleaseSegmentsRequest", err)
}
log.Info("ReleaseSegments", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "ReleaseSegments", loadReq.PartitionIDs, loadReq.SegmentIDs, nil
case commonpb.MsgType_WatchDmChannels:
loadReq := querypb.WatchDmChannelsRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.WatchDmChannelsRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "WatchDmChannelsRequest", err)
}
@@ -619,16 +619,16 @@ func (c *mck) unmarshalTask(taskID int64, t string) (string, []int64, []int64, e
log.Warn("legacy WatchQueryChannels type found, ignore")
return "WatchQueryChannels", emptyInt64(), emptyInt64(), nil
case commonpb.MsgType_LoadBalanceSegments:
loadReq := querypb.LoadBalanceRequest{}
err = proto.Unmarshal([]byte(t), &loadReq)
loadReq := &querypb.LoadBalanceRequest{}
err = proto.Unmarshal([]byte(t), loadReq)
if err != nil {
return errReturn(taskID, "LoadBalanceRequest", err)
}
log.Info("LoadBalanceSegments", zap.String("detail", fmt.Sprintf("+%v", loadReq)))
return "LoadBalanceSegments", emptyInt64(), loadReq.SealedSegmentIDs, nil
case commonpb.MsgType_HandoffSegments:
handoffReq := querypb.HandoffSegmentsRequest{}
err = proto.Unmarshal([]byte(t), &handoffReq)
handoffReq := &querypb.HandoffSegmentsRequest{}
err = proto.Unmarshal([]byte(t), handoffReq)
if err != nil {
return errReturn(taskID, "HandoffSegmentsRequest", err)
}
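
The mck.go changes are mechanical: each request is now built as a pointer literal (`&querypb.XxxRequest{}`) and passed to `proto.Unmarshal` directly, rather than declaring a value and taking its address at the call site. The generated pointer type is what implements `proto.Message`, and keeping the message behind a pointer from the start means the generated struct (which embeds a no-copy guard) is never copied by value, which is presumably the form the newly enabled linters prefer. A minimal sketch of the same round trip, using a well-known type rather than Milvus' querypb messages:

```go
// Standalone sketch of the pointer-based Unmarshal pattern used in mck.go.
package main

import (
	"fmt"
	"log"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	raw, err := proto.Marshal(timestamppb.Now())
	if err != nil {
		log.Fatal(err)
	}

	ts := &timestamppb.Timestamp{} // pointer literal, as in the patched code
	if err := proto.Unmarshal(raw, ts); err != nil {
		log.Fatal(err)
	}
	fmt.Println(ts.AsTime())
}
```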
2 changes: 1 addition & 1 deletion configs/milvus.yaml
@@ -431,7 +431,7 @@ dataCoord:
maxSize: 1024 # Maximum size of a segment in MB
diskSegmentMaxSize: 2048 # Maximun size of a segment in MB for collection which has Disk index
sealProportion: 0.12
sealProportionJitter: 0.1 # segment seal proportion jitter ratio, default value 0.1(10%), if seal propertion is 12%, with jitter=0.1, the actuall applied ratio will be 10.8~12%
sealProportionJitter: 0.1 # segment seal proportion jitter ratio, default value 0.1(10%), if seal proportion is 12%, with jitter=0.1, the actuall applied ratio will be 10.8~12%
assignmentExpiration: 2000 # The time of the assignment expiration in ms
allocLatestExpireAttempt: 200 # The time attempting to alloc latest lastExpire from rootCoord after restart
maxLife: 86400 # The max lifetime of segment in seconds, 24*60*60
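
The only change here is the `propertion` → `proportion` spelling fix inside the `sealProportionJitter` comment. To make the arithmetic in that comment concrete: with `sealProportion: 0.12` and `sealProportionJitter: 0.1`, the applied ratio lies between 0.12 × (1 − 0.1) = 0.108 and 0.12, i.e. 10.8%–12%. A tiny sketch of that calculation, assuming the jitter factor is drawn uniformly from [0, jitter] (the actual Milvus implementation may differ):

```go
// Standalone sketch of the jittered seal proportion described in the comment.
package main

import (
	"fmt"
	"math/rand"
)

// appliedSealProportion reduces sealProportion by a random fraction of up to
// `jitter` of itself, so 0.12 with jitter 0.1 yields a value in [0.108, 0.12].
func appliedSealProportion(sealProportion, jitter float64) float64 {
	return sealProportion * (1 - jitter*rand.Float64())
}

func main() {
	for i := 0; i < 3; i++ {
		fmt.Printf("applied seal proportion: %.4f\n", appliedSealProportion(0.12, 0.1))
	}
}
```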
4 changes: 2 additions & 2 deletions internal/core/src/common/QueryResult.h
@@ -107,8 +107,8 @@ struct VectorIterator {
for (auto& iter : iterators_) {
if (iter->HasNext()) {
auto origin_pair = iter->Next();
origin_pair.first = convert_to_segment_offset(
origin_pair.first, idx);
origin_pair.first =
convert_to_segment_offset(origin_pair.first, idx);
auto off_dis_pair =
std::make_shared<OffsetDisPair>(origin_pair, idx++);
heap_.push(off_dis_pair);
4 changes: 2 additions & 2 deletions internal/core/src/common/Tracer.cpp
@@ -64,8 +64,8 @@ initTelemetry(const TraceConfig& cfg) {
opts.url = cfg.otlpEndpoint;
exporter = otlp::OtlpHttpExporterFactory::Create(opts);
LOG_INFO("init otlp http exporter, endpoint: {}", opts.url);
} else if (cfg.otlpMethod == "grpc" ||
cfg.otlpMethod == "") { // legacy configuration
} else if (cfg.otlpMethod == "grpc" ||
cfg.otlpMethod == "") { // legacy configuration
auto opts = otlp::OtlpGrpcExporterOptions{};
opts.endpoint = cfg.otlpEndpoint;
opts.use_ssl_credentials = cfg.oltpSecure;
2 changes: 1 addition & 1 deletion internal/core/src/exec/expression/UnaryExpr.cpp
@@ -364,7 +364,7 @@ PhyUnaryRangeFilterExpr::ExecArrayEqualForIndex(bool reverse) {

// filtering by index, get candidates.
auto size_per_chunk = segment_->size_per_chunk();
auto retrieve = [size_per_chunk, this](int64_t offset) -> auto {
auto retrieve = [ size_per_chunk, this ](int64_t offset) -> auto {
auto chunk_idx = offset / size_per_chunk;
auto chunk_offset = offset % size_per_chunk;
const auto& chunk =
10 changes: 5 additions & 5 deletions internal/core/src/monitor/prometheus_client.cpp
@@ -47,12 +47,12 @@ const prometheus::Histogram::BucketBoundaries bytesBuckets = {
536870912, // 512M
1073741824}; // 1G

const prometheus::Histogram::BucketBoundaries ratioBuckets =
{0.0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5,
0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95, 1.0};
const prometheus::Histogram::BucketBoundaries ratioBuckets = {
0.0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5,
0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95, 1.0};

const std::unique_ptr<PrometheusClient>
prometheusClient = std::make_unique<PrometheusClient>();
const std::unique_ptr<PrometheusClient> prometheusClient =
std::make_unique<PrometheusClient>();

/******************GetMetrics*************************************************************
* !!! NOT use SUMMARY metrics here, because when parse SUMMARY metrics in Milvus,
25 changes: 11 additions & 14 deletions internal/core/src/query/SubSearchResult.h
@@ -22,12 +22,11 @@
namespace milvus::query {
class SubSearchResult {
public:
SubSearchResult(
int64_t num_queries,
int64_t topk,
const MetricType& metric_type,
int64_t round_decimal,
const std::vector<knowhere::IndexNode::IteratorPtr>& iters)
SubSearchResult(int64_t num_queries,
int64_t topk,
const MetricType& metric_type,
int64_t round_decimal,
const std::vector<knowhere::IndexNode::IteratorPtr>& iters)
: num_queries_(num_queries),
topk_(topk),
round_decimal_(round_decimal),
@@ -41,12 +41,11 @@ class SubSearchResult {
int64_t topk,
const MetricType& metric_type,
int64_t round_decimal)
: SubSearchResult(
num_queries,
topk,
metric_type,
round_decimal,
std::vector<knowhere::IndexNode::IteratorPtr>{}) {
: SubSearchResult(num_queries,
topk,
metric_type,
round_decimal,
std::vector<knowhere::IndexNode::IteratorPtr>{}) {
}

SubSearchResult(SubSearchResult&& other) noexcept
@@ -130,8 +128,7 @@ class SubSearchResult {
knowhere::MetricType metric_type_;
std::vector<int64_t> seg_offsets_;
std::vector<float> distances_;
std::vector<knowhere::IndexNode::IteratorPtr>
chunk_iterators_;
std::vector<knowhere::IndexNode::IteratorPtr> chunk_iterators_;
};

} // namespace milvus::query
2 changes: 1 addition & 1 deletion internal/core/src/query/groupby/SearchGroupByOperator.h
@@ -206,7 +206,7 @@ struct GroupByMap {
}
bool
Push(const T& t) {
if (group_map_.size() >= group_capacity_ && group_map_[t] == 0){
if (group_map_.size() >= group_capacity_ && group_map_[t] == 0) {
return false;
}
if (group_map_[t] >= group_size_) {
3 changes: 1 addition & 2 deletions internal/core/src/query/visitors/ExecPlanNodeVisitor.cpp
@@ -236,8 +236,7 @@ ExecPlanNodeVisitor::VectorVisitorImpl(VectorPlanNode& node) {
double total_cost =
std::chrono::duration<double, std::micro>(vector_end - scalar_start)
.count();
double scalar_ratio =
total_cost > 0.0 ? scalar_cost / total_cost : 0.0;
double scalar_ratio = total_cost > 0.0 ? scalar_cost / total_cost : 0.0;
monitor::internal_core_search_latency_scalar_proportion.Observe(
scalar_ratio);
}