Skip to content

Commit

Permalink
Added logging storage in MacOS and Windows, update the way Zookeeper/…
Browse files Browse the repository at this point in the history
…Kafka starts in Windows (#252)

* Added logging storage in macOS and Windows, updated the way Zookeeper/Kafka starts in Windows

* Remove default and use ValueBuffer.Empty

* Distinguish cache between run and attempts to avoid wrong cancellation of previous jobs

* Store logs in case of cancellation: used to understand some stuck conditions
  • Loading branch information
masesdevelopers authored Jun 27, 2024
1 parent f4e9865 commit e401f4e
Show file tree
Hide file tree
Showing 6 changed files with 153 additions and 45 deletions.
79 changes: 44 additions & 35 deletions .github/workflows/build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,10 @@ jobs:
- name: Recompile to create nuget packages
run: dotnet build --no-incremental --configuration Release /p:Platform="Any CPU" src\net\KEFCore.sln

- name: List NuGet packages to avoid download of artifact
shell: cmd
run: dir .\bin\*nupkg

- uses: actions/upload-artifact@v4
with:
name: KEFCore
Expand All @@ -80,22 +84,24 @@ jobs:
run: |
Copy-Item .github\workflows\zookeeper.properties -Destination bin\net6.0\zookeeper.properties -Force
Copy-Item .github\workflows\server.properties -Destination bin\net6.0\server.properties -Force
Copy-Item .github\workflows\log4j.properties -Destination bin\net6.0\log4j.properties -Force
Copy-Item .github\workflows\zookeeper.properties -Destination bin\net8.0\zookeeper.properties -Force
Copy-Item .github\workflows\server.properties -Destination bin\net8.0\server.properties -Force
Copy-Item .github\workflows\log4j.properties -Destination bin\net8.0\log4j.properties -Force
- name: Save KEFCore net6.0 bin in cache
uses: actions/cache/save@v4
with:
enableCrossOsArchive: true
path: ./bin/net6.0/
key: KEFCore_net6.0_bin_${{ github.sha }}
key: KEFCore_${{ github.run_number }}_${{ github.run_attempt }}_net6.0_bin_${{ github.sha }}

- name: Save KEFCore net8.0 bin in cache
uses: actions/cache/save@v4
with:
enableCrossOsArchive: true
path: ./bin/net8.0/
key: KEFCore_net8.0_bin_${{ github.sha }}
key: KEFCore_${{ github.run_number }}_${{ github.run_attempt }}_net8.0_bin_${{ github.sha }}

execute_tests_linux:
needs: build_windows
Expand Down Expand Up @@ -130,25 +136,25 @@ jobs:
fail-on-cache-miss: true
enableCrossOsArchive: true
path: ./bin/${{ matrix.framework }}/
key: KEFCore_${{ matrix.framework }}_bin_${{ github.sha }}
key: KEFCore_${{ github.run_number }}_${{ github.run_attempt }}_${{ matrix.framework }}_bin_${{ github.sha }}

- name: Set up JDK distribution
uses: actions/setup-java@v4
with: # running setup-java again overwrites the settings.xml
distribution: ${{ matrix.jdk_vendor }}
java-version: ${{ matrix.jdk_version }}

- name: Executing KNetReplicator Benchmark on Ubuntu with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
- name: Execute KNetReplicator Benchmark on Ubuntu with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetReplicator.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

- name: Executing KNetStreams Raw Benchmark on Ubuntu with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
- name: Execute KNetStreams Raw Benchmark on Ubuntu with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetStreams.Raw.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

- name: Executing KNetStreams Buffered Benchmark on Ubuntu with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
- name: Execute KNetStreams Buffered Benchmark on Ubuntu with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetStreams.Buffered.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}
Expand Down Expand Up @@ -178,7 +184,7 @@ jobs:
fail-on-cache-miss: true
enableCrossOsArchive: true
path: ./bin/${{ matrix.framework }}/
key: KEFCore_${{ matrix.framework }}_bin_${{ github.sha }}
key: KEFCore_${{ github.run_number }}_${{ github.run_attempt }}_${{ matrix.framework }}_bin_${{ github.sha }}

- name: Set up JDK distribution
uses: actions/setup-java@v4
Expand All @@ -193,50 +199,53 @@ jobs:
run: dotnet tool update -g MASES.KNetCLI

- name: Start Kafka on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
if: ${{ matrix.os != 'windows-latest' }}
shell: pwsh
run: |
Start-Process -FilePath knet -ArgumentList ( 'zookeeperstart', '${{ github.workspace }}/bin/${{ matrix.framework }}/zookeeper.properties' )
Start-Process -FilePath knet -ArgumentList ( 'kafkastart', '${{ github.workspace }}/bin/${{ matrix.framework }}/server.properties' )
New-Item -Path "${{ github.workspace }}/" -Name "logfiles" -ItemType Directory
Start-Process -RSE ${{ github.workspace }}/logfiles/PWSH_zookeeper_err.log -RSO ${{ github.workspace }}/logfiles/PWSH_zookeeper_out.log -FilePath knet -ArgumentList ( 'zookeeperstart', '-LogPath', '${{ github.workspace }}/logfiles/', '-Log4JConfiguration', '${{ github.workspace }}/bin/${{ matrix.framework }}/log4j.properties', '${{ github.workspace }}/bin/${{ matrix.framework }}/zookeeper.properties' )
Start-Process -RSE ${{ github.workspace }}/logfiles/PWSH_kafka_err.log -RSO ${{ github.workspace }}/logfiles/PWSH_kafka_out.log -FilePath knet -ArgumentList ( 'kafkastart', '-LogPath', '${{ github.workspace }}/logfiles/', '-Log4JConfiguration', '${{ github.workspace }}/bin/${{ matrix.framework }}/log4j.properties', '${{ github.workspace }}/bin/${{ matrix.framework }}/server.properties' )
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

- name: Executing KNetReplicator Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
- name: Execute KNetReplicator Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
if: ${{ matrix.os != 'windows-latest' }}
run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetReplicator.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

- name: Executing KNetStreams Raw Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
shell: pwsh
- name: Execute KNetStreams Raw Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
if: ${{ matrix.os != 'windows-latest' }}
run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetStreams.Raw.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

- name: Executing KNetStreams Buffered Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
shell: pwsh
- name: Execute KNetStreams Buffered Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
if: ${{ matrix.os != 'windows-latest' }}
run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetStreams.Buffered.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

#- name: Start ZooKeeper and Apache Kafka
# shell: pwsh
# run: |
#	knet zookeeperstart ${{ github.workspace }}/.github/workflows/zookeeper.properties &
#	knet kafkastart ${{ github.workspace }}/.github/workflows/server.properties &
# env:
# JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

#- name: Executing MASES.EntityFrameworkCore.KNet.Test.Benchmark on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
# if: ${{ matrix.os == 'macos-latest' || matrix.os == 'macos-13' }}
# run: dotnet ${{ github.workspace }}/bin/${{ matrix.framework }}/MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}/bin/${{ matrix.framework }}/Benchmark.KNetStreams.json localhost:9092
# env:
# JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}
#
#- name: Executing MASES.EntityFrameworkCore.KNet.Test.Benchmark on Windows with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
# if: ${{ matrix.os == 'windows-latest' }}
# run: dotnet ${{ github.workspace }}\bin\${{ matrix.framework }}\MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}\bin\${{ matrix.framework }}\Benchmark.KNetStreams.json localhost:9092
# env:
# JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}
- name: WINDOWS ONLY - Start Kafka and execute tests on ${{ matrix.os }} with ${{ matrix.jdk_vendor }} ${{ matrix.jdk_version }}
if: ${{ matrix.os == 'windows-latest' }}
shell: pwsh
run: |
New-Item -Path "${{ github.workspace }}/" -Name "logfiles" -ItemType Directory
Start-Process -RSE ${{ github.workspace }}\logfiles\PWSH_zookeeper_err.log -RSO ${{ github.workspace }}\logfiles\PWSH_zookeeper_out.log -FilePath knet -ArgumentList ( 'zookeeperstart', '-LogPath', '${{ github.workspace }}\logfiles\', '-Log4JConfiguration', '${{ github.workspace }}\bin\${{ matrix.framework }}\log4j.properties', '${{ github.workspace }}\bin\${{ matrix.framework }}\zookeeper.properties' )
Start-Process -RSE ${{ github.workspace }}\logfiles\PWSH_kafka_err.log -RSO ${{ github.workspace }}\logfiles\PWSH_kafka_out.log -FilePath knet -ArgumentList ( 'kafkastart', '-LogPath', '${{ github.workspace }}\logfiles\', '-Log4JConfiguration', '${{ github.workspace }}\bin\${{ matrix.framework }}\log4j.properties', '${{ github.workspace }}\bin\${{ matrix.framework }}\server.properties' )
dotnet ${{ github.workspace }}\bin\${{ matrix.framework }}\MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}\bin\${{ matrix.framework }}\Benchmark.KNetReplicator.json localhost:9092
dotnet ${{ github.workspace }}\bin\${{ matrix.framework }}\MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}\bin\${{ matrix.framework }}\Benchmark.KNetStreams.Raw.json localhost:9092
dotnet ${{ github.workspace }}\bin\${{ matrix.framework }}\MASES.EntityFrameworkCore.KNet.Test.Benchmark.dll ${{ github.workspace }}\bin\${{ matrix.framework }}\Benchmark.KNetStreams.Buffered.json localhost:9092
env:
JCOBRIDGE_LicensePath: ${{ secrets.JCOBRIDGE_ONLINE }}

- uses: actions/upload-artifact@v4
if: ${{ failure() || cancelled() }}
with:
name: KEFCore_Server_${{ matrix.os }}_${{ matrix.framework }}_${{ matrix.jdk_vendor }}_${{ matrix.jdk_version }}
path: ${{ github.workspace }}/logfiles/
retention-days: 7

final_cleanup:
needs: [ execute_tests_linux, execute_tests_other ]
if: "always()"
Expand All @@ -252,7 +261,7 @@ jobs:
run: |
gh extension install actions/gh-actions-cache
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list --key KEFCore_ )
cacheKeysForPR=$(gh actions-cache list --key KEFCore_${{ github.run_number }}_${{ github.run_attempt }} | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
Expand Down
96 changes: 96 additions & 0 deletions .github/workflows/log4j.properties
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Unspecified loggers and loggers with additivity=true output to server.log and stdout
# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
log4j.rootLogger=INFO, stdout, kafkaAppender

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Change the line below to adjust ZK client logging
log4j.logger.org.apache.zookeeper=INFO

# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=INFO
log4j.logger.org.apache.kafka=INFO

# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false

# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

# Change the line below to adjust KRaft mode controller logging
log4j.logger.org.apache.kafka.controller=INFO, controllerAppender
log4j.additivity.org.apache.kafka.controller=false

# Change the line below to adjust ZK mode controller logging
log4j.logger.kafka.controller=TRACE, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=INFO, stateChangeAppender
log4j.additivity.state.change.logger=false

# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false

3 changes: 3 additions & 0 deletions .github/workflows/server.properties
Original file line number Diff line number Diff line change
Expand Up @@ -137,3 +137,6 @@ zookeeper.connection.timeout.ms=18000
# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing.
# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup.
group.initial.rebalance.delay.ms=0

# Disable the log cleaner to avoid file-locking problems on Windows
log.cleaner.enable=false
6 changes: 3 additions & 3 deletions src/net/KEFCore/Storage/Internal/EntityTypeProducer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ public KNetCompactedReplicatorEnumerator(IEntityType entityType, IKNetCompactedR
_enumerator = _kafkaCompactedReplicator?.GetEnumerator();
}

ValueBuffer? _current = null;
ValueBuffer _current = ValueBuffer.Empty;

public ValueBuffer Current
{
Expand All @@ -93,7 +93,7 @@ public ValueBuffer Current
{
_currentSw.Start();
#endif
return _current ?? default;
return _current;
#if DEBUG_PERFORMANCE
}
finally
Expand Down Expand Up @@ -139,7 +139,7 @@ public bool MoveNext()
_current = new ValueBuffer(array);
return true;
}
_current = null;
_current = ValueBuffer.Empty;
return false;
#if DEBUG_PERFORMANCE
}
Expand Down
8 changes: 4 additions & 4 deletions src/net/KEFCore/Storage/Internal/KNetStreamsRetriever.cs
Original file line number Diff line number Diff line change
Expand Up @@ -344,7 +344,7 @@ public KafkaEnumerator(IKafkaCluster kafkaCluster, IEntityType entityType, KeyVa
#endif
}

ValueBuffer? _current = null;
ValueBuffer _current = ValueBuffer.Empty;

public ValueBuffer Current
{
Expand All @@ -355,7 +355,7 @@ public ValueBuffer Current
{
_currentSw.Start();
#endif
return _current ?? default;
return _current;
#if DEBUG_PERFORMANCE
}
finally
Expand Down Expand Up @@ -421,7 +421,7 @@ public bool MoveNext()

return true;
}
_current = null;
_current = ValueBuffer.Empty;
return false;
#if DEBUG_PERFORMANCE
}
Expand Down Expand Up @@ -471,7 +471,7 @@ public ValueTask<bool> MoveNextAsync()

return ValueTask.FromResult(true);
}
_current = null;
_current = ValueBuffer.Empty;
return ValueTask.FromResult(false);
#if DEBUG_PERFORMANCE
}
Expand Down
6 changes: 3 additions & 3 deletions src/net/KEFCore/Storage/Internal/KafkaStreamsBaseRetriever.cs
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ public KafkaEnumerator(IKafkaCluster kafkaCluster, IEntityType entityType, ISerD
#endif
}

ValueBuffer? _current = null;
ValueBuffer _current = ValueBuffer.Empty;

public ValueBuffer Current
{
Expand All @@ -319,7 +319,7 @@ public ValueBuffer Current
{
_currentSw.Start();
#endif
return _current ?? default;
return _current;
#if DEBUG_PERFORMANCE
}
finally
Expand Down Expand Up @@ -380,7 +380,7 @@ public bool MoveNext()

return true;
}
_current = null;
_current = ValueBuffer.Empty;
return false;
#if DEBUG_PERFORMANCE
}
Expand Down

0 comments on commit e401f4e

Please sign in to comment.