
Commit

Merge branch 'master' into build/check-proto-build-setup
jdsika authored Jan 15, 2024
2 parents 807ac1a + ec82b7c commit 03a9172
Showing 20 changed files with 913 additions and 426 deletions.
86 changes: 57 additions & 29 deletions .github/workflows/protobuf.yml
@@ -1,5 +1,10 @@
name: ProtoBuf CI Builds

env:
PROTOBUF_VERSION: 3.20.1
PROTOBUF_VARIANT: '-all' # Use '-all' prior to 22.0, '' after
ABSEIL_VERSION: 20230802.1

on:
push:
pull_request:
@@ -13,7 +18,7 @@ jobs:

steps:
- name: Checkout OSI
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
submodules: true

@@ -23,12 +28,17 @@ jobs:
( result=0 ; for f in *.proto ; do grep -q "'$f'" setup.py || { echo "Missing $f in setup.py" && let "result++"; } ; done ; exit $result )
- name: Setup Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: '3.7'
python-version: '3.8'

- name: Install Python Dependencies
run: python -m pip install --upgrade pip setuptools wheel pyyaml
run: |
python -m pip install --upgrade pip
python -m pip install -r requirements_develop.txt
- name: Check black format
run: black --check --diff .

- name: Install Doxygen
run: sudo apt-get install doxygen graphviz
@@ -37,20 +47,29 @@
id: cache-depends
uses: actions/cache@v3
with:
path: protobuf-3.20.1
path: protobuf-${{ env.PROTOBUF_VERSION }}
key: ${{ runner.os }}-v2-depends

- name: Download ProtoBuf
- name: Download ProtoBuf ${{ env.PROTOBUF_VERSION }}
if: steps.cache-depends.outputs.cache-hit != 'true'
run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.20.1/protobuf-all-3.20.1.tar.gz && tar xzvf protobuf-all-3.20.1.tar.gz
run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v${{env.PROTOBUF_VERSION}}/protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz && tar xzvf protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz

- name: Build ProtoBuf
if: steps.cache-depends.outputs.cache-hit != 'true'
working-directory: protobuf-3.20.1
- name: Download Abseil ${{ env.ABSEIL_VERSION }}
if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
run: curl -OL https://github.com/abseil/abseil-cpp/archive/refs/tags/${{env.ABSEIL_VERSION}}.tar.gz && tar xzvf ${{env.ABSEIL_VERSION}}.tar.gz && rm -rf protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp && mv abseil-cpp-${{env.ABSEIL_VERSION}} protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp

- name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via autotools
if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == '-all'
working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
run: ./configure DIST_LANG=cpp --prefix=/usr && make

- name: Install ProtoBuf
working-directory: protobuf-3.20.1
- name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via cmake
if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
run: cmake -DCMAKE_CXX_STANDARD=17 -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_TESTS=OFF . && cmake --build . --config Release -j 4

- name: Install ProtoBuf ${{ env.PROTOBUF_VERSION }}
working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
run: sudo make install && sudo ldconfig

- name: Install proto2cpp
@@ -62,17 +81,17 @@ jobs:
# Versioning
- name: Get versioning
id: get_version
run: echo ::set-output name=VERSION::$(git describe --always)
run: echo "VERSION=$(git describe --always)" >> $GITHUB_OUTPUT

- name: Prepare Documentation Build
run: |
sed -i 's/PROJECT_NUMBER\s*= @VERSION_MAJOR@.@VERSION_MINOR@.@VERSION_PATCH@/PROJECT_NUMBER = master (${{ steps.get_version.outputs.VERSION }})/g' doxygen_config.cmake.in
echo "EXCLUDE_PATTERNS = */osi3/* */protobuf-3.20.1/* */proto2cpp/* */flatbuffers/*" >> doxygen_config.cmake.in
echo "EXCLUDE_PATTERNS = */osi3/* */protobuf-*/* */proto2cpp/* */flatbuffers/*" >> doxygen_config.cmake.in
echo "GENERATE_TREEVIEW = YES" >> doxygen_config.cmake.in
- name: Configure C++ Build
working-directory: build
run: cmake -D FILTER_PROTO2CPP_PY_PATH=$GITHUB_WORKSPACE/proto2cpp ..
run: cmake -D FILTER_PROTO2CPP_PY_PATH=$GITHUB_WORKSPACE/proto2cpp ${{ env.PROTOBUF_VARIANT =='' && '-DCMAKE_CXX_STANDARD=17' }} ..

- name: Build C++
working-directory: build
@@ -89,7 +108,7 @@ jobs:

- name: Archive Documentation
if: ${{ github.event_name == 'pull_request' }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: linux64-doc
path: doc/html
@@ -109,36 +128,45 @@

steps:
- name: Checkout OSI
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
submodules: true

- name: Setup Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: '3.7'
python-version: '3.8'

- name: Install Python Dependencies
run: python -m pip install --upgrade pip setuptools wheel pyyaml

- name: Cache Dependencies
id: cache-depends
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: protobuf-3.20.1
path: protobuf-${{ env.PROTOBUF_VERSION }}
key: ${{ runner.os }}-v2-depends

- name: Download ProtoBuf
- name: Download ProtoBuf ${{ env.PROTOBUF_VERSION }}
if: steps.cache-depends.outputs.cache-hit != 'true'
run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.20.1/protobuf-all-3.20.1.tar.gz && tar xzvf protobuf-all-3.20.1.tar.gz
run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v${{env.PROTOBUF_VERSION}}/protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz && tar xzvf protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz

- name: Build ProtoBuf
if: steps.cache-depends.outputs.cache-hit != 'true'
working-directory: protobuf-3.20.1
- name: Download Abseil ${{ env.ABSEIL_VERSION }}
if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
run: curl -OL https://github.com/abseil/abseil-cpp/archive/refs/tags/${{env.ABSEIL_VERSION}}.tar.gz && tar xzvf ${{env.ABSEIL_VERSION}}.tar.gz && rm -rf protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp && mv abseil-cpp-${{env.ABSEIL_VERSION}} protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp

- name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via autotools
if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == '-all'
working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
run: ./configure DIST_LANG=cpp --prefix=/usr && make

- name: Install ProtoBuf
working-directory: protobuf-3.20.1
- name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via cmake
if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
run: cmake -DCMAKE_CXX_STANDARD=17 -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_TESTS=OFF . && cmake --build . --config Release -j 4

- name: Install ProtoBuf ${{ env.PROTOBUF_VERSION }}
working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
run: sudo make install && sudo ldconfig

- name: Prepare C++ Build
@@ -151,7 +179,7 @@ jobs:
- name: Configure C++ Build
working-directory: build
run: cmake ..
run: cmake ${{ env.PROTOBUF_VARIANT =='' && '-DCMAKE_CXX_STANDARD=17' }} ..

- name: Build C++
working-directory: build
2 changes: 2 additions & 0 deletions .gitignore
@@ -32,6 +32,8 @@ compile_commands.json

# Python-generated files
__pycache__/
.venv/
venv/
*.py[cod]
proto2cpp.log
.clang-format
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -9,7 +9,7 @@ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
endif()

# Set the C++ standard
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD 11 CACHE STRING "C++ standard to be used")
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Optional Flatbuffer support
73 changes: 44 additions & 29 deletions format/OSITrace.py
@@ -10,9 +10,10 @@
from osi3.osi_groundtruth_pb2 import GroundTruth
from osi3.osi_sensordata_pb2 import SensorData
import warnings
warnings.simplefilter('default')

SEPARATOR = b'$$__$$'
warnings.simplefilter("default")

SEPARATOR = b"$$__$$"
SEPARATOR_LENGTH = len(SEPARATOR)
BUFFER_SIZE = 1000000

@@ -31,7 +32,7 @@ def get_size_from_file_stream(file_object):
MESSAGES_TYPE = {
"SensorView": SensorView,
"GroundTruth": GroundTruth,
"SensorData": SensorData
"SensorData": SensorData,
}


@@ -49,15 +50,15 @@ def __init__(self, path=None, type_name="SensorView"):
def from_file(self, path, type_name="SensorView", max_index=-1, format_type=None):
"""Import a scenario from a file"""

if path.lower().endswith(('.lzma', '.xz')):
if path.lower().endswith((".lzma", ".xz")):
self.scenario_file = lzma.open(path, "rb")
else:
self.scenario_file = open(path, "rb")

self.type_name = type_name
self.format_type = format_type

if self.format_type == 'separated':
if self.format_type == "separated":
# warnings.warn("The separated trace files will be completely removed in the near future. Please convert them to *.osi files with the converter in the main OSI repository.", PendingDeprecationWarning)
self.timestep_count = self.retrieve_message_offsets(max_index)
else:
@@ -73,7 +74,7 @@ def retrieve_message_offsets(self, max_index):
scenario_size = get_size_from_file_stream(self.scenario_file)

if max_index == -1:
max_index = float('inf')
max_index = float("inf")

buffer_deque = deque(maxlen=2)

@@ -100,7 +101,7 @@
self.scenario_file.seek(message_offset)

while eof and found != -1:
buffer = buffer[found + SEPARATOR_LENGTH:]
buffer = buffer[found + SEPARATOR_LENGTH :]
found = buffer.find(SEPARATOR)

buffer_offset = scenario_size - len(buffer)
@@ -126,7 +127,7 @@ def retrieve_message(self):
self.message_offsets = [0]
eof = False

# TODO Implement buffering for the scenarios
# TODO Implement buffering for the scenarios
self.scenario_file.seek(0)
serialized_message = self.scenario_file.read()
INT_LENGTH = len(struct.pack("<L", 0))
@@ -135,8 +136,12 @@
i = 0
while i < len(serialized_message):
message = MESSAGES_TYPE[self.type_name]()
message_length = struct.unpack("<L", serialized_message[i:INT_LENGTH+i])[0]
message.ParseFromString(serialized_message[i+INT_LENGTH:i+INT_LENGTH+message_length])
message_length = struct.unpack(
"<L", serialized_message[i : INT_LENGTH + i]
)[0]
message.ParseFromString(
serialized_message[i + INT_LENGTH : i + INT_LENGTH + message_length]
)
i += message_length + INT_LENGTH
self.message_offsets.append(i)

@@ -153,7 +158,7 @@ def get_message_by_index(self, index):
Get a message by its index. Try first to get it from the cache made
by the method ``cache_messages_in_index_range``.
"""
return next(self.get_messages_in_index_range(index, index+1))
return next(self.get_messages_in_index_range(index, index + 1))

def get_messages(self):
return self.get_messages_in_index_range(0, len(self.message_offsets))
@@ -164,26 +169,28 @@ def get_messages_in_index_range(self, begin, end):
"""
self.scenario_file.seek(self.message_offsets[begin])
abs_first_offset = self.message_offsets[begin]
abs_last_offset = self.message_offsets[end] \
if end < len(self.message_offsets) \
abs_last_offset = (
self.message_offsets[end]
if end < len(self.message_offsets)
else self.retrieved_scenario_size
)

rel_message_offsets = [
abs_message_offset - abs_first_offset
for abs_message_offset in self.message_offsets[begin:end]
]

if self.format_type == "separated":
message_sequence_len = abs_last_offset - \
abs_first_offset - SEPARATOR_LENGTH
serialized_messages_extract = self.scenario_file.read(
message_sequence_len)
message_sequence_len = abs_last_offset - abs_first_offset - SEPARATOR_LENGTH
serialized_messages_extract = self.scenario_file.read(message_sequence_len)

for rel_index, rel_message_offset in enumerate(rel_message_offsets):
rel_begin = rel_message_offset
rel_end = rel_message_offsets[rel_index + 1] - SEPARATOR_LENGTH \
if rel_index + 1 < len(rel_message_offsets) \
rel_end = (
rel_message_offsets[rel_index + 1] - SEPARATOR_LENGTH
if rel_index + 1 < len(rel_message_offsets)
else message_sequence_len
)
message = MESSAGES_TYPE[self.type_name]()
serialized_message = serialized_messages_extract[rel_begin:rel_end]
message.ParseFromString(serialized_message)
@@ -212,27 +219,35 @@ def get_messages_in_index_range(self, begin, end):

def make_readable(self, name, interval=None, index=None):
self.scenario_file.seek(0)
serialized_message = self.scenario_file.read()
serialized_message = self.scenario_file.read()
message_length = len(serialized_message)

if message_length > 1000000000:
# Throw a warning if trace file is bigger than 1GB
gb_size_input = round(message_length/1000000000, 2)
gb_size_output = round(3.307692308*message_length/1000000000, 2)
warnings.warn(f"The trace file you are trying to make readable has the size {gb_size_input}GB. This will generate a readable file with the size {gb_size_output}GB. Make sure you have enough disc space and memory to read the file with your text editor.", ResourceWarning)

with open(name, 'a') as f:

gb_size_input = round(message_length / 1000000000, 2)
gb_size_output = round(3.307692308 * message_length / 1000000000, 2)
warnings.warn(
f"The trace file you are trying to make readable has the size {gb_size_input}GB. This will generate a readable file with the size {gb_size_output}GB. Make sure you have enough disc space and memory to read the file with your text editor.",
ResourceWarning,
)

with open(name, "a") as f:
if interval is None and index is None:
for i in self.get_messages():
f.write(str(i))

if interval is not None and index is None:
if type(interval) == tuple and len(interval) == 2 and interval[0]<interval[1]:
if (
type(interval) == tuple
and len(interval) == 2
and interval[0] < interval[1]
):
for i in self.get_messages_in_index_range(interval[0], interval[1]):
f.write(str(i))
else:
raise Exception("Argument 'interval' needs to be a tuple of length 2! The first number must be smaller then the second.")
raise Exception(
"Argument 'interval' needs to be a tuple of length 2! The first number must be smaller then the second."
)

if interval is None and index is not None:
if type(index) == int:
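For orientation, and not part of this commit: a minimal usage sketch of the OSITrace class as it stands after the reformatting above, including the "<L" length-prefix framing that retrieve_message() expects. The file names are hypothetical placeholders, and the sketch assumes format/OSITrace.py and the osi3 Python bindings are importable.

import struct

from OSITrace import OSITrace
from osi3.osi_sensorview_pb2 import SensorView

# Write a single length-prefixed SensorView message:
# a little-endian "<L" length field followed by the serialized payload.
sv = SensorView()
sv.timestamp.seconds = 3
with open("example_sensorview.osi", "wb") as f:
    payload = sv.SerializeToString()
    f.write(struct.pack("<L", len(payload)) + payload)

# Read the trace back through OSITrace and dump a human-readable copy
# (see the 1 GB size warning added in make_readable above).
trace = OSITrace()
trace.from_file(path="example_sensorview.osi", type_name="SensorView")
for message in trace.get_messages():
    print(message.timestamp)
trace.make_readable("example_sensorview.txt")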
(The remaining 16 changed files are not shown in this view.)
