[eclipse-iceoryx#532] Renamed DataSegment into PublisherBackend
elfenpiff committed Nov 28, 2024
1 parent bdfb8dc commit 0ef0a01
Showing 3 changed files with 21 additions and 19 deletions.
iceoryx2/src/port/publisher.rs (8 additions, 8 deletions)
@@ -240,9 +240,9 @@ pub(crate) enum RemovePubSubPortFromAllConnectionsError {
 }
 
 #[derive(Debug)]
-pub(crate) struct DataSegment<Service: service::Service> {
+pub(crate) struct PublisherBackend<Service: service::Service> {
     sample_reference_counter: Vec<IoxAtomicU64>,
-    memory: Service::SharedMemory,
+    data_segment: Service::SharedMemory,
     payload_size: usize,
     payload_type_layout: Layout,
     port_id: UniquePublisherId,
@@ -257,7 +257,7 @@ pub(crate) struct DataSegment<Service: service::Service> {
     is_active: IoxAtomicBool,
 }
 
-impl<Service: service::Service> DataSegment<Service> {
+impl<Service: service::Service> PublisherBackend<Service> {
     fn sample_index(&self, distance_to_chunk: usize) -> usize {
         distance_to_chunk / self.payload_size
     }
@@ -266,7 +266,7 @@ impl<Service: service::Service> DataSegment<Service> {
         self.retrieve_returned_samples();
 
         let msg = "Unable to allocate Sample";
-        let ptr = self.memory.allocate(layout)?;
+        let ptr = self.data_segment.allocate(layout)?;
         if self.sample_reference_counter[self.sample_index(ptr.offset.offset())]
             .fetch_add(1, Ordering::Relaxed)
             != 0
@@ -289,7 +289,7 @@ impl<Service: service::Service> DataSegment<Service> {
             == 1
         {
             unsafe {
-                self.memory
+                self.data_segment
                     .deallocate(distance_to_chunk, self.payload_type_layout);
             }
         }
@@ -538,7 +538,7 @@ pub struct Publisher<
     Payload: Debug + ?Sized + 'static,
     UserHeader: Debug,
 > {
-    pub(crate) data_segment: Arc<DataSegment<Service>>,
+    pub(crate) data_segment: Arc<PublisherBackend<Service>>,
     dynamic_publisher_handle: Option<ContainerHandle>,
     payload_size: usize,
     _payload: PhantomData<Payload>,
@@ -596,9 +596,9 @@ impl<Service: service::Service, Payload: Debug + ?Sized, UserHeader: Debug>
             "{} since the data segment could not be acquired.", msg);
 
         let max_slice_len = config.initial_max_slice_len;
-        let data_segment = Arc::new(DataSegment {
+        let data_segment = Arc::new(PublisherBackend {
             is_active: IoxAtomicBool::new(true),
-            memory: data_segment,
+            data_segment,
             payload_size: static_config
                 .message_type_details()
                 .sample_layout(config.initial_max_slice_len)
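
The rename only touches publisher internals; the user-facing API is unchanged. For orientation, a minimal publish-side sketch along the lines of the iceoryx2 publish-subscribe example (service name and payload type are illustrative, and the exact builder calls may vary between releases):

    use iceoryx2::prelude::*;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // A node owns every port that is created through it.
        let node = NodeBuilder::new().create::<ipc::Service>()?;

        // Open or create a publish-subscribe service with a u64 payload.
        let service = node
            .service_builder(&"Rename/Demo/Service".try_into()?)
            .publish_subscribe::<u64>()
            .open_or_create()?;

        // Internally the publisher holds the struct renamed here to
        // PublisherBackend, which manages the shared-memory data segment
        // and the per-chunk sample reference counters.
        let publisher = service.publisher_builder().create()?;

        // Loan a chunk from the data segment, write the payload, send it.
        let sample = publisher.loan_uninit()?;
        let sample = sample.write_payload(1234);
        sample.send()?;

        Ok(())
    }
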
iceoryx2/src/sample_mut.rs (8 additions, 6 deletions)
@@ -64,7 +64,7 @@
 //! ```
 
 use crate::{
-    port::publisher::{DataSegment, PublisherSendError},
+    port::publisher::{PublisherBackend, PublisherSendError},
     raw_sample::RawSampleMut,
     service::header::publish_subscribe::Header,
 };
@@ -87,7 +87,7 @@ use std::{
 /// Does not implement [`Send`] since it releases unsent samples in the [`crate::port::publisher::Publisher`] and the
 /// [`crate::port::publisher::Publisher`] is not thread-safe!
 pub struct SampleMut<Service: crate::service::Service, Payload: Debug + ?Sized, UserHeader> {
-    pub(crate) data_segment: Arc<DataSegment<Service>>,
+    pub(crate) publisher_backend: Arc<PublisherBackend<Service>>,
     pub(crate) ptr: RawSampleMut<Header, UserHeader, Payload>,
     pub(crate) offset_to_chunk: PointerOffset,
 }
@@ -98,11 +98,11 @@ impl<Service: crate::service::Service, Payload: Debug + ?Sized, UserHeader> Debug
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         write!(
             f,
-            "SampleMut<{}, {}, {}> {{ data_segment: {:?}, offset_to_chunk: {:?} }}",
+            "SampleMut<{}, {}, {}> {{ publisher_backend: {:?}, offset_to_chunk: {:?} }}",
             core::any::type_name::<Payload>(),
             core::any::type_name::<UserHeader>(),
             core::any::type_name::<Service>(),
-            self.data_segment,
+            self.publisher_backend,
             self.offset_to_chunk
         )
     }
@@ -112,7 +112,8 @@ impl<Service: crate::service::Service, Payload: Debug + ?Sized, UserHeader> Drop
     for SampleMut<Service, Payload, UserHeader>
 {
     fn drop(&mut self) {
-        self.data_segment.return_loaned_sample(self.offset_to_chunk);
+        self.publisher_backend
+            .return_loaned_sample(self.offset_to_chunk);
     }
 }
 
@@ -288,6 +289,7 @@ impl<
     /// # }
     /// ```
     pub fn send(self) -> Result<usize, PublisherSendError> {
-        self.data_segment.send_sample(self.offset_to_chunk.offset())
+        self.publisher_backend
+            .send_sample(self.offset_to_chunk.offset())
     }
 }
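
The hunks above all follow the same ownership pattern: every SampleMut shares the backend through an Arc and hands its chunk back when it is dropped or sent, so the backend can decrement the chunk's reference counter and eventually deallocate it. A self-contained sketch of that shape (types and names are illustrative stand-ins, not the iceoryx2 internals):

    use std::sync::{
        atomic::{AtomicU64, Ordering},
        Arc,
    };

    // Stand-in for PublisherBackend: tracks how many loaned chunks are outstanding.
    struct Backend {
        loaned: AtomicU64,
    }

    impl Backend {
        // The real code decrements the chunk's reference counter and may deallocate it.
        fn return_loaned_sample(&self, _offset: usize) {
            self.loaned.fetch_sub(1, Ordering::Relaxed);
        }
    }

    // Stand-in for SampleMut: shares the backend and returns its chunk on drop.
    struct Sample {
        backend: Arc<Backend>,
        offset: usize,
    }

    impl Drop for Sample {
        fn drop(&mut self) {
            self.backend.return_loaned_sample(self.offset);
        }
    }

    fn main() {
        let backend = Arc::new(Backend { loaned: AtomicU64::new(1) });
        let sample = Sample { backend: Arc::clone(&backend), offset: 0 };
        drop(sample); // the chunk is handed back to the backend
        assert_eq!(backend.loaned.load(Ordering::Relaxed), 0);
    }
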
iceoryx2/src/sample_mut_uninit.rs (5 additions, 5 deletions)
@@ -95,7 +95,7 @@ use std::{fmt::Debug, mem::MaybeUninit, sync::Arc};
 use iceoryx2_cal::shm_allocator::PointerOffset;
 
 use crate::{
-    port::publisher::DataSegment, raw_sample::RawSampleMut, sample_mut::SampleMut,
+    port::publisher::PublisherBackend, raw_sample::RawSampleMut, sample_mut::SampleMut,
     service::header::publish_subscribe::Header,
 };
 
@@ -261,13 +261,13 @@ impl<Service: crate::service::Service, Payload: Debug, UserHeader>
     SampleMutUninit<Service, MaybeUninit<Payload>, UserHeader>
 {
     pub(crate) fn new(
-        data_segment: &Arc<DataSegment<Service>>,
+        publisher_backend: &Arc<PublisherBackend<Service>>,
         ptr: RawSampleMut<Header, UserHeader, MaybeUninit<Payload>>,
         offset_to_chunk: PointerOffset,
     ) -> Self {
         Self {
             sample: SampleMut {
-                data_segment: Arc::clone(data_segment),
+                publisher_backend: Arc::clone(publisher_backend),
                 ptr,
                 offset_to_chunk,
             },
@@ -341,13 +341,13 @@ impl<Service: crate::service::Service, Payload: Debug, UserHeader>
     SampleMutUninit<Service, [MaybeUninit<Payload>], UserHeader>
 {
     pub(crate) fn new(
-        data_segment: &Arc<DataSegment<Service>>,
+        publisher_backend: &Arc<PublisherBackend<Service>>,
         ptr: RawSampleMut<Header, UserHeader, [MaybeUninit<Payload>]>,
         offset_to_chunk: PointerOffset,
     ) -> Self {
         Self {
             sample: SampleMut {
-                data_segment: Arc::clone(data_segment),
+                publisher_backend: Arc::clone(publisher_backend),
                 ptr,
                 offset_to_chunk,
             },
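
SampleMutUninit only forwards the backend Arc into the wrapped SampleMut; its job is to hold the payload as MaybeUninit until the user has written it once. A reduced sketch of that initialize-then-assume pattern (illustrative types, not the real API):

    use std::mem::MaybeUninit;

    // Stand-in for SampleMutUninit: the payload starts out uninitialized.
    struct UninitSample {
        payload: MaybeUninit<u64>,
    }

    // Stand-in for SampleMut: the payload is guaranteed to be initialized.
    struct InitSample {
        payload: u64,
    }

    impl UninitSample {
        // Writing the payload exactly once makes assuming initialization sound.
        fn write_payload(mut self, value: u64) -> InitSample {
            self.payload.write(value);
            // SAFETY: the payload was initialized by the write above.
            InitSample {
                payload: unsafe { self.payload.assume_init() },
            }
        }
    }

    fn main() {
        let sample = UninitSample { payload: MaybeUninit::uninit() };
        let sample = sample.write_payload(42);
        assert_eq!(sample.payload, 42);
    }
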
