Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add send_batch method to kafka mod #324

Merged
merged 6 commits into from
Jan 3, 2025
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions crates/mod-kafka/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,6 @@ duration-serde = {path="../duration-serde"}
mlua = {workspace=true, features=["vendored", "lua54", "async", "send", "serialize"]}
rdkafka = {workspace=true}
serde = {workspace=true}
futures = {workspace=true}
tokio = {workspace=true, features=["full"]}
tracing = {workspace=true}
wez marked this conversation as resolved.
Show resolved Hide resolved
65 changes: 65 additions & 0 deletions crates/mod-kafka/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use config::{any_err, get_or_create_sub_module};
use futures::stream::FuturesOrdered;
use futures::StreamExt;
use mlua::prelude::LuaUserData;
use mlua::{Lua, LuaSerdeExt, UserDataMethods, Value};
use rdkafka::message::{Header, OwnedHeaders};
Expand Down Expand Up @@ -94,6 +96,69 @@ impl LuaUserData for Producer {
Ok((partition, offset))
});

// Send a batch of records to Kafka concurrently.
//
// Each element of `values` is deserialized into a `Record` and submitted via
// the shared producer.  Sends are spawned as independent tasks so deliveries
// proceed concurrently; `FuturesOrdered` yields their results in submission
// order, which is what lets failures be mapped back to input positions.
//
// Returns Lua `nil` when every record was delivered, otherwise a Lua array
// holding the 1-based indexes (Lua array convention) of the records that
// failed.
methods.add_async_method("send_batch", |lua, this, values: Vec<Value>| async move {
    let mut tasks = FuturesOrdered::new();
    let producer = this.get_producer()?;

    for value in values {
        let record: Record = lua.from_value(value)?;

        // Build rdkafka headers only when the record actually has some;
        // an empty map is represented as no headers at all.
        let headers = if record.headers.is_empty() {
            None
        } else {
            let mut headers = OwnedHeaders::new();
            for (key, v) in &record.headers {
                headers = headers.insert(Header {
                    key,
                    value: Some(v),
                });
            }
            Some(headers)
        };

        // Clone the producer handle so each spawned task owns one.
        let producer = producer.clone();

        tasks.push_back(tokio::spawn(async move {
            producer
                .send(
                    FutureRecord {
                        topic: &record.topic,
                        partition: record.partition,
                        payload: record.payload.as_ref(),
                        key: record.key.as_ref(),
                        headers,
                        timestamp: None,
                    },
                    // Default to a 60s delivery timeout when the record
                    // does not specify one.
                    Timeout::After(record.timeout.unwrap_or(Duration::from_secs(60))),
                )
                .await
        }));
    }

    let mut failed_indexes = vec![];

    // `enumerate` gives the 0-based submission position; convert to the
    // 1-based index expected on the Lua side.
    let mut results = tasks.enumerate();
    while let Some((i, result)) = results.next().await {
        let index = i + 1;
        match result {
            // Delivered successfully.
            Ok(Ok(_)) => {}
            // The producer reported a delivery error for this record.
            Ok(Err((error, _msg))) => {
                tracing::error!("Error sending to kafka {:?}", error);
                failed_indexes.push(index);
            }
            // The spawned task itself failed (e.g. it panicked).
            Err(error) => {
                tracing::error!("Error sending to kafka {:?}", error);
                failed_indexes.push(index);
            }
        }
    }
    if failed_indexes.is_empty() {
        Ok(Value::Nil)
    } else {
        Ok(lua.to_value(&failed_indexes)?)
    }
});

methods.add_method("close", |_lua, this, _: ()| {
this.producer.lock().unwrap().take();
Ok(())
Expand Down
Loading