add md5 function #29

Open · wants to merge 4 commits into base: main
35 changes: 33 additions & 2 deletions levi/__init__.py
@@ -8,7 +8,7 @@
import pyarrow as pa
from pyarrow.interchange.from_dataframe import DataFrameObject
import pyarrow.compute as pc

from hashlib import md5

def skipped_stats(delta_table, filters):
    df = delta_table.get_add_actions(flatten=True).to_pandas()
@@ -423,4 +423,35 @@ def drop_duplicates_pkey(delta_table: DeltaTable, primary_key: str, duplication_
        delta_table,
        deduped_pyarrow_table,
        mode="overwrite"
    )


def append_md5_column(delta_table: DeltaTable, cols: List[str]) -> None:
    """
    Append a column containing the MD5 hash of the given columns to a Delta table.

    The values in cols are cast to strings, joined with "||", and hashed with MD5.
    The hex digest is written back as a new column named md5_<col1>_<col2>_... and
    the table is overwritten in place.

    :param delta_table: the Delta table to append the hash column to
    :type delta_table: DeltaTable
    :param cols: the columns to concatenate and hash
    :type cols: List[str]

    :returns: None
    :rtype: None
    """
    col_name = "md5_" + "_".join(cols)

    table = delta_table.to_pyarrow_table()
    # Cast each requested column to string and join the values with "||"
    # to form the string that gets hashed.
    tmp_table = table.append_column(
        "levi_tmp_concat_col",
        pc.binary_join_element_wise(  # type: ignore
            *[pc.cast(table[col], pa.string()) for col in cols],
            "||",
        ),
    )
    concat_values = tmp_table.select(["levi_tmp_concat_col"]).to_pylist()
    md5_hash_values = [
        md5(i["levi_tmp_concat_col"].encode("utf-8")).hexdigest() for i in concat_values
    ]
    md5_table = table.append_column(col_name, [md5_hash_values])

    # Overwrite the table (and its schema) with the extra MD5 column appended.
    write_deltalake(delta_table, md5_table, mode="overwrite", overwrite_schema=True)
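
For context, a minimal usage sketch of the new helper (the table path and column names here are hypothetical; it assumes the pyarrow and deltalake packages this repo already depends on):

import pyarrow as pa
from deltalake import DeltaTable, write_deltalake
import levi

# Write a small demo table to a local path (hypothetical).
write_deltalake("/tmp/levi_md5_demo", pa.table({"id": [1, 2], "name": ["a", "b"]}))

# Append an MD5 column computed from the "id" and "name" columns.
levi.append_md5_column(DeltaTable("/tmp/levi_md5_demo"), ["id", "name"])

# The rewritten table now has an extra "md5_id_name" column holding the
# hex digests of "1||a" and "2||b".
print(DeltaTable("/tmp/levi_md5_demo").to_pyarrow_table().column_names)
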
215 changes: 214 additions & 1 deletion tests/test_public_interface.py
@@ -6,7 +6,7 @@
import pandas as pd
import random
import pytest

import hashlib

def test_skipped_stats():
delta_table = DeltaTable("./tests/reader_tests/generated/basic_append/delta")
@@ -942,3 +942,216 @@ def test_drop_duplicates_pkey_raises_errors(tmp_path):
        levi.drop_duplicates_pkey(delta_table, "col1", "col2")  # Wrong duplication_cols type
        levi.drop_duplicates_pkey(delta_table, 1, ["col1","col2"])  # Wrong primary_key type provided


def test_append_md5_generates_expected_hashes_int_string(tmp_path: Path):
    path = tmp_path / "append_md5_int_string"

    initial_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
        ]
    )
    expected_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
            ("md5_col1_col2", pa.string())
        ]
    )

    initial_data = {
        "col1": [1, 2, 3, 4, 5, 6, 9],
        "col2": ["A", "A", "A", "A", "B", "D", "B"],
        "col3": ["A", "B", "A", "A", "B", "D", "B"],
        "col4": ["C", "C", "D", "E", "C", "C", "E"],
    }

    n_rows = len(initial_data["col1"])
    expected_data = initial_data
    expected_md5_values = []
    for i in range(n_rows):
        concat_val = str(initial_data["col1"][i]) + "||" + str(initial_data["col2"][i])
        hash_val = hashlib.md5(concat_val.encode("utf-8")).hexdigest()
        expected_md5_values.append(hash_val)

    expected_data["md5_col1_col2"] = expected_md5_values

    pyarrow_table = pa.Table.from_pydict(initial_data, schema=initial_schema)
    write_deltalake(path, pyarrow_table)

    delta_table = DeltaTable(path)
    levi.append_md5_column(delta_table, ["col1", "col2"])

    actual_delta_table = DeltaTable(path)
    actual_pyarrow_table = actual_delta_table.to_pyarrow_table()
    expected_pyarrow_table = pa.Table.from_pydict(
        expected_data,
        schema=expected_schema,
    )

    assert actual_pyarrow_table == expected_pyarrow_table

def test_append_md5_generates_expected_hashes_string_string(tmp_path: Path):
    path = tmp_path / "append_md5_string_string"

    initial_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
        ]
    )
    expected_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
            ("md5_col3_col4", pa.string())
        ]
    )

    initial_data = {
        "col1": [1, 2, 3, 4, 5, 6, 9],
        "col2": ["A", "A", "A", "A", "B", "D", "B"],
        "col3": ["A", "B", "A", "A", "B", "D", "B"],
        "col4": ["C", "C", "D", "E", "C", "C", "E"],
    }

    n_rows = len(initial_data["col1"])
    expected_data = initial_data
    expected_md5_values = []
    for i in range(n_rows):
        concat_val = str(initial_data["col3"][i]) + "||" + str(initial_data["col4"][i])
        hash_val = hashlib.md5(concat_val.encode("utf-8")).hexdigest()
        expected_md5_values.append(hash_val)

    expected_data["md5_col3_col4"] = expected_md5_values

    pyarrow_table = pa.Table.from_pydict(initial_data, schema=initial_schema)
    write_deltalake(path, pyarrow_table)

    delta_table = DeltaTable(path)
    levi.append_md5_column(delta_table, ["col3", "col4"])

    actual_delta_table = DeltaTable(path)
    actual_pyarrow_table = actual_delta_table.to_pyarrow_table()
    expected_pyarrow_table = pa.Table.from_pydict(
        expected_data,
        schema=expected_schema,
    )

    assert actual_pyarrow_table == expected_pyarrow_table

def test_append_md5_generates_expected_hashes_string_3(tmp_path: Path):
    path = tmp_path / "append_md5_string3"

    initial_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
        ]
    )
    expected_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
            ("md5_col2_col3_col4", pa.string())
        ]
    )

    initial_data = {
        "col1": [1, 2, 3, 4, 5, 6, 9],
        "col2": ["A", "A", "A", "A", "B", "D", "B"],
        "col3": ["A", "B", "A", "A", "B", "D", "B"],
        "col4": ["C", "C", "D", "E", "C", "C", "E"],
    }

    n_rows = len(initial_data["col1"])
    expected_data = initial_data
    expected_md5_values = []
    for i in range(n_rows):
        concat_val = str(initial_data["col2"][i]) + "||" + str(initial_data["col3"][i]) + "||" + str(initial_data["col4"][i])
        hash_val = hashlib.md5(concat_val.encode("utf-8")).hexdigest()
        expected_md5_values.append(hash_val)

    expected_data["md5_col2_col3_col4"] = expected_md5_values

    pyarrow_table = pa.Table.from_pydict(initial_data, schema=initial_schema)
    write_deltalake(path, pyarrow_table)

    delta_table = DeltaTable(path)
    levi.append_md5_column(delta_table, ["col2", "col3", "col4"])

    actual_delta_table = DeltaTable(path)
    actual_pyarrow_table = actual_delta_table.to_pyarrow_table()
    expected_pyarrow_table = pa.Table.from_pydict(
        expected_data,
        schema=expected_schema,
    )

    assert actual_pyarrow_table == expected_pyarrow_table


def test_append_md5_generates_expected_hashes_string(tmp_path: Path):
    path = tmp_path / "append_md5_string"

    initial_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
        ]
    )
    expected_schema = pa.schema(
        [
            ("col1", pa.int64()),
            ("col2", pa.string()),
            ("col3", pa.string()),
            ("col4", pa.string()),
            ("md5_col2", pa.string())
        ]
    )

    initial_data = {
        "col1": [1, 2, 3, 4, 5, 6, 9],
        "col2": ["A", "A", "A", "A", "B", "D", "B"],
        "col3": ["A", "B", "A", "A", "B", "D", "B"],
        "col4": ["C", "C", "D", "E", "C", "C", "E"],
    }

    n_rows = len(initial_data["col1"])
    expected_data = initial_data
    expected_md5_values = []
    for i in range(n_rows):
        concat_val = str(initial_data["col2"][i])
        hash_val = hashlib.md5(concat_val.encode("utf-8")).hexdigest()
        expected_md5_values.append(hash_val)

    expected_data["md5_col2"] = expected_md5_values

    pyarrow_table = pa.Table.from_pydict(initial_data, schema=initial_schema)
    write_deltalake(path, pyarrow_table)

    delta_table = DeltaTable(path)
    levi.append_md5_column(delta_table, ["col2"])

    actual_delta_table = DeltaTable(path)
    actual_pyarrow_table = actual_delta_table.to_pyarrow_table()
    expected_pyarrow_table = pa.Table.from_pydict(
        expected_data,
        schema=expected_schema,
    )

    assert actual_pyarrow_table == expected_pyarrow_table