Support fit_partial() for LightFM #223

Merged: 6 commits, Dec 10, 2024
CHANGELOG.md (1 addition, 0 deletions)
@@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - `load_model` function ([#213](https://github.com/MobileTeleSystems/RecTools/pull/213))
 - `model_from_config` function ([#214](https://github.com/MobileTeleSystems/RecTools/pull/214))
 - `get_cat_features` method to `SparseFeatures` ([#221](https://github.com/MobileTeleSystems/RecTools/pull/221))
+- Support `fit_partial()` for LightFM ([#223](https://github.com/MobileTeleSystems/RecTools/pull/223))
 - LightFM Python 3.12+ support ([#224](https://github.com/MobileTeleSystems/RecTools/pull/224))
 
 ### Removed
rectools/models/lightfm.py (8 additions, 3 deletions)
@@ -164,20 +164,25 @@ def _from_config(cls, config: LightFMWrapperModelConfig) -> tpe.Self:
         model = model_cls(**params)
         return cls(model=model, epochs=config.epochs, num_threads=config.num_threads, verbose=config.verbose)
 
-    def _fit(self, dataset: Dataset) -> None:  # type: ignore
+    def _fit(self, dataset: Dataset) -> None:
         self.model = deepcopy(self._model)
+        self._fit_partial(dataset, self.n_epochs)
+
+    def _fit_partial(self, dataset: Dataset, epochs: int) -> None:
+        if not self.is_fitted:
+            self.model = deepcopy(self._model)
 
         ui_coo = dataset.get_user_item_matrix(include_weights=True).tocoo(copy=False)
         user_features = self._prepare_features(dataset.get_hot_user_features(), dataset.n_hot_users)
         item_features = self._prepare_features(dataset.get_hot_item_features(), dataset.n_hot_items)
         sample_weight = None if self._model.loss == "warp-kos" else ui_coo
 
-        self.model.fit(
+        self.model.fit_partial(
             ui_coo,
             user_features=user_features,
             item_features=item_features,
             sample_weight=sample_weight,
-            epochs=self.n_epochs,
+            epochs=epochs,
             num_threads=self.n_threads,
             verbose=self.verbose > 0,
         )
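To make the new behaviour concrete, here is a minimal usage sketch, not part of this PR: it assumes the wrapper is importable as `rectools.models.LightFMWrapperModel` and that a RecTools `dataset` has already been built (as in the test fixtures below). `fit()` always retrains from a fresh deep copy of the base estimator, while repeated `fit_partial()` calls continue training the already-fitted copy.

```python
from lightfm import LightFM

from rectools.models import LightFMWrapperModel  # assumed import path

base_model = LightFM(no_components=2, loss="warp", random_state=1)
model = LightFMWrapperModel(model=base_model, epochs=20, num_threads=1)

# fit() deep-copies the base estimator, so every call retrains from scratch
# for the configured number of epochs.
model.fit(dataset)  # `dataset` is a prepared rectools Dataset (assumed to exist)

# fit_partial() only resets the model if it has never been fitted; otherwise it
# continues from the current weights, here one extra epoch per call.
for _ in range(5):
    model.fit_partial(dataset, epochs=1)
```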
tests/models/test_lightfm.py (27 additions, 0 deletions)
@@ -323,6 +323,33 @@ def test_second_fit_refits_model(self, dataset: Dataset) -> None:
         model = LightFMWrapperModel(model=base_model, epochs=5, num_threads=1)
         assert_second_fit_refits_model(model, dataset)
 
+    @pytest.mark.parametrize("loss", ("logistic", "bpr", "warp"))
+    @pytest.mark.parametrize("use_features_in_dataset", (False, True))
+    def test_per_epoch_partial_fit_consistent_with_regular_fit(
+        self,
+        dataset: Dataset,
+        dataset_with_features: Dataset,
+        use_features_in_dataset: bool,
+        loss: str,
+    ) -> None:
+        if use_features_in_dataset:
+            dataset = dataset_with_features
+
+        epochs = 20
+
+        base_model_1 = LightFM(no_components=2, loss=loss, random_state=1)
+        model_1 = LightFMWrapperModel(model=base_model_1, epochs=epochs, num_threads=1).fit(dataset)

+        base_model_2 = LightFM(no_components=2, loss=loss, random_state=1)
+        model_2 = LightFMWrapperModel(model=base_model_2, epochs=epochs, num_threads=1)
+        for _ in range(epochs):
+            model_2.fit_partial(dataset, epochs=1)
+
+        assert np.allclose(model_1.model.item_biases, model_2.model.item_biases)
+        assert np.allclose(model_1.model.user_biases, model_2.model.user_biases)
+        assert np.allclose(model_1.model.item_embeddings, model_2.model.item_embeddings)
+        assert np.allclose(model_1.model.user_embeddings, model_2.model.user_embeddings)
+
     def test_fail_when_getting_cold_reco_with_no_biases(self, dataset: Dataset) -> None:
         class NoBiasesLightFMWrapperModel(LightFMWrapperModel):
             def _get_items_factors(self, dataset: Dataset) -> Factors:
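For context on why the per-epoch loop in the test can match a single multi-epoch fit: in the underlying library, `LightFM.fit()` is a state reset followed by `fit_partial()`, and with `num_threads=1` and a fixed `random_state` both schedules apply the same sequence of updates. A rough, self-contained sketch of the same property at the raw LightFM level, with a toy matrix invented purely for illustration:

```python
import numpy as np
from lightfm import LightFM
from scipy import sparse

# Tiny made-up interaction matrix (3 users x 4 items), just to keep the sketch runnable.
interactions = sparse.coo_matrix(
    np.array(
        [
            [1, 0, 1, 0],
            [0, 1, 0, 1],
            [1, 1, 0, 0],
        ],
        dtype=np.float32,
    )
)

model_a = LightFM(no_components=2, loss="warp", random_state=1)
model_a.fit(interactions, epochs=20, num_threads=1)

model_b = LightFM(no_components=2, loss="warp", random_state=1)
for _ in range(20):
    model_b.fit_partial(interactions, epochs=1, num_threads=1)

# Same weights either way, which is the property the wrapper-level test above asserts.
assert np.allclose(model_a.item_embeddings, model_b.item_embeddings)
```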