Skip to content

Commit

Permalink
Added legacy mongodb handler for migration
Browse files Browse the repository at this point in the history
  • Loading branch information
WillFP committed Aug 26, 2024
1 parent a11815a commit 19fc168
Show file tree
Hide file tree
Showing 7 changed files with 198 additions and 38 deletions.
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
package com.willfp.eco.core.data.handlers;

import com.willfp.eco.core.Eco;
import com.willfp.eco.core.data.keys.PersistentDataKey;
import com.willfp.eco.core.registry.Registrable;
import org.bukkit.Bukkit;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
Expand Down Expand Up @@ -43,10 +40,12 @@ protected PersistentDataHandler(@NotNull final String id) {

/**
* Get all UUIDs with saved data.
* <p>
* This is a blocking operation.
*
* @return All saved UUIDs.
*/
protected abstract Set<UUID> getSavedUUIDs();
public abstract Set<UUID> getSavedUUIDs();

/**
* Save to disk.
Expand Down Expand Up @@ -113,30 +112,26 @@ public final <T> void write(@NotNull final UUID uuid,
}

/**
* Serialize data.
* Serialize profile.
*
* @param uuid The uuid to serialize.
* @param keys The keys to serialize.
* @return The serialized data.
*/
@NotNull
public final Set<SerializedProfile> serializeData(@NotNull final Set<PersistentDataKey<?>> keys) {
Set<SerializedProfile> profiles = new HashSet<>();

for (UUID uuid : getSavedUUIDs()) {
Map<PersistentDataKey<?>, Object> data = new HashMap<>();
public final SerializedProfile serializeProfile(@NotNull final UUID uuid,
@NotNull final Set<PersistentDataKey<?>> keys) {
Map<PersistentDataKey<?>, Object> data = new HashMap<>();

for (PersistentDataKey<?> key : keys) {
Object value = read(uuid, key);
for (PersistentDataKey<?> key : keys) {
Object value = read(uuid, key);

if (value != null) {
data.put(key, value);
}
if (value != null) {
data.put(key, value);
}

profiles.add(new SerializedProfile(uuid, data));
}

return profiles;
return new SerializedProfile(uuid, data);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.registry.KRegistrable
import com.willfp.eco.core.registry.Registry
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler

Expand All @@ -26,9 +26,15 @@ object PersistentDataHandlers: Registry<PersistentDataHandlerFactory>() {
MySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
})

register(object : PersistentDataHandlerFactory("mongodb") {
override fun create(plugin: EcoSpigotPlugin) =
MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
})

// Configs should also accept "mongo"
register(object : PersistentDataHandlerFactory("mongo") {
override fun create(plugin: EcoSpigotPlugin) =
MongoPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
})
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
package com.willfp.eco.internal.spigot.data.handlers.impl

import com.mongodb.MongoClientSettings
import com.mongodb.client.model.Filters
import com.mongodb.kotlin.client.coroutine.MongoClient
import com.willfp.eco.core.config.Configs
import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import kotlinx.coroutines.flow.firstOrNull
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking
import org.bson.BsonArray
import org.bson.BsonBoolean
import org.bson.BsonDecimal128
import org.bson.BsonDocument
import org.bson.BsonDouble
import org.bson.BsonInt32
import org.bson.BsonString
import org.bson.BsonValue
import org.bson.codecs.configuration.CodecRegistries
import org.bson.codecs.pojo.PojoCodecProvider
import java.math.BigDecimal
import java.util.UUID

/**
 * Read-only data handler for the legacy MongoDB document layout, used to
 * migrate old data into the current handler.
 *
 * Reads documents from the `uuidprofile` collection, where each document is
 * keyed by `_id` (the profile UUID as a string) and stores its values in a
 * `data` sub-document mapping key names to BSON values. Writing is
 * unsupported and throws.
 *
 * NOTE(review): the handler id is "legacy_mongodb" but the [Factory] id is
 * "legacy_mongo" — confirm this mismatch is intentional.
 */
class LegacyMongoDBPersistentDataHandler(
    config: Config
) : PersistentDataHandler("legacy_mongodb") {
    // Default driver codecs plus automatic POJO support for decoding documents.
    private val codecRegistry = CodecRegistries.fromRegistries(
        MongoClientSettings.getDefaultCodecRegistry(),
        CodecRegistries.fromProviders(PojoCodecProvider.builder().automatic(true).build())
    )

    // Connection details come from the same "mongodb" config section the
    // current handler uses (see Factory below).
    private val client = MongoClient.create(config.getString("url"))
    private val database = client.getDatabase(config.getString("database"))

    // Legacy collection name; documents are read as raw BsonDocuments so each
    // serializer below can decode its own value type.
    private val collection = database.getCollection<BsonDocument>("uuidprofile")
        .withCodecRegistry(codecRegistry)

    init {
        // Register one deserializer per supported key type. Each serializer
        // only implements deserialize(); reads share the common lookup logic
        // in LegacyMongoSerializer.readAsync, and writes always throw.
        PersistentDataKeyType.STRING.registerSerializer(this, object : LegacyMongoSerializer<String>() {
            override fun deserialize(value: BsonValue): String {
                return value.asString().value
            }
        })

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : LegacyMongoSerializer<Boolean>() {
            override fun deserialize(value: BsonValue): Boolean {
                return value.asBoolean().value
            }
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : LegacyMongoSerializer<Int>() {
            override fun deserialize(value: BsonValue): Int {
                return value.asInt32().value
            }
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : LegacyMongoSerializer<Double>() {
            override fun deserialize(value: BsonValue): Double {
                return value.asDouble().value
            }
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : LegacyMongoSerializer<List<String>>() {
            override fun deserialize(value: BsonValue): List<String> {
                return value.asArray().values.map { it.asString().value }
            }
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : LegacyMongoSerializer<BigDecimal>() {
            override fun deserialize(value: BsonValue): BigDecimal {
                return value.asDecimal128().value.bigDecimalValue()
            }
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : LegacyMongoSerializer<Config>() {
            // Recursively convert a BSON tree into plain Kotlin values
            // (String/Int/Double/Boolean/BigDecimal/List/Map) so it can be fed
            // to Configs.fromMap. Unknown BSON types are a hard error rather
            // than silently dropped.
            private fun deserializeConfigValue(value: BsonValue): Any {
                return when (value) {
                    is BsonString -> value.value
                    is BsonInt32 -> value.value
                    is BsonDouble -> value.value
                    is BsonBoolean -> value.value
                    is BsonDecimal128 -> value.value.bigDecimalValue()
                    is BsonArray -> value.values.map { deserializeConfigValue(it) }
                    is BsonDocument -> value.mapValues { (_, v) -> deserializeConfigValue(v) }

                    else -> throw IllegalArgumentException("Could not deserialize config value type ${value::class.simpleName}")
                }
            }

            override fun deserialize(value: BsonValue): Config {
                // Top level is always a document, so the recursive result is a
                // Map<String, Any>; the cast is safe by construction.
                @Suppress("UNCHECKED_CAST")
                return Configs.fromMap(deserializeConfigValue(value.asDocument()) as Map<String, Any>)
            }
        })
    }

    /**
     * Get every profile UUID present in the legacy collection.
     *
     * Blocking (per the superclass contract): fetches all documents via
     * runBlocking and parses each `_id` string into a UUID.
     * NOTE(review): this loads full documents just to read `_id` — a
     * projection would be cheaper on large databases; acceptable for a
     * one-off migration.
     */
    override fun getSavedUUIDs(): Set<UUID> {
        return runBlocking {
            collection.find().toList().map {
                UUID.fromString(it.getString("_id").value)
            }.toSet()
        }
    }

    /**
     * Shared read logic for all legacy Mongo deserializers.
     *
     * Subclasses only supply [deserialize] for their value type; writing is
     * unsupported because this handler exists purely for migration reads.
     */
    private abstract inner class LegacyMongoSerializer<T : Any> : DataTypeSerializer<T>() {
        // Despite the name, this blocks on the coroutine-based driver via
        // runBlocking, mirroring getSavedUUIDs above.
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            return runBlocking {
                // Documents are keyed by the UUID's string form.
                val filter = Filters.eq("_id", uuid.toString())

                val profile = collection.find(filter)
                    .firstOrNull() ?: return@runBlocking null

                // Values live under the "data" sub-document, keyed by the
                // key's namespaced-key string.
                val dataMap = profile.getDocument("data")
                val value = dataMap[key.key.toString()] ?: return@runBlocking null

                // The try/catch is the lambda's final expression: a successful
                // deserialize returns early, any failure yields null (the
                // catch branch's value) — i.e. unreadable values are skipped
                // silently rather than aborting the migration.
                try {
                    return@runBlocking deserialize(value)
                } catch (e: Exception) {
                    null
                }
            }
        }

        // Migration source is read-only by design.
        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            throw UnsupportedOperationException("Legacy Mongo does not support writing")
        }

        /** Decode a raw BSON value into this serializer's value type. */
        protected abstract fun deserialize(value: BsonValue): T
    }

    /**
     * Factory used by the migration scheduler to construct this handler from
     * the plugin's "mongodb" config section.
     * NOTE(review): id "legacy_mongo" differs from the handler id
     * "legacy_mongodb" — confirm intentional.
     */
    object Factory: PersistentDataHandlerFactory("legacy_mongo") {
        override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
            return LegacyMongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,13 @@ class LegacyMySQLPersistentDataHandler(
SchemaUtils.create(table)
}

PersistentDataKeyType.STRING.registerSerializer(this, LegacySerializer<String>())
PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacySerializer<Boolean>())
PersistentDataKeyType.INT.registerSerializer(this, LegacySerializer<Int>())
PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacySerializer<Double>())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacySerializer<BigDecimal>())
PersistentDataKeyType.CONFIG.registerSerializer(this, LegacySerializer<Config>())
PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacySerializer<List<String>>())
PersistentDataKeyType.STRING.registerSerializer(this, LegacyMySQLSerializer<String>())
PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacyMySQLSerializer<Boolean>())
PersistentDataKeyType.INT.registerSerializer(this, LegacyMySQLSerializer<Int>())
PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacyMySQLSerializer<Double>())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacyMySQLSerializer<BigDecimal>())
PersistentDataKeyType.CONFIG.registerSerializer(this, LegacyMySQLSerializer<Config>())
PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacyMySQLSerializer<List<String>>())
}

override fun getSavedUUIDs(): Set<UUID> {
Expand All @@ -61,7 +61,7 @@ class LegacyMySQLPersistentDataHandler(
}.map { it.value }.toSet()
}

private inner class LegacySerializer<T : Any> : DataTypeSerializer<T>() {
private inner class LegacyMySQLSerializer<T : Any> : DataTypeSerializer<T>() {
override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
val json = transaction(database) {
table.selectAll()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.bson.types.Decimal128
import java.math.BigDecimal
import java.util.UUID

class MongoPersistentDataHandler(
class MongoDBPersistentDataHandler(
config: Config
) : PersistentDataHandler("mongo") {
private val codecRegistry = CodecRegistries.fromRegistries(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@ import com.willfp.eco.internal.spigot.ServerLocking
import com.willfp.eco.internal.spigot.data.KeyRegistry
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlers
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMongoDBPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoPlayerProfile
Expand All @@ -15,6 +17,8 @@ import com.willfp.eco.internal.spigot.data.profiles.impl.serverProfileUUID
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap

const val LEGACY_MIGRATED_KEY = "legacy-data-migrated"

class ProfileHandler(
private val plugin: EcoSpigotPlugin
) {
Expand Down Expand Up @@ -57,7 +61,7 @@ class ProfileHandler(
// First install
if (!plugin.dataYml.has("previous-handler")) {
plugin.dataYml.set("previous-handler", defaultHandler.id)
plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
plugin.dataYml.save()
return false
}
Expand All @@ -70,13 +74,20 @@ class ProfileHandler(
return true
}

if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool("legacy-mysql-migrated")) {
if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")
scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)

return true
}

if (defaultHandler is MongoDBPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
plugin.logger.info("eco has detected a legacy MongoDB database. Migrating to new MongoDB database...")
scheduleMigration(LegacyMongoDBPersistentDataHandler.Factory)

return true
}

return false
}

Expand All @@ -87,7 +98,7 @@ class ProfileHandler(
plugin.scheduler.runLater(5) {
doMigrate(fromFactory)

plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
plugin.dataYml.save()
}
}
Expand All @@ -100,14 +111,20 @@ class ProfileHandler(
val fromHandler = fromFactory.create(plugin)
val toHandler = defaultHandler

plugin.logger.info("Loading data from ${fromFactory.id}...")
val keys = KeyRegistry.getRegisteredKeys()

plugin.logger.info("Keys to migrate: ${keys.map { it.key }.joinToString(", ") }}")

plugin.logger.info("Loading profile UUIDs from ${fromFactory.id}...")
plugin.logger.info("This step may take a while depending on the size of your database.")

val serialized = fromHandler.serializeData(KeyRegistry.getRegisteredKeys())
val uuids = fromHandler.getSavedUUIDs()

plugin.logger.info("Found ${serialized.size} profiles to migrate")
plugin.logger.info("Found ${uuids.size} profiles to migrate")

for ((index, profile) in serialized.withIndex()) {
plugin.logger.info("(${index + 1}/${serialized.size}) Migrating ${profile.uuid}")
for ((index, uuid) in uuids.withIndex()) {
plugin.logger.info("(${index + 1}/${uuids.size}) Migrating $uuid")
val profile = fromHandler.serializeProfile(uuid, keys)
toHandler.loadSerializedProfile(profile)
}

Expand Down
2 changes: 1 addition & 1 deletion eco-core/core-plugin/src/main/resources/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
# How player/server data is saved:
# yaml - Stored in data.yml: Good option for single-node servers (i.e. no BungeeCord/Velocity)
# mysql - Standard database, great option for multi-node servers (i.e. BungeeCord/Velocity)
# mongo - Alternative database, great option for multi-node servers (i.e. BungeeCord/Velocity)
# mongodb - Alternative database, great option for multi-node servers (i.e. BungeeCord/Velocity)
data-handler: yaml

# If data should be migrated automatically when changing data handler.
Expand Down

0 comments on commit 19fc168

Please sign in to comment.