server: Migrate database from BLOCKHASH_TYPE to HASH_TYPE
The change reduces disk usage because BLOCKHASH_TYPE is a TEXT domain holding a
64-character hex string, while HASH_TYPE is a byte-array (BYTEA) domain holding
the same hash as 32 raw bytes, roughly halving the storage for these columns.

Be aware that applying this evolution will take a while on production: each
ALTER COLUMN ... TYPE forces a full rewrite of the affected table and its indexes.
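For a rough sense of the saving (a sketch, not project code): a block hash is 32 bytes, and its lowercase hex encoding doubles that to 64 characters, so each hash column shrinks to about half its payload.

```scala
// Sketch: the same 32-byte hash as hex TEXT vs. raw BYTEA bytes.
val hexHash: String = "a3" * 32 // 64 hex chars: the pre-migration TEXT value
val rawHash: Array[Byte] =
  hexHash.grouped(2).map(Integer.parseInt(_, 16).toByte).toArray

assert(hexHash.length == 64) // payload bytes per hash before the migration
assert(rawHash.length == 32) // payload bytes per hash after the migration
```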
AlexITC committed Sep 21, 2019
1 parent f421ae5 commit 14e99fa
Showing 8 changed files with 87 additions and 42 deletions.
@@ -21,7 +21,7 @@ class BlockFilterPostgresDAO {
|RETURNING blockhash, m, n, p, hex
""".stripMargin
).on(
- 'blockhash -> blockhash.string,
+ 'blockhash -> blockhash.toBytesBE.toArray,
'm -> filter.m,
'n -> filter.n,
'p -> filter.p,
@@ -44,7 +44,7 @@ class BlockFilterPostgresDAO {
| hex = EXCLUDED.hex
""".stripMargin
).on(
- 'blockhash -> blockhash.string,
+ 'blockhash -> blockhash.toBytesBE.toArray,
'm -> filter.m,
'n -> filter.n,
'p -> filter.p,
@@ -61,7 +61,7 @@ class BlockFilterPostgresDAO {
|RETURNING blockhash, m, n, p, hex
""".stripMargin
).on(
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(parseFilter.singleOpt)
}
@@ -74,7 +74,7 @@ class BlockFilterPostgresDAO {
|WHERE blockhash = {blockhash}
""".stripMargin
).on(
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(parseFilter.singleOpt)
}
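For context on the pattern above: anorm binds an `Array[Byte]` parameter as the JDBC binary type, which Postgres accepts for a BYTEA-backed column such as HASH_TYPE, so each call site only needs to swap `.string` for `.toBytesBE.toArray`. A minimal hedged sketch, with a hypothetical stand-in for the project's `Blockhash` value class:

```scala
import java.sql.Connection
import anorm._

// Hypothetical stand-in for the project's Blockhash value class.
final case class Blockhash(bytes: Array[Byte]) {
  def toBytesBE: Seq[Byte] = bytes.toSeq // big-endian bytes, as the DAOs expect
}

def findHex(blockhash: Blockhash)(implicit conn: Connection): Option[String] =
  SQL(
    """
      |SELECT hex
      |FROM block_address_gcs
      |WHERE blockhash = {blockhash}
    """.stripMargin
  ).on(
    "blockhash" -> blockhash.toBytesBE.toArray // Array[Byte] is sent as BYTEA
  ).as(SqlParser.str("hex").singleOpt)
```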
24 changes: 12 additions & 12 deletions server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
@@ -34,9 +34,9 @@ class BlockPostgresDAO @Inject()(
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
""".stripMargin
).on(
- 'blockhash -> block.hash.string,
- 'previous_blockhash -> block.previousBlockhash.map(_.string),
- 'next_blockhash -> block.nextBlockhash.map(_.string),
+ 'blockhash -> block.hash.toBytesBE.toArray,
+ 'previous_blockhash -> block.previousBlockhash.map(_.toBytesBE.toArray),
+ 'next_blockhash -> block.nextBlockhash.map(_.toBytesBE.toArray),
'tpos_contract -> block.tposContract.map(_.string),
'merkle_root -> block.merkleRoot.toBytesBE.toArray,
'size -> block.size.int,
@@ -84,9 +84,9 @@ class BlockPostgresDAO @Inject()(
| extraction_method = EXCLUDED.extraction_method
""".stripMargin
).on(
- 'blockhash -> block.hash.string,
- 'previous_blockhash -> block.previousBlockhash.map(_.string),
- 'next_blockhash -> block.nextBlockhash.map(_.string),
+ 'blockhash -> block.hash.toBytesBE.toArray,
+ 'previous_blockhash -> block.previousBlockhash.map(_.toBytesBE.toArray),
+ 'next_blockhash -> block.nextBlockhash.map(_.toBytesBE.toArray),
'tpos_contract -> block.tposContract.map(_.string),
'merkle_root -> block.merkleRoot.toBytesBE.toArray,
'size -> block.size.int,
@@ -114,8 +114,8 @@ class BlockPostgresDAO @Inject()(
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
""".stripMargin
).on(
- 'blockhash -> blockhash.string,
- 'next_blockhash -> nextBlockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray,
+ 'next_blockhash -> nextBlockhash.toBytesBE.toArray
)
.as(parseBlock.singleOpt)
}
@@ -129,7 +129,7 @@ class BlockPostgresDAO @Inject()(
|WHERE blockhash = {blockhash}
""".stripMargin
).on(
- "blockhash" -> blockhash.string
+ "blockhash" -> blockhash.toBytesBE.toArray
)
.as(parseBlock.singleOpt)
}
@@ -189,7 +189,7 @@ class BlockPostgresDAO @Inject()(
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
""".stripMargin
).on(
- "blockhash" -> blockhash.string
+ "blockhash" -> blockhash.toBytesBE.toArray
)
.as(parseBlock.singleOpt)
}
@@ -254,7 +254,7 @@ class BlockPostgresDAO @Inject()(
|LIMIT {limit}
""".stripMargin
).on(
- 'lastSeenHash -> lastSeenHash.string,
+ 'lastSeenHash -> lastSeenHash.toBytesBE.toArray,
'limit -> limit.int
)
.as(parseHeader.*)
@@ -276,7 +276,7 @@ class BlockPostgresDAO @Inject()(
|WHERE blockhash = {blockhash}
""".stripMargin
).on(
- "blockhash" -> blockhash.string
+ "blockhash" -> blockhash.toBytesBE.toArray
)
.as(parseHeader.singleOpt)

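One detail worth noting above: the optional hashes go through `.map(_.toBytesBE.toArray)`, yielding an `Option[Array[Byte]]`, and anorm binds `None` as SQL NULL. A tiny sketch (reusing the hypothetical `Blockhash` stand-in):

```scala
// Hypothetical stand-in for the project's Blockhash value class.
final case class Blockhash(bytes: Array[Byte]) { def toBytesBE: Seq[Byte] = bytes.toSeq }

// The genesis block has no previous block, so its parameter must become NULL.
def previousParam(previous: Option[Blockhash]): Option[Array[Byte]] =
  previous.map(_.toBytesBE.toArray) // None -> SQL NULL, Some(h) -> BYTEA bytes
```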
@@ -101,7 +101,7 @@ class TransactionPostgresDAO @Inject()(
case (transaction, index) =>
List(
'txid -> transaction.id.string: NamedParameter,
- 'blockhash -> transaction.blockhash.string: NamedParameter,
+ 'blockhash -> transaction.blockhash.toBytesBE.toArray: NamedParameter,
'time -> transaction.time: NamedParameter,
'size -> transaction.size.int: NamedParameter,
'index -> index: NamedParameter
@@ -140,7 +140,7 @@ class TransactionPostgresDAO @Inject()(
|ORDER BY index DESC
""".stripMargin
).on(
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(parseTransaction.*)

@@ -168,7 +168,7 @@ class TransactionPostgresDAO @Inject()(
|RETURNING txid, blockhash, time, size
""".stripMargin
).on(
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(parseTransaction.*)

@@ -264,7 +264,7 @@ class TransactionPostgresDAO @Inject()(
|WHERE blockhash = {blockhash}
""".stripMargin
).on(
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(SqlParser.scalar[Int].single)

@@ -284,7 +284,7 @@ class TransactionPostgresDAO @Inject()(
""".stripMargin
).on(
'limit -> limit.int,
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(parseTransactionWithValues.*)
}
@@ -310,7 +310,7 @@ class TransactionPostgresDAO @Inject()(
""".stripMargin
).on(
'limit -> limit.int,
- 'blockhash -> blockhash.string,
+ 'blockhash -> blockhash.toBytesBE.toArray,
'lastSeenTxid -> lastSeenTxid.string
)
.as(parseTransactionWithValues.*)
@@ -329,7 +329,7 @@ class TransactionPostgresDAO @Inject()(
""".stripMargin
).on(
'limit -> limit.int,
- 'blockhash -> blockhash.string
+ 'blockhash -> blockhash.toBytesBE.toArray
)
.as(parseTransaction.*)

@@ -361,7 +361,7 @@ class TransactionPostgresDAO @Inject()(
""".stripMargin
).on(
'limit -> limit.int,
- 'blockhash -> blockhash.string,
+ 'blockhash -> blockhash.toBytesBE.toArray,
'lastSeenTxid -> lastSeenTxid.string
)
.as(parseTransaction.*)
@@ -391,7 +391,7 @@ class TransactionPostgresDAO @Inject()(
""".stripMargin
).on(
'txid -> transaction.id.string,
- 'blockhash -> transaction.blockhash.string,
+ 'blockhash -> transaction.blockhash.toBytesBE.toArray,
'time -> transaction.time,
'size -> transaction.size.int,
'index -> index
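The `List('txid -> ...: NamedParameter, ...)` blocks above build one parameter list per transaction, the shape anorm's `BatchSql` takes for multi-row inserts. A hedged sketch of that assembly (table and model simplified; note txid stays a hex string at this point in the migration):

```scala
import java.sql.Connection
import anorm._

// Simplified stand-in for the project's Transaction model.
final case class Tx(txid: String, blockhash: Array[Byte], time: Long)

def insertAll(transactions: List[Tx])(implicit conn: Connection): Unit =
  transactions match {
    case head :: tail =>
      def params(t: Tx): Seq[NamedParameter] =
        Seq[NamedParameter]("txid" -> t.txid, "blockhash" -> t.blockhash, "time" -> t.time)
      BatchSql(
        "INSERT INTO transactions (txid, blockhash, time) VALUES ({txid}, {blockhash}, {time})",
        params(head),
        tail.map(params): _*
      ).execute()
    case Nil => () // BatchSql needs at least one row
  }
```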
@@ -10,8 +10,8 @@ object BlockParsers {

import CommonParsers._

- val parseNextBlockhash = parseBlockhash("next_blockhash")
- val parsePreviousBlockhash = parseBlockhash("previous_blockhash")
+ val parseNextBlockhash = parseBlockhashBytes("next_blockhash")
+ val parsePreviousBlockhash = parseBlockhashBytes("previous_blockhash")
val parseTposContract = parseTransactionId("tpos_contract")
val parseMerkleRoot = parseBlockhashBytes("merkle_root")

@@ -27,7 +27,7 @@ object BlockParsers {
val parseChainwork = str("chainwork")
val parseDifficulty = get[BigDecimal]("difficulty")

- val parseBlock = (parseBlockhash() ~
+ val parseBlock = (parseBlockhashBytes() ~
parseNextBlockhash.? ~
parsePreviousBlockhash.? ~
parseTposContract.? ~
@@ -77,7 +77,7 @@ object BlockParsers {
)
}

- val parseHeader = (parseBlockhash() ~
+ val parseHeader = (parseBlockhashBytes() ~
parseNextBlockhash.? ~
parsePreviousBlockhash.? ~
parseTposContract.? ~
@@ -5,17 +5,11 @@ import com.xsn.explorer.models.values._

object CommonParsers {

- // TODO: replace the parseBlockhash method with this one after all fields blockhash fields are migrated to bytea
- def parseBlockhashBytes(field: String) =
+ def parseBlockhashBytes(field: String = "blockhash") =
byteArray(field)
.map(Blockhash.fromBytesBE)
.map { _.getOrElse(throw new RuntimeException(s"corrupted $field")) }

- def parseBlockhash(field: String = "blockhash") =
- str(field)
- .map(Blockhash.from)
- .map { _.getOrElse(throw new RuntimeException(s"corrupted $field")) }

def parseAddress(field: String = "address") =
str(field)
.map { string =>
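anorm's `SqlParser.byteArray` reads a BYTEA column as `Array[Byte]`, and `Blockhash.fromBytesBE` validates what comes back. A hedged sketch of that round-trip; the real `Blockhash` lives in `com.xsn.explorer.models.values`, and this stand-in only checks the length:

```scala
import anorm.SqlParser.byteArray

// Hypothetical stand-in; the real class also normalizes the hex form.
final case class Blockhash(bytes: Array[Byte]) {
  def string: String = bytes.map("%02x".format(_)).mkString
}

object Blockhash {
  def fromBytesBE(bytes: Array[Byte]): Option[Blockhash] =
    if (bytes.length == 32) Some(Blockhash(bytes)) else None
}

def parseBlockhashBytes(field: String = "blockhash") =
  byteArray(field)
    .map(Blockhash.fromBytesBE)
    .map(_.getOrElse(throw new RuntimeException(s"corrupted $field")))
```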
@@ -17,12 +17,12 @@ object TransactionParsers {
val parseValue = get[BigDecimal]("value")
val parseHexScript = parseHexString("hex_script")

- val parseTransaction = (parseTransactionId() ~ parseBlockhash() ~ parseTime ~ parseSize).map {
+ val parseTransaction = (parseTransactionId() ~ parseBlockhashBytes() ~ parseTime ~ parseSize).map {
case txid ~ blockhash ~ time ~ size => Transaction(txid, blockhash, time, size)
}

val parseTransactionWithValues = (parseTransactionId() ~
- parseBlockhash() ~
+ parseBlockhashBytes() ~
parseTime ~
parseSize ~
parseSent ~
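The `~` combinator pairs row parsers and `case a ~ b ~ ...` destructures the result, so swapping `parseBlockhash()` for `parseBlockhashBytes()` changes only the column decoding, not the parser's shape. A simplified sketch of the pattern (column types assumed):

```scala
import anorm._
import anorm.SqlParser.{byteArray, int, long, str}

// Simplified stand-in for the project's Transaction model.
final case class Transaction(txid: String, blockhash: Array[Byte], time: Long, size: Int)

val parseTransaction: RowParser[Transaction] =
  (str("txid") ~ byteArray("blockhash") ~ long("time") ~ int("size")).map {
    case txid ~ blockhash ~ time ~ size => Transaction(txid, blockhash, time, size)
  }
```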
@@ -20,7 +20,7 @@ class BlockSynchronizationProgressDAO {
|ON CONFLICT (blockhash) DO UPDATE
| SET state = EXCLUDED.state
""".stripMargin
- ).on("blockhash" -> blockhash.string, "state" -> state.entryName).execute()
+ ).on("blockhash" -> blockhash.toBytesBE.toArray, "state" -> state.entryName).execute()
}

def find(blockhash: Blockhash)(implicit conn: Connection): Option[BlockSynchronizationState] = {
@@ -30,7 +30,7 @@ class BlockSynchronizationProgressDAO {
|FROM block_synchronization_progress
|WHERE blockhash = {blockhash}
""".stripMargin
- ).on("blockhash" -> blockhash.string).as(scalar[String].singleOpt)
+ ).on("blockhash" -> blockhash.toBytesBE.toArray).as(scalar[String].singleOpt)

maybe.map(BlockSynchronizationState.withNameInsensitive)
}
@@ -42,7 +42,7 @@ class BlockSynchronizationProgressDAO {
|FROM block_synchronization_progress
|LIMIT 1
""".stripMargin
- ).as(parseBlockhash().singleOpt)
+ ).as(parseBlockhashBytes().singleOpt)
}

def delete(blockhash: Blockhash)(implicit conn: Connection): Unit = {
@@ -51,6 +51,6 @@ class BlockSynchronizationProgressDAO {
|DELETE FROM block_synchronization_progress
|WHERE blockhash = {blockhash}
""".stripMargin
- ).on("blockhash" -> blockhash.string).execute()
+ ).on("blockhash" -> blockhash.toBytesBE.toArray).execute()
}
}
51 changes: 51 additions & 0 deletions server/conf/evolutions/default/21.sql
@@ -0,0 +1,51 @@

# --- !Ups

-- drop FK constraints to be able to update column types one after another
ALTER TABLE transactions DROP CONSTRAINT transactions_blockhash_fk;
ALTER TABLE block_address_gcs DROP CONSTRAINT block_address_gcs_blockhash_fk;

-- update main types
ALTER TABLE blocks
ALTER COLUMN blockhash TYPE HASH_TYPE USING DECODE(blockhash, 'hex'),
ALTER COLUMN previous_blockhash TYPE HASH_TYPE USING DECODE(previous_blockhash, 'hex'),
ALTER COLUMN next_blockhash TYPE HASH_TYPE USING DECODE(next_blockhash, 'hex');

-- update tables with FKs
ALTER TABLE transactions ALTER COLUMN blockhash TYPE HASH_TYPE USING DECODE(blockhash, 'hex');
ALTER TABLE block_address_gcs ALTER COLUMN blockhash TYPE HASH_TYPE USING DECODE(blockhash, 'hex');
ALTER TABLE block_synchronization_progress ALTER COLUMN blockhash TYPE HASH_TYPE USING DECODE(blockhash, 'hex');

-- add FKs back
ALTER TABLE transactions ADD CONSTRAINT transactions_blockhash_fk FOREIGN KEY (blockhash) REFERENCES blocks(blockhash);
ALTER TABLE block_address_gcs ADD CONSTRAINT block_address_gcs_blockhash_fk FOREIGN KEY (blockhash) REFERENCES blocks(blockhash);

-- drop unused type
DROP DOMAIN BLOCKHASH_TYPE;

# --- !Downs

-- add domain
CREATE DOMAIN BLOCKHASH_TYPE AS TEXT
CHECK (
VALUE ~ '^[a-f0-9]{64}$'
);

-- drop FK constraints to be able to update column types one after another
ALTER TABLE transactions DROP CONSTRAINT transactions_blockhash_fk;
ALTER TABLE block_address_gcs DROP CONSTRAINT block_address_gcs_blockhash_fk;

-- update main types
ALTER TABLE blocks
ALTER COLUMN blockhash TYPE BLOCKHASH_TYPE USING ENCODE(blockhash, 'hex'),
ALTER COLUMN previous_blockhash TYPE BLOCKHASH_TYPE USING ENCODE(previous_blockhash, 'hex'),
ALTER COLUMN next_blockhash TYPE BLOCKHASH_TYPE USING ENCODE(next_blockhash, 'hex');

-- update tables with FKs
ALTER TABLE transactions ALTER COLUMN blockhash TYPE BLOCKHASH_TYPE USING ENCODE(blockhash, 'hex');
ALTER TABLE block_address_gcs ALTER COLUMN blockhash TYPE BLOCKHASH_TYPE USING ENCODE(blockhash, 'hex');
ALTER TABLE block_synchronization_progress ALTER COLUMN blockhash TYPE BLOCKHASH_TYPE USING ENCODE(blockhash, 'hex');

-- add FKs back
ALTER TABLE transactions ADD CONSTRAINT transactions_blockhash_fk FOREIGN KEY (blockhash) REFERENCES blocks(blockhash);
ALTER TABLE block_address_gcs ADD CONSTRAINT block_address_gcs_blockhash_fk FOREIGN KEY (blockhash) REFERENCES blocks(blockhash);
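The Downs mirror the Ups because `ENCODE(..., 'hex')` inverts `DECODE(..., 'hex')` losslessly; the same equivalence keeps rows converted by the Ups readable by the new Scala code. A small sketch of that invariant, with hand-rolled hex helpers:

```scala
// Hand-rolled equivalents of Postgres DECODE/ENCODE(..., 'hex'), purely to
// illustrate that the Ups and Downs round-trip without losing information.
def decodeHex(hex: String): Array[Byte] =
  hex.grouped(2).map(Integer.parseInt(_, 16).toByte).toArray

def encodeHex(bytes: Array[Byte]): String =
  bytes.map("%02x".format(_)).mkString

val blockhash = "00" * 31 + "ff" // a 64-char hex hash, as stored pre-migration
assert(encodeHex(decodeHex(blockhash)) == blockhash) // Downs(Ups(x)) == x
```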
