
Commit

server: Refactor the sql parsers to expect correct data from the database

If the data turns out to be corrupted, an exception is going to be thrown; as there are check constraints on the SQL schema, this shouldn't occur.
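For context, a minimal before/after sketch of the parser pattern this commit adopts (Anorm column parsers, mirroring the diff below; the val names here are illustrative):

    import anorm.SqlParser.str
    import com.xsn.explorer.models.Blockhash

    // before: Blockhash.from returns an Option, so the row parser produced
    // RowParser[Option[Blockhash]] and every caller had to unwrap it
    val parseBlockhashBefore = str("blockhash").map(Blockhash.from)

    // after: a value that fails validation throws, so the parser yields Blockhash directly;
    // the schema's check constraints should make this branch unreachable in practice
    val parseBlockhashAfter = str("blockhash")
      .map(Blockhash.from)
      .map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) }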
AlexITC committed Feb 17, 2019
1 parent 516dd40 commit 988694b
Showing 4 changed files with 34 additions and 24 deletions.
BlockPostgresDAO.scala
@@ -45,7 +45,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
'bits -> block.bits,
'chainwork -> block.chainwork,
'difficulty -> block.difficulty
- ).as(parseBlock.single)
+ ).as(parseBlock.singleOpt)
}

def setNextBlockhash(
@@ -64,7 +64,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
).on(
'blockhash -> blockhash.string,
'next_blockhash -> nextBlockhash.string
- ).as(parseBlock.singleOpt).flatten
+ ).as(parseBlock.singleOpt)
}

def getBy(blockhash: Blockhash)(implicit conn: Connection): Option[Block] = {
@@ -77,7 +77,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
""".stripMargin
).on(
"blockhash" -> blockhash.string
- ).as(parseBlock.singleOpt).flatten
+ ).as(parseBlock.singleOpt)
}

def getBy(height: Height)(implicit conn: Connection): Option[Block] = {
@@ -90,7 +90,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
""".stripMargin
).on(
"height" -> height.int
- ).as(parseBlock.singleOpt).flatten
+ ).as(parseBlock.singleOpt)
}

def getBy(
@@ -111,7 +111,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
).on(
'offset -> paginatedQuery.offset.int,
'limit -> paginatedQuery.limit.int
- ).as(parseBlock.*).flatten
+ ).as(parseBlock.*)
}

def count(implicit conn: Connection): Count = {
@@ -135,7 +135,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
""".stripMargin
).on(
"blockhash" -> blockhash.string
- ).as(parseBlock.singleOpt).flatten
+ ).as(parseBlock.singleOpt)
}

def getLatestBlock(implicit conn: Connection): Option[Block] = {
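The dropped .flatten calls above follow from the parser change: parseBlock used to produce Option[Block] per row, so singleOpt gave an Option[Option[Block]]; with the refactored parsers it produces Block. Roughly (a sketch, with the actual query text elided):

    // before this commit
    SQL(query).as(parseBlock.singleOpt).flatten  // Option[Option[Block]] flattened to Option[Block]

    // after this commit
    SQL(query).as(parseBlock.singleOpt)          // Option[Block]; None now only means "no matching row"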
server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala (35 changes: 22 additions & 13 deletions)
@@ -9,10 +9,22 @@ object BlockParsers {

import CommonParsers._

- val parseNextBlockhash = str("next_blockhash").map(Blockhash.from)
- val parsePreviousBlockhash = str("previous_blockhash").map(Blockhash.from)
- val parseTposContract = str("tpos_contract").map(TransactionId.from)
- val parseMerkleRoot = str("merkle_root").map(Blockhash.from)
+ val parseNextBlockhash = str("next_blockhash")
+ .map(Blockhash.from)
+ .map { _.getOrElse(throw new RuntimeException("corrupted next_blockhash")) }
+
+ val parsePreviousBlockhash = str("previous_blockhash")
+ .map(Blockhash.from)
+ .map { _.getOrElse(throw new RuntimeException("corrupted previous_blockhash")) }
+
+ val parseTposContract = str("tpos_contract")
+ .map(TransactionId.from)
+ .map { _.getOrElse(throw new RuntimeException("corrupted tpos_contract")) }
+
+ val parseMerkleRoot = str("merkle_root")
+ .map(Blockhash.from)
+ .map { _.getOrElse(throw new RuntimeException("corrupted merkle_root")) }

val parseSize = int("size").map(Size.apply)
val parseHeight = int("height").map(Height.apply)
val parseVersion = int("version")
@@ -38,11 +50,11 @@ object BlockParsers {
parseChainwork ~
parseDifficulty).map {

- case hashMaybe ~
+ case hash ~
nextBlockhash ~
previousBlockhash ~
tposContract ~
- merkleRootMaybe ~
+ merkleRoot ~
size ~
height ~
version ~
@@ -53,14 +65,11 @@ object BlockParsers {
chainwork ~
difficulty =>

- for {
- hash <- hashMaybe
- merkleRoot <- merkleRootMaybe
- } yield Block(
+ Block(
hash = hash,
- previousBlockhash = previousBlockhash.flatten,
- nextBlockhash = nextBlockhash.flatten,
- tposContract = tposContract.flatten,
+ previousBlockhash = previousBlockhash,
+ nextBlockhash = nextBlockhash,
+ tposContract = tposContract,
merkleRoot = merkleRoot,
size = size,
height = height,
CommonParsers.scala
@@ -5,7 +5,10 @@ import com.xsn.explorer.models.{Address, Blockhash, Size}

object CommonParsers {

- val parseBlockhash = str("blockhash").map(Blockhash.from)
+ val parseBlockhash = str("blockhash")
+ .map(Blockhash.from)
+ .map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) }

val parseAddress = str("address").map(Address.from)
val parseTime = long("time")
val parseSize = int("size").map(Size.apply)
TransactionParsers.scala
@@ -23,10 +23,9 @@ object TransactionParsers {
val parseTposMerchantAddress = str("tpos_merchant_address").map(Address.from)

val parseTransaction = (parseTransactionId ~ parseBlockhash ~ parseTime ~ parseSize).map {
- case txidMaybe ~ blockhashMaybe ~ time ~ size =>
+ case txidMaybe ~ blockhash ~ time ~ size =>
for {
txid <- txidMaybe
- blockhash <- blockhashMaybe
} yield Transaction(txid, blockhash, time, size, List.empty, List.empty)
}

@@ -38,10 +37,9 @@ object TransactionParsers {
parseSent ~
parseReceived).map {

- case txidMaybe ~ blockhashMaybe ~ time ~ size ~ sent ~ received =>
+ case txidMaybe ~ blockhash ~ time ~ size ~ sent ~ received =>
for {
txid <- txidMaybe
- blockhash <- blockhashMaybe
} yield TransactionWithValues(txid, blockhash, time, size, sent, received)
}

