server: update blockHeader model
blockHeader model matches the persisted block model
adinael committed Jun 8, 2019
1 parent 1daf6f0 commit 2086918
Showing 6 changed files with 137 additions and 11 deletions.
@@ -161,7 +161,8 @@ class BlockPostgresDAO @Inject()(

val headers = SQL(
s"""
|SELECT blockhash, previous_blockhash, merkle_root, height, time
|SELECT blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
|FROM blocks
|ORDER BY height $order
|LIMIT {limit}
@@ -195,7 +196,8 @@ class BlockPostgresDAO @Inject()(
| FROM blocks
| WHERE blockhash = {lastSeenHash}
|)
|SELECT b.blockhash, b.previous_blockhash, b.merkle_root, b.height, b.time
|SELECT blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
|FROM CTE CROSS JOIN blocks b
|WHERE b.height $comparator lastSeenHeight
|ORDER BY height $order
@@ -218,7 +220,8 @@
def getHeader(blockhash: Blockhash, includeFilter: Boolean)(implicit conn: Connection): Option[BlockHeader] = {
val blockMaybe = SQL(
"""
|SELECT blockhash, previous_blockhash, merkle_root, height, time
|SELECT blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
|FROM blocks
|WHERE blockhash = {blockhash}
""".stripMargin
@@ -237,7 +240,8 @@
def getHeader(height: Height, includeFilter: Boolean)(implicit conn: Connection): Option[BlockHeader] = {
val blockMaybe = SQL(
"""
|SELECT blockhash, previous_blockhash, merkle_root, height, time
|SELECT blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
|FROM blocks
|WHERE height = {height}
""".stripMargin
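Note: the remainder of each getHeader body is collapsed above. For context, a rough sketch of how such a query is typically completed with anorm — the parameter binding and parser name follow the visible code, while the helper name, the Blockhash accessor, and the omitted filter handling are assumptions:

import java.sql.Connection
import anorm._

// Sketch only, not the collapsed code: bind the named parameter and run the row parser.
def getHeaderSketch(blockhash: Blockhash)(implicit conn: Connection): Option[BlockHeader] = {
  SQL(
    """
      |SELECT blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
      |       height, version, time, median_time, nonce, bits, chainwork, difficulty, extraction_method
      |FROM blocks
      |WHERE blockhash = {blockhash}
    """.stripMargin
  ).on(
    "blockhash" -> blockhash.string // assumes Blockhash exposes its hex value as .string
  ).as(parseHeader.singleOpt) // parseHeader comes from BlockParsers
}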
51 changes: 48 additions & 3 deletions server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala
@@ -77,8 +77,53 @@ object BlockParsers {
)
}

val parseHeader = (parseBlockhash() ~ parsePreviousBlockhash.? ~ parseMerkleRoot ~ parseHeight ~ parseTime).map {
case blockhash ~ previousBlockhash ~ merkleRoot ~ height ~ time =>
BlockHeader.Simple(blockhash, previousBlockhash, merkleRoot, height, time)
val parseHeader = (parseBlockhash() ~
parseNextBlockhash.? ~
parsePreviousBlockhash.? ~
parseTposContract.? ~
parseMerkleRoot ~
parseSize ~
parseHeight ~
parseVersion ~
parseTime ~
parseMedianTime ~
parseNonce ~
parseBits ~
parseChainwork ~
parseDifficulty ~
parseExtractionMethod).map {

case hash ~
nextBlockhash ~
previousBlockhash ~
tposContract ~
merkleRoot ~
size ~
height ~
version ~
time ~
medianTime ~
nonce ~
bits ~
chainwork ~
difficulty ~
extractionMethod =>
BlockHeader.Simple(
hash = hash,
previousBlockhash = previousBlockhash,
nextBlockhash = nextBlockhash,
tposContract = tposContract,
merkleRoot = merkleRoot,
size = size,
height = height,
time = time,
medianTime = medianTime,
nonce = nonce,
bits = bits,
chainwork = chainwork,
difficulty = difficulty,
version = version,
extractionMethod = extractionMethod
)
}
}
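The single-column parsers combined above (parseSize, parseMedianTime, parseBits, and so on) are defined elsewhere in BlockParsers.scala and are not part of this diff. A hedged sketch of what they plausibly look like with anorm's SqlParser, with column names mirroring the SELECT statements and the wrapper types assumed:

import anorm.SqlParser.get

// Sketches only; the real definitions are not shown in this commit.
val parseSize = get[Int]("size").map(Size.apply) // assumes Size wraps an Int
val parseVersion = get[Int]("version")
val parseMedianTime = get[Long]("median_time")
val parseNonce = get[Long]("nonce")
val parseBits = get[String]("bits")
val parseChainwork = get[String]("chainwork")
val parseDifficulty = get[BigDecimal]("difficulty")
val parseExtractionMethod = get[String]("extraction_method")
  .map(BlockExtractionMethod.withNameInsensitive) // assumes an enumeratum enum, matching the .entryName call in the writes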
61 changes: 59 additions & 2 deletions server/app/com/xsn/explorer/models/persisted/BlockHeader.scala
@@ -1,6 +1,7 @@
package com.xsn.explorer.models.persisted

import com.xsn.explorer.gcs.GolombCodedSet
import com.xsn.explorer.models.BlockExtractionMethod
import com.xsn.explorer.models.values._
import io.scalaland.chimney.dsl._
import play.api.libs.json.{Json, Writes}
@@ -9,9 +10,19 @@ sealed trait BlockHeader extends Product with Serializable {

def hash: Blockhash
def previousBlockhash: Option[Blockhash]
def nextBlockhash: Option[Blockhash]
def tposContract: Option[TransactionId]
def merkleRoot: Blockhash
def size: Size
def height: Height
def version: Int
def time: Long
def medianTime: Long
def nonce: Long
def bits: String
def chainwork: String
def difficulty: BigDecimal
def extractionMethod: BlockExtractionMethod

def withFilter(filter: GolombCodedSet): BlockHeader.HasFilter = {
this
@@ -26,17 +37,37 @@ object BlockHeader {
case class Simple(
hash: Blockhash,
previousBlockhash: Option[Blockhash],
nextBlockhash: Option[Blockhash],
tposContract: Option[TransactionId],
merkleRoot: Blockhash,
size: Size,
height: Height,
time: Long
version: Int,
time: Long,
medianTime: Long,
nonce: Long,
bits: String,
chainwork: String,
difficulty: BigDecimal,
extractionMethod: BlockExtractionMethod
) extends BlockHeader

case class HasFilter(
hash: Blockhash,
previousBlockhash: Option[Blockhash],
nextBlockhash: Option[Blockhash],
tposContract: Option[TransactionId],
merkleRoot: Blockhash,
size: Size,
height: Height,
version: Int,
time: Long,
medianTime: Long,
nonce: Long,
bits: String,
chainwork: String,
difficulty: BigDecimal,
extractionMethod: BlockExtractionMethod,
filter: GolombCodedSet
) extends BlockHeader

@@ -49,7 +80,23 @@
)
}

implicit val writes: Writes[BlockHeader] = (obj: BlockHeader) => {
val partialWrites: Writes[BlockHeader] = (obj: BlockHeader) => {
val filterMaybe = obj match {
case x: HasFilter => Some(x.filter)
case _ => Option.empty
}

Json.obj(
"hash" -> obj.hash,
"previousBlockhash" -> obj.previousBlockhash,
"merkleRoot" -> obj.merkleRoot,
"height" -> obj.height,
"time" -> obj.time,
"filter" -> filterMaybe
)
}

val completeWrites: Writes[BlockHeader] = (obj: BlockHeader) => {
val filterMaybe = obj match {
case x: HasFilter => Some(x.filter)
case _ => Option.empty
@@ -58,9 +105,19 @@ object BlockHeader {
Json.obj(
"hash" -> obj.hash,
"previousBlockhash" -> obj.previousBlockhash,
"nextBlockhash" -> obj.nextBlockhash,
"tposContract" -> obj.tposContract,
"merkleRoot" -> obj.merkleRoot,
"size" -> obj.size,
"height" -> obj.height,
"version" -> obj.version,
"time" -> obj.time,
"medianTime" -> obj.medianTime,
"nonce" -> obj.nonce,
"bits" -> obj.bits,
"chainwork" -> obj.chainwork,
"difficulty" -> obj.difficulty,
"extractionMethod" -> obj.extractionMethod.entryName,
"filter" -> filterMaybe
)
}
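Since the single implicit writes is split into partialWrites and completeWrites, callers now choose a serializer explicitly. A minimal usage sketch (the method names here are illustrative, not part of the commit):

import play.api.libs.json.{JsValue, Json}

// The light form keeps the original five fields plus the filter;
// the complete form exposes every persisted column.
def renderLight(header: BlockHeader): JsValue = Json.toJson(header)(BlockHeader.partialWrites)
def renderFull(header: BlockHeader): JsValue = Json.toJson(header)(BlockHeader.completeWrites)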
3 changes: 3 additions & 0 deletions server/app/controllers/BlocksController.scala
@@ -4,6 +4,7 @@ import com.alexitc.playsonify.core.FutureOr.Implicits.FutureOps
import com.alexitc.playsonify.models.ordering.OrderingQuery
import com.alexitc.playsonify.models.pagination.{Limit, Offset, PaginatedQuery}
import com.xsn.explorer.models.LightWalletTransaction
import com.xsn.explorer.models.persisted.BlockHeader
import com.xsn.explorer.models.values.Height
import com.xsn.explorer.services.{BlockService, TransactionService}
import controllers.common.{Codecs, MyJsonController, MyJsonControllerComponents}
@@ -26,6 +27,7 @@ class BlocksController @Inject()(
}

def getBlockHeaders(lastSeenHash: Option[String], limit: Int, orderingCondition: String) = public { _ =>
implicit val codec: Writes[BlockHeader] = BlockHeader.partialWrites
blockService
.getBlockHeaders(Limit(limit), lastSeenHash, orderingCondition)
.toFutureOr
@@ -47,6 +49,7 @@
* retrieve the blockHeader by blockhash.
*/
def getBlockHeader(query: String, includeFilter: Boolean) = public { _ =>
implicit val codec: Writes[BlockHeader] = BlockHeader.completeWrites
val (cache, resultF) = Try(query.toInt)
.map(Height.apply)
.map { value =>
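The local implicit val is what steers JSON rendering per endpoint: getBlockHeaders binds partialWrites for lightweight listings, while getBlockHeader binds completeWrites for the full detail view. A rough illustration of the implicit-resolution effect (the surrounding method is hypothetical; the real controller renders through playsonify):

import play.api.libs.json.{JsValue, Json, Writes}

def serializeHeaders(headers: List[BlockHeader]): JsValue = {
  implicit val codec: Writes[BlockHeader] = BlockHeader.partialWrites
  Json.toJson(headers) // Writes[List[BlockHeader]] is derived from the local codec
}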
@@ -318,10 +318,19 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn

private def matches(expected: BlockHeader, result: BlockHeader) = {
result.hash mustEqual expected.hash
result.tposContract mustEqual expected.tposContract
result.nextBlockhash mustEqual expected.nextBlockhash
result.previousBlockhash mustEqual expected.previousBlockhash
result.merkleRoot mustEqual expected.merkleRoot
result.size mustEqual expected.size
result.height mustEqual expected.height
result.version mustEqual expected.version
result.medianTime mustEqual expected.medianTime
result.time mustEqual expected.time
result.bits mustEqual expected.bits
result.chainwork mustEqual expected.chainwork
result.difficulty mustEqual expected.difficulty
result.nonce mustEqual expected.nonce

(expected, result) match {
case (e: BlockHeader.HasFilter, r: BlockHeader.HasFilter) => matchFilter(e.filter, r.filter)
12 changes: 10 additions & 2 deletions server/test/controllers/BlocksControllerSpec.scala
@@ -508,21 +508,29 @@ class BlocksControllerSpec extends MyAPISpec {
val block = expected

(jsonBlockHeader \ "hash").as[Blockhash] mustEqual block.hash
(jsonBlockHeader \ "size").as[Size] mustEqual block.size
(jsonBlockHeader \ "bits").as[String] mustEqual block.bits
(jsonBlockHeader \ "chainwork").as[String] mustEqual block.chainwork
(jsonBlockHeader \ "difficulty").as[BigDecimal] mustEqual block.difficulty
(jsonBlockHeader \ "height").as[Height] mustEqual block.height
(jsonBlockHeader \ "medianTime").as[Long] mustEqual block.medianTime
(jsonBlockHeader \ "time").as[Long] mustEqual block.time
(jsonBlockHeader \ "merkleRoot").as[Blockhash] mustEqual block.merkleRoot
(jsonBlockHeader \ "version").as[Long] mustEqual block.version
(jsonBlockHeader \ "nonce").as[Int] mustEqual block.nonce
(jsonBlockHeader \ "previousBlockhash").asOpt[Blockhash] mustEqual block.previousBlockhash
(jsonBlockHeader \ "nextBlockhash").asOpt[Blockhash] mustEqual block.nextBlockhash

block match {
case BlockHeader.HasFilter(_, _, _, _, _, filter) => {
case BlockHeader.HasFilter(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, filter) => {
val jsonFilter = (jsonBlockHeader \ "filter").as[JsValue]

(jsonFilter \ "n").as[Int] mustEqual filter.n
(jsonFilter \ "m").as[Int] mustEqual filter.m
(jsonFilter \ "p").as[Int] mustEqual filter.p
(jsonFilter \ "hex").as[String] mustEqual filter.hex.string
}
case BlockHeader.Simple(_, _, _, _, _) => ()
case BlockHeader.Simple(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => ()
}
}

