server: Add endpoint "GET /blocks/headers"
AlexITC committed Feb 23, 2019
1 parent ae42e3c commit 2798b52
Showing 11 changed files with 122 additions and 9 deletions.
2 changes: 2 additions & 0 deletions infra/deployment/config/ltc-routes
@@ -14,6 +14,8 @@ GET /addresses/:address controllers.AddressesContro
GET /v2/addresses/:address/transactions controllers.AddressesController.getLightWalletTransactions(address: String, limit: Int ?= 10, lastSeenTxid: Option[String], order: String ?= "desc")

GET /blocks controllers.BlocksController.getLatestBlocks()
GET /blocks/headers controllers.BlocksController.getBlockHeaders(lastSeenHash: Option[String], limit: Int ?= 10)

GET /blocks/:query controllers.BlocksController.getDetails(query: String)
GET /blocks/:query/raw controllers.BlocksController.getRawBlock(query: String)
GET /v2/blocks/:blockhash/transactions controllers.BlocksController.getTransactionsV2(blockhash: String, limit: Int ?= 10, lastSeenTxid: Option[String])
6 changes: 4 additions & 2 deletions server/app/com/xsn/explorer/data/BlockDataHandler.scala
@@ -2,9 +2,9 @@ package com.xsn.explorer.data

import com.alexitc.playsonify.core.ApplicationResult
import com.alexitc.playsonify.models.ordering.FieldOrdering
import com.alexitc.playsonify.models.pagination.{PaginatedQuery, PaginatedResult}
import com.alexitc.playsonify.models.pagination.{Limit, PaginatedQuery, PaginatedResult}
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.persisted.Block
import com.xsn.explorer.models.persisted.{Block, BlockHeader}
import com.xsn.explorer.models.values.{Blockhash, Height}

import scala.language.higherKinds
@@ -24,6 +24,8 @@ trait BlockDataHandler[F[_]] {
  def getLatestBlock(): F[Block]

  def getFirstBlock(): F[Block]

  def getHeaders(limit: Limit, lastSeenHash: Option[Blockhash]): F[List[BlockHeader]]
}

trait BlockBlockingDataHandler extends BlockDataHandler[ApplicationResult]
server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala
@@ -2,12 +2,13 @@ package com.xsn.explorer.data.anorm

import com.alexitc.playsonify.core.ApplicationResult
import com.alexitc.playsonify.models.ordering.FieldOrdering
import com.alexitc.playsonify.models.pagination
import com.alexitc.playsonify.models.pagination.{PaginatedQuery, PaginatedResult}
import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.errors._
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.persisted.Block
import com.xsn.explorer.models.persisted.{Block, BlockHeader}
import com.xsn.explorer.models.values.{Blockhash, Height}
import javax.inject.Inject
import org.scalactic.{Good, One, Or}
@@ -54,4 +55,17 @@ class BlockPostgresDataHandler @Inject() (
    val maybe = blockPostgresDAO.getFirstBlock
    Or.from(maybe, One(BlockNotFoundError))
  }

  override def getHeaders(
      limit: pagination.Limit,
      lastSeenHash: Option[Blockhash]): ApplicationResult[List[BlockHeader]] = withConnection { implicit conn =>

    val result = lastSeenHash
      .map { hash =>
        blockPostgresDAO.getHeaders(hash, limit)
      }
      .getOrElse { blockPostgresDAO.getHeaders(limit) }

    Good(result)
  }
}
server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
@@ -8,7 +8,7 @@ import com.alexitc.playsonify.models.pagination.{Count, Limit, Offset, Paginated
import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.anorm.parsers.BlockParsers._
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.persisted.Block
import com.xsn.explorer.models.persisted.{Block, BlockHeader}
import com.xsn.explorer.models.values.{Blockhash, Height}
import javax.inject.Inject

@@ -150,4 +150,37 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
    val ordering = FieldOrdering(BlockField.Height, OrderingCondition.AscendingOrder)
    getBy(query, ordering).headOption
  }

  def getHeaders(limit: Limit)(implicit conn: Connection): List[BlockHeader] = {
    SQL(
      """
        |SELECT blockhash, previous_blockhash, merkle_root, height, time
        |FROM blocks
        |ORDER BY height
        |LIMIT {limit}
      """.stripMargin
    ).on(
      'limit -> limit.int
    ).as(parseHeader.*)
  }

  def getHeaders(lastSeenHash: Blockhash, limit: Limit)(implicit conn: Connection): List[BlockHeader] = {
    SQL(
      """
        |WITH CTE AS (
        |  SELECT height as lastSeenHeight
        |  FROM blocks
        |  WHERE blockhash = {lastSeenHash}
        |)
        |SELECT b.blockhash, b.previous_blockhash, b.merkle_root, b.height, b.time
        |FROM CTE CROSS JOIN blocks b
        |WHERE b.height > lastSeenHeight
        |ORDER BY height
        |LIMIT {limit}
      """.stripMargin
    ).on(
      'lastSeenHash -> lastSeenHash.string,
      'limit -> limit.int
    ).as(parseHeader.*)
  }
}
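A note on the second overload: this is keyset (seek) pagination rather than OFFSET-based paging. The CTE resolves the height of the last block the client saw, and the outer query returns the next `limit` blocks strictly above that height, so each page costs an index lookup plus a range scan instead of skipping rows. A minimal sketch of how a caller might page through headers with these two methods (a hypothetical helper, not part of this commit, assuming an implicit `Connection` as provided by `withConnection`):

import java.sql.Connection
import com.alexitc.playsonify.models.pagination.Limit
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.models.persisted.BlockHeader

// Hypothetical helper: fetch the first two pages of headers.
def firstTwoPages(dao: BlockPostgresDAO)(implicit conn: Connection): List[BlockHeader] = {
  val page1 = dao.getHeaders(Limit(10)) // blocks ordered by height, from the start of the chain

  // key the next page by the hash of the last header we saw
  val page2 = page1.lastOption
    .map(last => dao.getHeaders(last.hash, Limit(10)))
    .getOrElse(List.empty)

  page1 ++ page2
}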
server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala
@@ -3,7 +3,7 @@ package com.xsn.explorer.data.anorm.parsers
import anorm.SqlParser._
import anorm._
import com.xsn.explorer.models._
import com.xsn.explorer.models.persisted.Block
import com.xsn.explorer.models.persisted.{Block, BlockHeader}
import com.xsn.explorer.models.values._

object BlockParsers {
@@ -90,4 +90,9 @@ object BlockParsers {
extractionMethod = extractionMethod
)
}

  val parseHeader = (parseBlockhash ~ parsePreviousBlockhash.? ~ parseMerkleRoot ~ parseHeight ~ parseTime).map {
    case blockhash ~ previousBlockhash ~ merkleRoot ~ height ~ time =>
      BlockHeader(blockhash, previousBlockhash, merkleRoot, height, time)
  }
}
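For context, `parseHeader` is an anorm `RowParser[BlockHeader]`: each `parse*` piece reads one column, `~` chains them, and `.?` makes `previous_blockhash` optional (the genesis block has no parent). A consumer turns it into a result-set parser with `.*` (all rows, as in the DAO above) or `.singleOpt` (at most one row); a minimal sketch of the latter, using a hypothetical helper that is not part of this commit:

import java.sql.Connection
import anorm._
import com.xsn.explorer.data.anorm.parsers.BlockParsers.parseHeader
import com.xsn.explorer.models.persisted.BlockHeader

// Hypothetical helper: fetch a single header by hash, assuming an implicit Connection is in scope.
def findHeader(hash: String)(implicit conn: Connection): Option[BlockHeader] = {
  SQL("SELECT blockhash, previous_blockhash, merkle_root, height, time FROM blocks WHERE blockhash = {hash}")
    .on('hash -> hash)
    .as(parseHeader.singleOpt)
}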
server/app/com/xsn/explorer/data/async/BlockFutureDataHandler.scala
@@ -2,11 +2,12 @@ package com.xsn.explorer.data.async

import com.alexitc.playsonify.core.{FutureApplicationResult, FuturePaginatedResult}
import com.alexitc.playsonify.models.ordering.FieldOrdering
import com.alexitc.playsonify.models.pagination
import com.alexitc.playsonify.models.pagination.PaginatedQuery
import com.xsn.explorer.data.{BlockBlockingDataHandler, BlockDataHandler}
import com.xsn.explorer.executors.DatabaseExecutionContext
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.persisted.Block
import com.xsn.explorer.models.persisted.{Block, BlockHeader}
import com.xsn.explorer.models.values.{Blockhash, Height}
import javax.inject.Inject

@@ -40,4 +41,8 @@ class BlockFutureDataHandler @Inject() (
  override def getFirstBlock(): FutureApplicationResult[Block] = Future {
    blockBlockingDataHandler.getFirstBlock()
  }

  override def getHeaders(limit: pagination.Limit, lastSeenHash: Option[Blockhash]): FutureApplicationResult[List[BlockHeader]] = Future {
    blockBlockingDataHandler.getHeaders(limit, lastSeenHash)
  }
}
16 changes: 16 additions & 0 deletions server/app/com/xsn/explorer/models/persisted/BlockHeader.scala
@@ -0,0 +1,16 @@
package com.xsn.explorer.models.persisted

import com.xsn.explorer.models.values._
import play.api.libs.json.{Json, Writes}

case class BlockHeader(
    hash: Blockhash,
    previousBlockhash: Option[Blockhash],
    merkleRoot: Blockhash,
    height: Height,
    time: Long)

object BlockHeader {

  implicit val writes: Writes[BlockHeader] = Json.writes[BlockHeader]
}
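`Json.writes[BlockHeader]` derives a serializer whose keys mirror the case class fields, and `previousBlockhash` is simply omitted when it is `None` (the genesis block). How `Blockhash` and `Height` themselves render depends on their own `Writes` instances, which live elsewhere in the codebase and are not part of this commit; assuming they serialize to a hex string and a number, the output looks roughly like the comment below:

import play.api.libs.json.{JsValue, Json}
import com.xsn.explorer.models.persisted.BlockHeader

// Uses the Writes derived above.
def render(header: BlockHeader): JsValue = Json.toJson(header)

// Expected shape (field encodings assumed, see note above):
// {
//   "hash": "000000000000...",
//   "previousBlockhash": "000000000000...",
//   "merkleRoot": "4a5e1e4baab8...",
//   "height": 12,
//   "time": 1550880000
// }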
29 changes: 27 additions & 2 deletions server/app/com/xsn/explorer/services/BlockService.scala
@@ -2,24 +2,49 @@ package com.xsn.explorer.services

import com.alexitc.playsonify.core.FutureApplicationResult
import com.alexitc.playsonify.core.FutureOr.Implicits.{FutureOps, OrOps}
import com.xsn.explorer.errors.BlockRewardsNotFoundError
import com.alexitc.playsonify.models.pagination.{Limit, Offset, PaginatedQuery}
import com.alexitc.playsonify.validators.PaginatedQueryValidator
import com.xsn.explorer.data.async.BlockFutureDataHandler
import com.xsn.explorer.errors.{BlockRewardsNotFoundError, BlockhashFormatError}
import com.xsn.explorer.models._
import com.xsn.explorer.models.rpc.{Block, TransactionVIN}
import com.xsn.explorer.models.values.{Blockhash, Height}
import com.xsn.explorer.services.logic.{BlockLogic, TransactionLogic}
import com.xsn.explorer.util.Extensions.FutureOrExt
import javax.inject.Inject
import org.scalactic.{Bad, Good}
import org.scalactic.{Bad, Good, One, Or}
import play.api.libs.json.JsValue

import scala.concurrent.{ExecutionContext, Future}

class BlockService @Inject() (
    xsnService: XSNService,
    blockDataHandler: BlockFutureDataHandler,
    paginatedQueryValidator: PaginatedQueryValidator,
    blockLogic: BlockLogic,
    transactionLogic: TransactionLogic)(
    implicit ec: ExecutionContext) {

  private val maxHeadersPerQuery = 100

  def getBlockHeaders(limit: Limit, lastSeenHashString: Option[String]): FutureApplicationResult[WrappedResult[List[persisted.BlockHeader]]] = {
    val result = for {
      lastSeenHash <- {
        lastSeenHashString
          .map(Blockhash.from)
          .map { txid => Or.from(txid, One(BlockhashFormatError)).map(Option.apply) }
          .getOrElse(Good(Option.empty))
          .toFutureOr
      }

      _ <- paginatedQueryValidator.validate(PaginatedQuery(Offset(0), limit), maxHeadersPerQuery).toFutureOr

      headers <- blockDataHandler.getHeaders(limit, lastSeenHash).toFutureOr
    } yield WrappedResult(headers)

    result.toFuture
  }

  def getRawBlock(blockhashString: String): FutureApplicationResult[JsValue] = {
    val result = for {
      blockhash <- blockLogic
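A quick note on `getBlockHeaders`: it validates the `lastSeenHash` format first (a malformed hash short-circuits with `BlockhashFormatError`), then reuses `PaginatedQueryValidator` with a dummy `Offset(0)` to cap `limit` at `maxHeadersPerQuery` (100), and only then calls the data handler, wrapping the list in `WrappedResult`. A hedged sketch of the call shapes, using a hypothetical helper that is not part of this commit:

import com.alexitc.playsonify.models.pagination.Limit
import com.xsn.explorer.services.BlockService

// Hypothetical usage: the service instance is assumed to be wired as in the controller below.
def examples(blockService: BlockService) = {
  blockService.getBlockHeaders(Limit(10), lastSeenHashString = None)   // first page
  blockService.getBlockHeaders(Limit(10), Some("not-a-hash"))          // fails fast with BlockhashFormatError
  blockService.getBlockHeaders(Limit(1000), lastSeenHashString = None) // rejected by the paginated query validator
}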
4 changes: 4 additions & 0 deletions server/app/controllers/BlocksController.scala
@@ -21,6 +21,10 @@ class BlocksController @Inject() (
    blockService.getLatestBlocks()
  }

  def getBlockHeaders(lastSeenHash: Option[String], limit: Int) = public { _ =>
    blockService.getBlockHeaders(Limit(limit), lastSeenHash)
  }

/**
* Try to retrieve a block by height, in case the query argument
* is not a valid height, we assume it might be a blockhash and try to
2 changes: 2 additions & 0 deletions server/conf/routes
@@ -16,6 +16,8 @@ GET /v2/addresses/:address/transactions controllers.AddressesContro
GET /addresses/:address/utxos controllers.AddressesController.getUnspentOutputs(address: String)

GET /blocks controllers.BlocksController.getLatestBlocks()
GET /blocks/headers controllers.BlocksController.getBlockHeaders(lastSeenHash: Option[String], limit: Int ?= 10)

GET /blocks/:query controllers.BlocksController.getDetails(query: String)
GET /blocks/:query/raw controllers.BlocksController.getRawBlock(query: String)
GET /blocks/:blockhash/transactions controllers.BlocksController.getTransactions(blockhash: String, offset: Int ?= 0, limit: Int ?= 10, orderBy: String ?= "")
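With the route wired in both route files, the endpoint takes an optional `lastSeenHash` plus a `limit` defaulting to 10 (and capped at 100 by the service). A hedged controller-level sketch using Play's standard test helpers; this project's actual test setup may differ, and `app` is whatever running `Application` the test framework provides:

import play.api.Application
import play.api.test.FakeRequest
import play.api.test.Helpers._

// Hypothetical test snippet, not part of this commit.
def fetchTwoPages(app: Application) = {
  // first page of headers
  val first = route(app, FakeRequest(GET, "/blocks/headers?limit=10")).get

  // next page, keyed by the hash of the last header returned by the first page
  val next = route(app, FakeRequest(GET, "/blocks/headers?limit=10&lastSeenHash=<hash-from-first-page>")).get

  (status(first), status(next)) // expected to be (200, 200) once blocks are synced
}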
LedgerSynchronizerServiceSpec.scala
@@ -215,7 +215,12 @@ class LedgerSynchronizerServiceSpec extends PostgresDataHandlerSpec with BeforeA
new TransactionOrderingParser,
new TransactionFutureDataHandler(transactionDataHandler)(Executors.databaseEC))

val blockService = new BlockService(xsnService, new BlockLogic, new TransactionLogic)
val blockService = new BlockService(
xsnService,
new BlockFutureDataHandler(blockDataHandler)(Executors.databaseEC),
new PaginatedQueryValidator,
new BlockLogic,
new TransactionLogic)
val transactionRPCService = new TransactionRPCService(xsnService)
new LedgerSynchronizerService(
xsnService,
