Fixed indexer issues #1971

Merged 2 commits on Apr 12, 2023

@@ -29,8 +29,10 @@ case class BalanceInfo() extends ScorexLogging {
*/
def retreiveAdditionalTokenInfo(history: ErgoHistoryReader): BalanceInfo = {
additionalTokenInfo ++= tokens.map(token => {
-      val iT: IndexedToken = history.typedExtraIndexById[IndexedToken](uniqueId(token._1)).get
-      (token._1,(iT.name,iT.decimals))
+      history.typedExtraIndexById[IndexedToken](uniqueId(token._1)) match {
+        case Some(iT) => (token._1,(iT.name,iT.decimals))
+        case None => (token._1,("", 0))
+      }
})
this
}
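The BalanceInfo change above replaces an unconditional `.get` with a match, so building a balance no longer fails when a token has no entry in the extra index. A minimal self-contained sketch of that fallback pattern; the `Map` below is only a stand-in for `history.typedExtraIndexById[IndexedToken]`, and the ids and names are made up:

```scala
object TokenInfoFallbackSketch extends App {
  // hypothetical stand-in for the extra-index lookup by token id
  val indexedTokens: Map[String, (String, Int)] = Map("1111" -> ("SigUSD", 2))

  def tokenInfo(tokenId: String): (String, Int) =
    indexedTokens.get(tokenId) match {
      case Some((name, decimals)) => (name, decimals) // index entry found
      case None                   => ("", 0)          // not indexed: empty name, 0 decimals
    }

  println(tokenInfo("1111")) // (SigUSD,2)
  println(tokenInfo("2222")) // (,0) instead of a NoSuchElementException from .get
}
```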
@@ -11,7 +11,6 @@ import org.ergoplatform.nodeView.history.extra.ExtraIndexer.{GlobalBoxIndexKey,
import org.ergoplatform.nodeView.history.{ErgoHistory, ErgoHistoryReader}
import org.ergoplatform.nodeView.history.extra.ExtraIndexer.ReceivableMessages.{GetSegmentTreshold, StartExtraIndexer}
import org.ergoplatform.nodeView.history.extra.IndexedErgoAddressSerializer.hashErgoTree
-import org.ergoplatform.nodeView.history.extra.IndexedTokenSerializer.tokenRegistersSet
import org.ergoplatform.nodeView.history.storage.HistoryStorage
import org.ergoplatform.settings.{Algos, CacheSettings, ChainSettings}
import scorex.util.{ModifierId, ScorexLogging, bytesToId}
@@ -208,11 +207,10 @@ trait ExtraIndexerBase extends ScorexLogging {
findAndUpdateTree(hashErgoTree(box.ergoTree), Right(boxes(boxId)))

// check if box is creating a new token, if yes record it
-      if(tokenRegistersSet(box))
-        cfor(0)(_ < box.additionalTokens.length, _ + 1) { j =>
-          if (!tokens.exists(x => java.util.Arrays.equals(x._1, box.additionalTokens(j)._1)))
-            general += IndexedToken.fromBox(box, j)
-        }
+      cfor(0)(_ < box.additionalTokens.length, _ + 1) { j =>
+        if (!tokens.exists(x => java.util.Arrays.equals(x._1, box.additionalTokens(j)._1)))
+          general += IndexedToken.fromBox(box, j)
+      }

globalBoxIndex += 1
boxCount += 1
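With the `tokenRegistersSet` guard gone, every box's `additionalTokens` are scanned, and the only thing preventing duplicate entries is the `java.util.Arrays.equals` check. That check is needed because token ids here are `Array[Byte]`, which Scala compares by reference; a rough illustrative sketch:

```scala
import scala.collection.mutable.ArrayBuffer

object ArrayKeyDedupSketch extends App {
  val a: Array[Byte] = Array(1, 2, 3)
  val b: Array[Byte] = Array(1, 2, 3) // same token id, different array instance

  println(a == b)                        // false: arrays compare by reference
  println(java.util.Arrays.equals(a, b)) // true: element-wise comparison

  // the dedup pattern used above: record a token only if its id was not seen yet
  val seen = ArrayBuffer[Array[Byte]](a)
  if (!seen.exists(x => java.util.Arrays.equals(x, b))) seen += b
  println(seen.length)                   // 1
}
```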
@@ -310,14 +308,13 @@ trait ExtraIndexerBase extends ScorexLogging {
val iEb: IndexedErgoBox = NumericBoxIndex.getBoxByNumber(history, globalBoxIndex).get
val address: IndexedErgoAddress = history.typedExtraIndexById[IndexedErgoAddress](hashErgoTree(iEb.box.ergoTree)).get
address.spendBox(iEb)
-      if(tokenRegistersSet(iEb.box))
-        cfor(0)(_ < iEb.box.additionalTokens.length, _ + 1) { i =>
-          history.typedExtraIndexById[IndexedToken](IndexedToken.fromBox(iEb.box, i).id) match {
-            case Some(token) if token.boxId == iEb.id =>
-              toRemove += token.id // token created, delete
-            case None => // no token created
-          }
+      cfor(0)(_ < iEb.box.additionalTokens.length, _ + 1) { i =>
+        history.typedExtraIndexById[IndexedToken](IndexedToken.fromBox(iEb.box, i).id) match {
+          case Some(token) if token.boxId == iEb.id =>
+            toRemove += token.id // token created, delete
+          case None => // no token created
+        }
}
address.rollback(txTarget, boxTarget, _history)
toRemove += iEb.id // box by id
toRemove += bytesToId(NumericBoxIndex.indexToBytes(globalBoxIndex)) // box id by number
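The rollback branch mirrors this: every token sitting in a rolled-back box is looked up, and its index entry is deleted only if that entry points back at this box, i.e. the box being removed is the one that minted the token. A sketch of just that decision, with an illustrative case class and map rather than the node's types:

```scala
object RollbackTokenSketch extends App {
  final case class TokenIdx(tokenId: String, boxId: String)

  // hypothetical extra-index content: token t1 was minted by box42
  val tokenIndex: Map[String, TokenIdx] = Map("t1" -> TokenIdx("t1", "box42"))

  def tokensToRemove(boxId: String, boxTokenIds: Seq[String]): Seq[String] =
    boxTokenIds.flatMap { tid =>
      tokenIndex.get(tid) match {
        case Some(t) if t.boxId == boxId => Some(t.tokenId) // minted here: delete with the box
        case _                           => None            // minted elsewhere or never indexed
      }
    }

  println(tokensToRemove("box42", Seq("t1", "t2"))) // List(t1)
  println(tokensToRemove("box99", Seq("t1")))       // List()
}
```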
@@ -434,7 +431,7 @@ object ExtraIndexer {
x
}

-  val NewestVersion: Int = 1
+  val NewestVersion: Int = 2
val NewestVersionBytes: Array[Byte] = ByteBuffer.allocate(4).putInt(NewestVersion).array

val IndexedHeightKey: Array[Byte] = Algos.hash("indexed height")
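`NewestVersion` is bumped from 1 to 2 so that existing databases, whose stored version bytes no longer match `NewestVersionBytes`, can be detected and rebuilt with the new token handling; the reindex trigger itself is outside this diff and only assumed here. The encoding is just a big-endian 4-byte int:

```scala
import java.nio.ByteBuffer

object VersionBytesSketch extends App {
  val NewestVersion: Int = 2
  val NewestVersionBytes: Array[Byte] = ByteBuffer.allocate(4).putInt(NewestVersion).array

  // e.g. version bytes written by a node running the previous indexer
  val storedVersionBytes: Array[Byte] = ByteBuffer.allocate(4).putInt(1).array

  println(NewestVersionBytes.mkString("[", ",", "]"))                      // [0,0,0,2]
  println(java.util.Arrays.equals(storedVersionBytes, NewestVersionBytes)) // false: index is stale
}
```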
@@ -363,7 +363,7 @@ object IndexedErgoAddress {
* @return range in "arr" ArrayBuffer
*/
private def slice[T](arr: ArrayBuffer[T], offset: Int, limit: Int): ArrayBuffer[T] =
-    arr.slice(offset, offset + limit)
+    arr.slice(arr.length - limit - offset, arr.length - offset).reverse

/**
* Get an array of transactions with full bodies from an array of numeric transaction indexes
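The slice change flips pagination in IndexedErgoAddress: instead of taking `limit` entries starting `offset` from the oldest end of the buffer, it now takes them `offset` from the newest end and returns them newest-first. A worked example on a small buffer:

```scala
import scala.collection.mutable.ArrayBuffer

object SliceSketch extends App {
  val arr: ArrayBuffer[Int] = ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) // oldest .. newest
  val offset = 0
  val limit = 3

  val oldSlice = arr.slice(offset, offset + limit)                                    // previous behaviour
  val newSlice = arr.slice(arr.length - limit - offset, arr.length - offset).reverse  // this PR

  println(oldSlice) // ArrayBuffer(1, 2, 3)   -> oldest three entries
  println(newSlice) // ArrayBuffer(10, 9, 8)  -> newest three, newest first
}
```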
@@ -3,13 +3,13 @@ package org.ergoplatform.nodeView.history.extra
import org.ergoplatform.ErgoBox
import org.ergoplatform.ErgoBox.{R4, R5, R6}
import org.ergoplatform.nodeView.history.extra.ExtraIndexer.{ExtraIndexTypeId, fastIdToBytes}
-import org.ergoplatform.nodeView.history.extra.IndexedTokenSerializer.{ByteColl, getDecimals, uniqueId}
+import org.ergoplatform.nodeView.history.extra.IndexedTokenSerializer.{ByteColl, uniqueId}
import org.ergoplatform.settings.Algos
import scorex.core.serialization.ScorexSerializer
import scorex.util.{ModifierId, bytesToId}
import scorex.util.serialization.{Reader, Writer}
-import sigmastate.Values.{CollectionConstant, EvaluatedValue}
-import sigmastate.{SByte, SType}
+import sigmastate.Values.CollectionConstant
+import sigmastate.SByte

/**
* Index of a token containing creation information.
@@ -44,49 +44,6 @@ object IndexedTokenSerializer extends ScorexSerializer[IndexedToken] {
*/
def uniqueId(tokenId: ModifierId): ModifierId = bytesToId(Algos.hash(tokenId + "token"))

-  /**
-    * Check if a box is creating a token.
-    * @param box - box to check
-    * @return true if the box is creation a token, false otherwise
-    */
-  def tokenRegistersSet(box: ErgoBox): Boolean = {
-
-    // box has tokens
-    if(box.additionalTokens.length == 0) return false
-
-    // registers exist
-    if(!box.additionalRegisters.contains(R4) ||
-       !box.additionalRegisters.contains(R5) ||
-       !box.additionalRegisters.contains(R6))
-      return false
-
-    // registers correct type
-    try {
-      box.additionalRegisters(R4).asInstanceOf[ByteColl]
-      box.additionalRegisters(R5).asInstanceOf[ByteColl]
-      getDecimals(box.additionalRegisters(R6))
-    }catch {
-      case _: Throwable => return false
-    }
-
-    // ok
-    true
-  }
-
-  /**
-    * Get the number of decimals places from a register.
-    * Try-catch, because some old tokens used Int to store the decimals, rather than Byte Coll
-    * @param reg - register to extract decimals from
-    * @return number of decimals places
-    */
-  def getDecimals(reg: EvaluatedValue[_ <: SType]): Int = {
-    try {
-      new String(reg.asInstanceOf[ByteColl].value.toArray, "UTF-8").toInt
-    }catch {
-      case _: Throwable => reg.value.asInstanceOf[Int]
-    }
-  }
-
override def serialize(iT: IndexedToken, w: Writer): Unit = {
w.putBytes(fastIdToBytes(iT.tokenId))
w.putBytes(fastIdToBytes(iT.boxId))
@@ -118,16 +75,45 @@ object IndexedToken {
val extraIndexTypeId: ExtraIndexTypeId = 35.toByte

/**
-   * Construct a token index from a box. Used after checking box with "tokenRegistersSet".
+   * Construct a token index from a box.
*
* @param box - box to use
* @return token index
*/
-  def fromBox(box: ErgoBox, tokenIndex: Int): IndexedToken =
+  def fromBox(box: ErgoBox, tokenIndex: Int): IndexedToken = {
+    val name: String = try {
+      new String(box.additionalRegisters(R4).asInstanceOf[ByteColl].value.toArray, "UTF-8")
+    }catch {
+      case _: Throwable => ""
+    }
+
+    val description: String = try {
+      new String(box.additionalRegisters(R5).asInstanceOf[ByteColl].value.toArray, "UTF-8")
+    } catch {
+      case _: Throwable => ""
+    }
+
+    val decimals: Int =
+      box.additionalRegisters.get(R6) match {
+        case Some(reg) =>
+          try {
+            new String(reg.asInstanceOf[ByteColl].value.toArray, "UTF-8").toInt
+          } catch {
+            case _: Throwable =>
+              try{
+                reg.value.asInstanceOf[Int]
+              }catch {
+                case _: Throwable => 0
+              }
+          }
+        case None => 0
+      }
+
IndexedToken(bytesToId(box.additionalTokens(tokenIndex)._1),
bytesToId(box.id),
box.additionalTokens(tokenIndex)._2,
-      new String(box.additionalRegisters(R4).asInstanceOf[ByteColl].value.toArray, "UTF-8"),
-      new String(box.additionalRegisters(R5).asInstanceOf[ByteColl].value.toArray, "UTF-8"),
-      getDecimals(box.additionalRegisters(R6)))
+      name,
+      description,
+      decimals)
+  }
}
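The reworked `fromBox` no longer assumes the EIP-4 registers are present and well-typed: name and description fall back to empty strings, and decimals fall back through three cases: a `Coll[Byte]` holding the UTF-8 digits (the EIP-4 form), a plain `Int` used by some early tokens, and 0 when R6 is missing or unreadable. A standalone sketch of that fallback using plain stand-in types rather than sigmastate values:

```scala
object DecimalsFallbackSketch extends App {
  // r6 stands in for the optional R6 register value of a minting box
  def decimalsOf(r6: Option[Any]): Int = r6 match {
    case Some(bytes: Array[Byte]) =>
      try new String(bytes, "UTF-8").toInt // EIP-4: decimals encoded as UTF-8 digits
      catch { case _: Throwable => 0 }     // bytes present but not a number
    case Some(i: Int) => i                 // legacy tokens: plain Int register
    case _            => 0                 // register missing or of another type
  }

  println(decimalsOf(Some("2".getBytes("UTF-8")))) // 2
  println(decimalsOf(Some(4)))                     // 4
  println(decimalsOf(None))                        // 0
}
```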