diff --git a/core/src/main/java/bisq/core/dao/burningman/BurningManService.java b/core/src/main/java/bisq/core/dao/burningman/BurningManService.java
index 1e63bb87f72..9cb4ae808c5 100644
--- a/core/src/main/java/bisq/core/dao/burningman/BurningManService.java
+++ b/core/src/main/java/bisq/core/dao/burningman/BurningManService.java
@@ -42,9 +42,12 @@
 import com.google.common.base.Charsets;
 import com.google.common.collect.ImmutableList;
 
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
@@ -112,6 +115,10 @@ public BurningManService(DaoStateService daoStateService,
     ///////////////////////////////////////////////////////////////////////////////////////////
 
     Map<String, BurningManCandidate> getBurningManCandidatesByName(int chainHeight) {
+        return getBurningManCandidatesByName(chainHeight, !DelayedPayoutTxReceiverService.isProposal412Activated());
+    }
+
+    Map<String, BurningManCandidate> getBurningManCandidatesByName(int chainHeight, boolean limitCappingRounds) {
         Map<String, BurningManCandidate> burningManCandidatesByName = new TreeMap<>();
         Map<P2PDataStorage.ByteArray, Set<TxOutput>> proofOfBurnOpReturnTxOutputByHash = getProofOfBurnOpReturnTxOutputByHash(chainHeight);
 
@@ -187,25 +194,58 @@ Map<String, BurningManCandidate> getBurningManCandidatesByName(int chainHeight)
                 .sum();
         burningManCandidates.forEach(candidate -> candidate.calculateShares(totalDecayedCompensationAmounts, totalDecayedBurnAmounts));
 
+        int numRoundsWithCapsApplied = imposeCaps(burningManCandidates, limitCappingRounds);
+
         double sumAllCappedBurnAmountShares = burningManCandidates.stream()
-                .filter(candidate -> candidate.getBurnAmountShare() >= candidate.getMaxBoostedCompensationShare())
+                .filter(candidate -> candidate.getRoundCapped().isPresent())
                 .mapToDouble(BurningManCandidate::getMaxBoostedCompensationShare)
                 .sum();
         double sumAllNonCappedBurnAmountShares = burningManCandidates.stream()
-                .filter(candidate -> candidate.getBurnAmountShare() < candidate.getMaxBoostedCompensationShare())
+                .filter(candidate -> candidate.getRoundCapped().isEmpty())
                 .mapToDouble(BurningManCandidate::getBurnAmountShare)
                 .sum();
-        burningManCandidates.forEach(candidate -> candidate.calculateCappedAndAdjustedShares(sumAllCappedBurnAmountShares, sumAllNonCappedBurnAmountShares));
+        burningManCandidates.forEach(candidate -> candidate.calculateCappedAndAdjustedShares(
+                sumAllCappedBurnAmountShares, sumAllNonCappedBurnAmountShares, numRoundsWithCapsApplied));
 
         return burningManCandidatesByName;
     }
 
+    private static int imposeCaps(Collection<BurningManCandidate> burningManCandidates, boolean limitCappingRounds) {
+        List<BurningManCandidate> candidatesInDescendingBurnCapRatio = new ArrayList<>(burningManCandidates);
+        candidatesInDescendingBurnCapRatio.sort(Comparator.comparing(BurningManCandidate::getBurnCapRatio).reversed());
+        double thresholdBurnCapRatio = 1.0;
+        double remainingBurnShare = 1.0;
+        double remainingCapShare = 1.0;
+        int cappingRound = 0;
+        for (BurningManCandidate candidate : candidatesInDescendingBurnCapRatio) {
+            double invScaleFactor = remainingBurnShare / remainingCapShare;
+            double burnCapRatio = candidate.getBurnCapRatio();
+            if (remainingCapShare <= 0.0 || burnCapRatio <= 0.0 || burnCapRatio < invScaleFactor ||
+                    limitCappingRounds && burnCapRatio < 1.0) {
+                cappingRound++;
+                break;
+            }
+            if (burnCapRatio < thresholdBurnCapRatio) {
+                thresholdBurnCapRatio = invScaleFactor;
+                cappingRound++;
+            }
+            candidate.imposeCap(cappingRound, candidate.getBurnAmountShare() / thresholdBurnCapRatio);
+            remainingBurnShare -= candidate.getBurnAmountShare();
+            remainingCapShare -= candidate.getMaxBoostedCompensationShare();
+        }
+        return cappingRound;
+    }
+
     String getLegacyBurningManAddress(int chainHeight) {
         return daoStateService.getParamValue(Param.RECIPIENT_BTC_ADDRESS, chainHeight);
     }
 
     Set<BurningManCandidate> getActiveBurningManCandidates(int chainHeight) {
-        return getBurningManCandidatesByName(chainHeight).values().stream()
+        return getActiveBurningManCandidates(chainHeight, !DelayedPayoutTxReceiverService.isProposal412Activated());
+    }
+
+    Set<BurningManCandidate> getActiveBurningManCandidates(int chainHeight, boolean limitCappingRounds) {
+        return getBurningManCandidatesByName(chainHeight, limitCappingRounds).values().stream()
                 .filter(burningManCandidate -> burningManCandidate.getCappedBurnAmountShare() > 0)
                 .filter(candidate -> candidate.getReceiverAddress().isPresent())
                 .collect(Collectors.toSet());
diff --git a/core/src/main/java/bisq/core/dao/burningman/DelayedPayoutTxReceiverService.java b/core/src/main/java/bisq/core/dao/burningman/DelayedPayoutTxReceiverService.java
index 3fb9ffd4859..81bf20fbfb5 100644
--- a/core/src/main/java/bisq/core/dao/burningman/DelayedPayoutTxReceiverService.java
+++ b/core/src/main/java/bisq/core/dao/burningman/DelayedPayoutTxReceiverService.java
@@ -56,11 +56,18 @@ public class DelayedPayoutTxReceiverService implements DaoStateListener {
     // requests change address.
     // See: https://github.com/bisq-network/bisq/issues/6699
     public static final Date BUGFIX_6699_ACTIVATION_DATE = Utilities.getUTCDate(2023, GregorianCalendar.JULY, 24);
+    // See: https://github.com/bisq-network/proposals/issues/412
+    public static final Date PROPOSAL_412_ACTIVATION_DATE = Utilities.getUTCDate(2024, GregorianCalendar.JANUARY, 1);
 
     public static boolean isBugfix6699Activated() {
         return new Date().after(BUGFIX_6699_ACTIVATION_DATE);
     }
 
+    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
+    public static boolean isProposal412Activated() {
+        return new Date().after(PROPOSAL_412_ACTIVATION_DATE);
+    }
+
     // We don't allow to get further back than 767950 (the block height from Dec. 18th 2022).
     static final int MIN_SNAPSHOT_HEIGHT = Config.baseCurrencyNetwork().isRegtest() ? 0 : 767950;
 
diff --git a/core/src/main/java/bisq/core/dao/burningman/model/BurningManCandidate.java b/core/src/main/java/bisq/core/dao/burningman/model/BurningManCandidate.java
index 7922febfb0a..ed65a207df0 100644
--- a/core/src/main/java/bisq/core/dao/burningman/model/BurningManCandidate.java
+++ b/core/src/main/java/bisq/core/dao/burningman/model/BurningManCandidate.java
@@ -28,6 +28,7 @@
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Optional;
+import java.util.OptionalInt;
 import java.util.Set;
 
 import java.util.stream.Collectors;
@@ -68,6 +69,7 @@ public class BurningManCandidate {
     // The burnAmountShare adjusted in case there are cappedBurnAmountShare.
     // We redistribute the over-burned amounts to the group of not capped candidates.
     protected double adjustedBurnAmountShare;
+    private OptionalInt roundCapped = OptionalInt.empty();
 
     public BurningManCandidate() {
     }
@@ -142,11 +144,19 @@ public void calculateShares(double totalDecayedCompensationAmounts, double total
         burnAmountShare = totalDecayedBurnAmounts > 0 ? accumulatedDecayedBurnAmount / totalDecayedBurnAmounts : 0;
     }
 
+    public void imposeCap(int cappingRound, double adjustedBurnAmountShare) {
+        roundCapped = OptionalInt.of(cappingRound);
+        // NOTE: The adjusted burn share set here will not affect the final capped burn share, only
+        // the presentation service, so we need not worry about rounding errors affecting consensus.
+        this.adjustedBurnAmountShare = adjustedBurnAmountShare;
+    }
+
     public void calculateCappedAndAdjustedShares(double sumAllCappedBurnAmountShares,
-                                                 double sumAllNonCappedBurnAmountShares) {
+                                                 double sumAllNonCappedBurnAmountShares,
+                                                 int numAppliedCappingRounds) {
         double maxBoostedCompensationShare = getMaxBoostedCompensationShare();
-        adjustedBurnAmountShare = burnAmountShare;
-        if (burnAmountShare < maxBoostedCompensationShare) {
+        if (roundCapped.isEmpty()) {
+            adjustedBurnAmountShare = burnAmountShare;
             if (sumAllCappedBurnAmountShares == 0) {
                 // If no one is capped we do not need to do any adjustment
                 cappedBurnAmountShare = burnAmountShare;
@@ -165,7 +175,11 @@ public void calculateCappedAndAdjustedShares(double sumAllCappedBurnAmountShares
                 } else {
                     // We exceeded the cap by the adjustment. This will lead to the legacy BM getting the
                     // difference of the adjusted amount and the maxBoostedCompensationShare.
+                    // NOTE: When the number of capping rounds are unlimited (that is post- Proposal 412
+                    // activation), we should only get to this branch as a result of floating point rounding
+                    // errors. In that case, the extra amount the LBM gets is negligible.
                     cappedBurnAmountShare = maxBoostedCompensationShare;
+                    roundCapped = OptionalInt.of(roundCapped.orElse(numAppliedCappingRounds));
                 }
             }
 
@@ -174,6 +188,12 @@ public void calculateCappedAndAdjustedShares(double sumAllCappedBurnAmountShares
         }
     }
 
+    public double getBurnCapRatio() {
+        // NOTE: This is less than 1.0 precisely when burnAmountShare < maxBoostedCompensationShare,
+        // in spite of any floating point rounding errors, since 1.0 is proportionately at least as
+        // close to the previous double as any two consecutive nonzero doubles on the number line.
+        return burnAmountShare > 0.0 ? burnAmountShare / getMaxBoostedCompensationShare() : 0.0;
+    }
     public double getMaxBoostedCompensationShare() {
         return Math.min(BurningManService.MAX_BURN_SHARE, compensationShare * BurningManService.ISSUANCE_BOOST_FACTOR);
     }
@@ -194,6 +214,7 @@ public String toString() {
                 ",\r\n burnAmountShare=" + burnAmountShare +
                 ",\r\n cappedBurnAmountShare=" + cappedBurnAmountShare +
                 ",\r\n adjustedBurnAmountShare=" + adjustedBurnAmountShare +
+                ",\r\n roundCapped=" + roundCapped +
                 "\r\n}";
     }
 }
diff --git a/core/src/main/java/bisq/core/dao/burningman/model/LegacyBurningMan.java b/core/src/main/java/bisq/core/dao/burningman/model/LegacyBurningMan.java
index 0089c8c7637..2a1be4d4c2f 100644
--- a/core/src/main/java/bisq/core/dao/burningman/model/LegacyBurningMan.java
+++ b/core/src/main/java/bisq/core/dao/burningman/model/LegacyBurningMan.java
@@ -48,9 +48,15 @@ public void calculateShares(double totalDecayedCompensationAmounts, double total
         // do nothing
     }
 
+    @Override
+    public void imposeCap(int cappingRound, double adjustedBurnAmountShare) {
+        // do nothing
+    }
+
     @Override
     public void calculateCappedAndAdjustedShares(double sumAllCappedBurnAmountShares,
-                                                 double sumAllNonCappedBurnAmountShares) {
+                                                 double sumAllNonCappedBurnAmountShares,
+                                                 int numAppliedCappingRounds) {
         // do nothing
     }
 
diff --git a/core/src/test/java/bisq/core/dao/burningman/BurningManServiceTest.java b/core/src/test/java/bisq/core/dao/burningman/BurningManServiceTest.java
index 460025436ee..de3a29d25c0 100644
--- a/core/src/test/java/bisq/core/dao/burningman/BurningManServiceTest.java
+++ b/core/src/test/java/bisq/core/dao/burningman/BurningManServiceTest.java
@@ -58,6 +58,8 @@
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Nested;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.jupiter.api.Assertions.assertAll;
@@ -128,8 +130,9 @@ private void addCompensationIssuanceAndPayloads(Tuple2
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, 100000)
         ).collect(Collectors.toList()));
@@ -196,7 +209,7 @@ public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_noneCapped_
                 proofOfBurnTx("alice" + i, "100" + i, 760000, 400000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         assertAll(IntStream.range(0, 10).mapToObj(i -> () -> {
             var candidate = candidateMap.get("alice" + i);
@@ -204,11 +217,13 @@ public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_noneCapped_
             assertEquals(0.1, candidate.getBurnAmountShare());
             assertEquals(0.1, candidate.getAdjustedBurnAmountShare(), 1e-10);
             assertEquals(0.1, candidate.getCappedBurnAmountShare());
+            assertEquals(-1, candidate.getRoundCapped().orElse(-1));
         }));
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_twoCappingRoundsNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_twoCappingRoundsNeeded(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, 100000)
         ).collect(Collectors.toList()));
@@ -217,7 +232,7 @@ public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_
                 proofOfBurnTx("alice" + i, "100" + i, 760000, i < 6 ? 400000 : 200000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         // Note the expected rounding error below. To prevent DPT verification failures, the
         // capping algorithm output must be well defined to the nearest floating point ULP.
@@ -227,6 +242,7 @@ public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_
             assertEquals(i < 6 ? 0.125 : 0.0625, candidate.getBurnAmountShare());
             assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
             assertEquals(i < 6 ? 0.11 : 0.08499999999999999, candidate.getCappedBurnAmountShare());
+            assertEquals(i < 6 ? 0 : -1, candidate.getRoundCapped().orElse(-1));
         }));
         // Only two capping rounds were required to achieve a burn share total of 100%, so
         // nothing goes to the LBM in this case.
@@ -234,8 +250,9 @@ public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_
         assertEquals(1.0, burnShareTotal);
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_threeCappingRoundsNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_threeCappingRoundsNeeded(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, i < 8 ? 123250 : 7000)
         ).collect(Collectors.toList()));
@@ -244,29 +261,38 @@ public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_
                 proofOfBurnTx("alice" + i, "100" + i, 760000, i < 6 ? 400000 : 200000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
-        // Note the expected rounding error below. To prevent DPT verification failures, the
+        // Note the expected rounding errors below. To prevent DPT verification failures, the
         // capping algorithm output must be well defined to the nearest floating point ULP.
         assertAll(IntStream.range(0, 10).mapToObj(i -> () -> {
             var candidate = candidateMap.get("alice" + i);
             assertEquals(i < 8 ? 0.11 : 0.07, candidate.getMaxBoostedCompensationShare());
             assertEquals(i < 6 ? 0.125 : 0.0625, candidate.getBurnAmountShare());
-            assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
-            assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+            if (limitCappingRounds) {
+                assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+            } else {
+                assertEquals(i < 6 ? 0.125 : i < 8 ? 0.1 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(i < 6 ? 0.11 : i < 8 ? 0.09999999999999998 : 0.07, candidate.getCappedBurnAmountShare());
+            }
+            assertEquals(i < 6 ? 0 : i < 8 ? -1 : 1, candidate.getRoundCapped().orElse(-1));
         }));
-        // Three capping rounds would have been required to achieve a burn share total of
-        // 100%, but our capping algorithm only applies two, so 3% ends up going to the LBM
-        // in this case, instead of being distributed between `alice6` & `alice7`. The caps
-        // sum to more than 100%, however, so we could have avoided giving him any.
+        // Three capping rounds are required to achieve a burn share total of 100%, but our
+        // algorithm only applies two when `limitCappingRounds` is true (that is, prior to
+        // the activation of the capping algorithm change), so 3% ends up going to the LBM in
+        // that case, instead of being distributed between `alice6` & `alice7`. The caps sum
+        // to more than 100%, however, so we could have avoided giving him any.
         double capTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getMaxBoostedCompensationShare).sum();
         double burnShareTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getCappedBurnAmountShare).sum();
         assertEquals(1.02, capTotal);
-        assertEquals(0.97, burnShareTotal);
+        assertEquals(limitCappingRounds ? 0.97 : 1.0, burnShareTotal);
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allShouldBeCapped_fourCappingRoundsNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allShouldBeCapped_fourCappingRoundsNeeded(
+            boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, i < 6 ? 483200 : i == 6 ? 31800 : i == 7 ? 27000 : 21000)
         ).collect(Collectors.toList()));
@@ -276,7 +302,7 @@ public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allShouldBe
                 proofOfBurnTx("alice" + i, "100" + i, 760000, i < 6 ? 400000 : 200000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         // Note the expected rounding error below. To prevent DPT verification failures, the
         // capping algorithm output must be well defined to the nearest floating point ULP.
@@ -284,18 +310,26 @@ public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allShouldBe
             var candidate = candidateMap.get("alice" + i);
             assertEquals(i < 6 ? 0.11 : i == 6 ? 0.106 : i == 7 ? 0.09 : 0.07, candidate.getMaxBoostedCompensationShare());
             assertEquals(i < 6 ? 0.125 : 0.0625, candidate.getBurnAmountShare());
-            assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
-            assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+            if (limitCappingRounds) {
+                assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+                assertEquals(i < 6 ? 0 : i < 8 ? -1 : 1, candidate.getRoundCapped().orElse(-1));
+            } else {
+                assertEquals(i < 6 ? 0.125 : i == 6 ? 0.11 : i == 7 ? 0.1 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(candidate.getMaxBoostedCompensationShare(), candidate.getCappedBurnAmountShare());
+                assertEquals(i < 6 ? 0 : i == 6 ? 3 : i == 7 ? 2 : 1, candidate.getRoundCapped().orElse(-1));
+            }
         }));
-        // Four capping rounds would have been required to achieve a maximum possible burn
-        // share total of 99.6%, with all the contributors being capped. But our capping
-        // algorithm only applies two rounds, so 3% ends up going to the LBM instead of the
-        // minimum possible amount of 0.4% (100% less the cap sum). Contributors `alice6` &
-        // `alice7` therefore receive less than they could have done.
+        // Four capping rounds are required to achieve a maximum possible burn share total of
+        // 99.6%, with all the contributors being capped. But our algorithm only applies two
+        // rounds when `limitCappingRounds` is true (that is, prior to the activation of the
+        // capping algorithm change), so 3% ends up going to the LBM in that case, instead of
+        // the minimum possible amount of 0.4% (100% less the cap sum). Contributors `alice6`
+        // & `alice7` therefore receive less than they could have done.
         double capTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getMaxBoostedCompensationShare).sum();
         double burnShareTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getCappedBurnAmountShare).sum();
         assertEquals(0.996, capTotal);
-        assertEquals(0.97, burnShareTotal);
+        assertEquals(limitCappingRounds ? 0.97 : capTotal, burnShareTotal);
     }
 }
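
The reworked `imposeCaps` pass above is the core of the Proposal 412 change: candidates are visited in descending burn-to-cap ratio, each one whose ratio is at least the running rescale factor is capped at its maximum share, and the share freed up by capping is redistributed to the remaining candidates instead of falling through to the legacy burning man after a fixed two rounds. The following is a minimal standalone sketch of that idea under simplifying assumptions; it is not the Bisq API. The `CappingSketch` class, the `Candidate` holder, its `burnShare`/`capShare` fields, the `capAndRedistribute` method and the toy numbers in `main` are all illustrative, and it omits the `limitCappingRounds` legacy path, burn decay, the issuance boost factor and the adjusted-share bookkeeping that the real code keeps for the presentation layer.

import java.util.Arrays;
import java.util.Comparator;

public class CappingSketch {

    // Hypothetical value holder; the field names are illustrative, not Bisq's model class.
    static final class Candidate {
        final String name;
        final double burnShare;  // candidate's share of the total burn; all shares sum to 1.0
        final double capShare;   // maximum share allowed by the cap
        int roundCapped = -1;    // -1 means the candidate was never capped
        double finalShare;       // receiving share after capping and redistribution

        Candidate(String name, double burnShare, double capShare) {
            this.name = name;
            this.burnShare = burnShare;
            this.capShare = capShare;
        }
    }

    // Visits candidates in descending burnShare/capShare ratio. While a candidate's ratio is at
    // least the running rescale factor (remaining burn / remaining cap), it is capped at its
    // capShare; once the ratio drops below that factor, capping stops and the remaining
    // candidates are scaled up pro rata to absorb the share freed by the capped ones.
    static void capAndRedistribute(Candidate[] candidates) {
        Candidate[] byRatio = candidates.clone();
        Arrays.sort(byRatio, Comparator.comparingDouble(
                (Candidate c) -> c.burnShare / c.capShare).reversed());

        double remainingBurnShare = 1.0;
        double remainingCapShare = 1.0;
        double threshold = 1.0;          // current burn-to-cap threshold; lowered once per round
        int cappingRound = 0;
        int stopIndex = byRatio.length;  // index of the first uncapped candidate

        for (int i = 0; i < byRatio.length; i++) {
            Candidate c = byRatio[i];
            double ratio = c.burnShare / c.capShare;
            double invScaleFactor = remainingBurnShare / remainingCapShare;
            if (remainingCapShare <= 0.0 || ratio <= 0.0 || ratio < invScaleFactor) {
                stopIndex = i;           // this and every later candidate stays uncapped
                break;
            }
            if (ratio < threshold) {
                threshold = invScaleFactor;  // a new, lower threshold starts the next round
                cappingRound++;
            }
            c.roundCapped = cappingRound;
            c.finalShare = c.capShare;       // capped candidates receive exactly their cap
            remainingBurnShare -= c.burnShare;
            remainingCapShare -= c.capShare;
        }

        // Scale the uncapped candidates up so the total adds back to (at most) 1.0.
        double scale = remainingBurnShare > 0.0 ? remainingCapShare / remainingBurnShare : 0.0;
        for (int i = stopIndex; i < byRatio.length; i++) {
            Candidate c = byRatio[i];
            c.finalShare = Math.min(c.burnShare * scale, c.capShare);  // min() only guards FP rounding
        }
    }

    public static void main(String[] args) {
        // Toy numbers in the spirit of the tests above (burn shares sum to 1.0, all caps 25%);
        // they are not the fixtures used by BurningManServiceTest.
        Candidate[] candidates = {
                new Candidate("a", 0.40, 0.25),
                new Candidate("b", 0.22, 0.25),
                new Candidate("c", 0.15, 0.25),
                new Candidate("d", 0.13, 0.25),
                new Candidate("e", 0.10, 0.25)
        };
        capAndRedistribute(candidates);
        for (Candidate c : candidates) {
            System.out.printf("%s: burn=%.4f cap=%.4f round=%d final=%.5f%n",
                    c.name, c.burnShare, c.capShare, c.roundCapped, c.finalShare);
        }
    }
}

With these toy inputs, "a" is capped in round 0 and "b" in round 1, while "c", "d" and "e" absorb the freed share pro rata, so the final shares again sum to 1.0. Cases that need more than two capping rounds, like the three- and four-round tests above, are where this unlimited-round behaviour differs from the legacy two-round limit, which leaves the residue to the legacy burning man.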