From 40df84252c9eb5b4d6c7a7c8df1db48a4cda28c3 Mon Sep 17 00:00:00 2001
From: Ivan Mashonskii
Date: Wed, 5 Apr 2023 10:54:08 +0300
Subject: [PATCH] RocksDB (#3830)

---
 benchmark/build.sbt | 3 +-
 .../com/wavesplatform/state/DBState.scala | 20 +-
 .../com/wavesplatform/RollbackBenchmark.scala | 18 +-
 .../lang/v1/EvaluatorV2Benchmark.scala | 5 +-
 .../lang/v1/FractionIntBenchmark.scala | 12 +-
 .../com/wavesplatform/lang/v1/package.scala | 2 +-
 .../com/wavesplatform/state/BaseState.scala | 41 +-
 .../state/BloomFilterBenchmark.scala | 72 --
 .../state/RocksDBGetBenchmark.scala | 75 ++
 .../state/RocksDBIteratorBenchmark.scala | 84 ++
 .../state/RocksDBSeekForPrevBenchmark.scala | 90 +++
 .../state/RocksDBWriteBatchBenchmark.scala | 134 ++++
 ...ark.scala => RocksDBWriterBenchmark.scala} | 34 +-
 .../state/WavesEnvironmentBenchmark.scala | 26 +-
 .../state/WavesEnvironmentRebenchmark.scala | 39 +-
 .../api/grpc/AccountsApiGrpcImpl.scala | 39 +-
 .../api/grpc/AssetsApiGrpcImpl.scala | 16 +-
 .../api/grpc/BlocksApiGrpcImpl.scala | 21 +-
 .../api/grpc/TransactionsApiGrpcImpl.scala | 30 +-
 .../events/BlockchainUpdates.scala | 16 +-
 .../com/wavesplatform/events/Loader.scala | 21 +-
 .../scala/com/wavesplatform/events/Repo.scala | 28 +-
 .../com/wavesplatform/events/events.scala | 2 +-
 .../src/test/resources/application.conf | 8 +
 .../api/grpc/test/GRPCBroadcastSpec.scala | 56 +-
 .../wavesplatform/events/InterferableDB.scala | 45 --
 .../wavesplatform/events/WithBUDomain.scala | 12 +-
 .../lang/v1/evaluator/ContractEvaluator.scala | 11 +-
 .../lang/v1/evaluator/EvaluationResult.scala | 10 +-
 .../lang/v1/evaluator/EvaluatorV1.scala | 25 +-
 .../lang/v1/evaluator/EvaluatorV2.scala | 114 ++-
 .../v1/evaluator/ctx/EvaluationContext.scala | 26 +-
 .../lang/v1/evaluator/package.scala | 6 +-
 .../scala/com/wavesplatform/lang/Common.scala | 2 +-
 .../lang/ContractIntegrationTest.scala | 6 +-
 .../wavesplatform/lang/IntegrationTest.scala | 2 +-
 .../lang/estimator/package.scala | 10 +-
 .../lang/evaluator/EvaluatorSpec.scala | 2 +-
 .../lang/evaluator/EvaluatorV1V2Test.scala | 13 +-
 .../com/wavesplatform/utils/MerkleTest.scala | 2 +-
 .../wavesplatform/it/BaseTargetChecker.scala | 20 +-
 .../it/sync/transactions/IssueNFTSuite.scala | 33 +-
 .../test/BlockchainGenerator.scala | 19 +-
 node/build.sbt | 3 +-
 node/src/main/protobuf/waves/database.proto | 4 +
 node/src/main/resources/application.conf | 8 +
 .../scala/com/wavesplatform/Application.scala | 83 +-
 .../scala/com/wavesplatform/Explorer.scala | 227 ++++--
 .../scala/com/wavesplatform/Exporter.scala | 14 +-
 .../scala/com/wavesplatform/Importer.scala | 174 +++--
 .../com/wavesplatform/account/Recipient.scala | 4 +-
 .../com/wavesplatform/api/BlockMeta.scala | 21 +-
 .../api/common/AddressPortfolio.scala | 148 ++--
 .../api/common/AddressTransactions.scala | 158 +++-
 .../api/common/BalanceDistribution.scala | 23 +-
 .../api/common/CommonAccountsApi.scala | 134 +++-
 .../api/common/CommonAssetsApi.scala | 27 +-
 .../api/common/CommonTransactionsApi.scala | 16 +-
 .../wavesplatform/api/common/package.scala | 66 +-
 .../api/http/AddressApiRoute.scala | 31 +-
 .../api/http/ApiMarshallers.scala | 71 +-
 .../api/http/BlocksApiRoute.scala | 1 -
 .../wavesplatform/api/http/CustomJson.scala | 62 +-
 .../api/http/DebugApiRoute.scala | 3 +-
 .../wavesplatform/api/http/RouteTimeout.scala | 5 +
 .../api/http/StreamSerializerUtils.scala | 64 ++
 .../api/http/TransactionJsonSerializer.scala | 544 +++++++++++++
 .../api/http/TransactionsApiRoute.scala | 219 +-----
 .../api/http/alias/AliasApiRoute.scala | 5 +-
 .../api/http/assets/AssetsApiRoute.scala | 252 +++++-
 .../api/http/utils/UtilsEvaluator.scala | 41 +-
 .../wavesplatform/database/BloomFilter.scala | 107 ---
 .../com/wavesplatform/database/Caches.scala | 321 +++++---
 .../wavesplatform/database/DBResource.scala | 48 +-
 .../com/wavesplatform/database/Key.scala | 17 +-
 .../wavesplatform/database/KeyHelpers.scala | 6 +-
 .../com/wavesplatform/database/KeyTags.scala | 2 +-
 .../com/wavesplatform/database/Keys.scala | 134 ++--
 .../database/LevelDBFactory.scala | 28 -
 .../com/wavesplatform/database/RDB.scala | 143 ++++
 .../scala/com/wavesplatform/database/RW.scala | 27 +-
 .../wavesplatform/database/ReadOnlyDB.scala | 91 ++-
 ...evelDBWriter.scala => RocksDBWriter.scala} | 718 ++++++++++--------
 .../wavesplatform/database/SortedBatch.scala | 27 -
 .../database/jna/LevelDBJNADB.scala | 168 ----
 .../database/jna/LevelDBJNADBFactory.scala | 49 --
 .../com/wavesplatform/database/package.scala | 518 ++++++++-----
 .../patch/DisableHijackedAliases.scala | 2 +-
 .../database/protobuf/package.scala | 12 +
 .../wavesplatform/extensions/Context.scala | 4 +-
 .../com/wavesplatform/history/History.scala | 32 +-
 .../history/StorageFactory.scala | 27 +-
 ...{LevelDBStats.scala => RocksDBStats.scala} | 4 +-
 .../network/InvalidBlockStorage.scala | 2 +-
 .../network/LegacyFrameCodec.scala | 14 +-
 .../network/MicroBlockSynchronizer.scala | 66 +-
 .../network/PeerDatabaseImpl.scala | 4 +-
 .../network/RxExtensionLoader.scala | 8 +-
 .../network/RxScoreObserver.scala | 14 +-
 .../network/TransactionSynchronizer.scala | 17 +-
 .../wavesplatform/serialization/package.scala | 1 +
 .../wavesplatform/settings/DBSettings.scala | 3 +-
 .../settings/RocksDBSettings.scala | 9 +
 .../com/wavesplatform/settings/package.scala | 8 +
 .../com/wavesplatform/state/Blockchain.scala | 9 +-
 .../state/BlockchainUpdaterImpl.scala | 194 ++---
 .../com/wavesplatform/state/DataEntry.scala | 40 +-
 .../scala/com/wavesplatform/state/Diff.scala | 37 +-
 .../state/DiffToStateApplier.scala | 7 +-
 .../state/InvokeScriptResult.scala | 2 +-
 .../com/wavesplatform/state/NgState.scala | 37 +-
 .../state/ParSignatureChecker.scala | 50 ++
 .../state/appender/BlockAppender.scala | 15 +-
 .../state/appender/ExtensionAppender.scala | 11 +-
 .../state/appender/package.scala | 24 +-
 .../state/diffs/AssetTransactionsDiff.scala | 6 +-
 .../state/diffs/BalanceDiffValidation.scala | 14 +-
 .../state/diffs/BlockDiffer.scala | 88 ++-
 .../state/diffs/CommonValidation.scala | 4 +-
 .../state/diffs/DataTransactionDiff.scala | 2 +-
 .../state/diffs/EthereumTransactionDiff.scala | 10 +-
 .../state/diffs/TransactionDiffer.scala | 72 +-
 .../diffs/invoke/InvokeDiffsCommon.scala | 20 +-
 .../state/diffs/invoke/InvokeScriptDiff.scala | 13 +-
 .../invoke/InvokeScriptTransactionDiff.scala | 13 +-
 .../state/reader/CompositeBlockchain.scala | 34 +-
 .../transaction/BlockchainUpdater.scala | 2 +-
 .../transaction/PaymentTransaction.scala | 4 -
 .../wavesplatform/transaction/Proven.scala | 14 +
 .../transaction/TransactionType.scala | 21 +-
 .../assets/exchange/ExchangeTransaction.scala | 12 +
 .../transaction/assets/exchange/Order.scala | 8 +
 .../impl/IssueTxSerializer.scala | 4 +-
 .../transaction/smart/Verifier.scala | 89 ++-
 .../transaction/smart/WavesEnvironment.scala | 8 +-
 .../smart/script/ScriptRunner.scala | 6 +-
 .../utils/ObservedLoadingCache.scala | 1 -
 .../generator/BlockchainGeneratorApp.scala | 21 +-
 .../scala/com/wavesplatform/utx/UtxPool.scala | 8 +-
 .../com/wavesplatform/utx/UtxPoolImpl.scala | 27 +-
 node/src/test/resources/application.conf | 6 +
 .../wavesplatform/BlockchainStubHelpers.scala | 21 +-
 .../test/scala/com/wavesplatform/WithDB.scala | 37 -
 .../wavesplatform/WithNewDBForEachTest.scala | 32 +
 .../api/common/AddressTransactionsSpec.scala | 2 +-
 .../api/common/CommonAccountApiSpec.scala | 14 +-
 .../api/http/CustomJsonMarshallerSpec.scala | 4 +-
 .../consensus/FPPoSSelectorTest.scala | 320 ++++----
 ...iterSpec.scala => RocksDBWriterSpec.scala} | 16 +-
 .../database/TestStorageFactory.scala | 25 +-
 .../com/wavesplatform/db/InterferableDB.scala | 51 +-
 .../wavesplatform/db/ScriptCacheTest.scala | 93 ++-
 .../com/wavesplatform/db/WithState.scala | 135 ++--
 .../history/BlockRewardSpec.scala | 12 +-
 ...roblockSequencesSameTransactionsTest.scala | 97 ++-
 .../history/BlockchainUpdaterNFTTest.scala | 2 +-
 .../com/wavesplatform/history/Domain.scala | 60 +-
 .../history/LeasingExpirySpec.scala | 137 ++--
 .../wavesplatform/http/AddressRouteSpec.scala | 53 +-
 .../http/AssetsBroadcastRouteSpec.scala | 3 +
 .../wavesplatform/http/AssetsRouteSpec.scala | 5 +-
 .../http/DebugApiRouteSpec.scala | 56 +-
 .../wavesplatform/http/LeaseRouteSpec.scala | 4 +-
 .../http/ProtoVersionTransactionsSpec.scala | 2 +
 .../http/SpentComplexitySpec.scala | 1 +
 .../http/TransactionBroadcastSpec.scala | 13 +-
 .../http/TransactionsRouteSpec.scala | 27 +-
 .../wavesplatform/mining/BlockV5Test.scala | 8 +-
 .../mining/BlockWithMaxBaseTargetTest.scala | 25 +-
 .../mining/MicroBlockMinerSpec.scala | 1 +
 .../mining/MiningFailuresSuite.scala | 10 +-
 .../mining/MiningWithRewardSuite.scala | 26 +-
 ...criptComplexityMiningConstraintSuite.scala | 54 +-
 .../peer/PeerDatabaseImplSpecification.scala | 86 ++-
 .../EvaluatedPBSerializationTest.scala | 1 +
 .../state/BlockchainUpdaterImplSpec.scala | 40 +-
 .../wavesplatform/state/RollbackSpec.scala | 47 +-
 .../diffs/BlockDifferDetailedDiffTest.scala | 43 +-
 .../state/diffs/CommonValidationTest.scala | 55 +-
 .../diffs/CommonValidationTimeTest.scala | 4 +-
 .../CreateAliasTransactionDiffTest.scala | 3 +-
 .../diffs/ReissueTransactionDiffTest.scala | 38 +-
 .../TransactionValidationErrorPrintTest.scala | 3 +-
 .../state/diffs/ci/BigIntInvokeTest.scala | 2 +-
 .../state/diffs/ci/CallableV4DiffTest.scala | 2 +-
 .../diffs/ci/InvokeAffectedAddressTest.scala | 4 +-
 .../diffs/ci/InvokeAssetChecksTest.scala | 34 +-
 .../ci/InvokeScriptTransactionDiffTest.scala | 2 +-
 .../diffs/ci/MultiPaymentInvokeDiffTest.scala | 218 +++---
 .../diffs/ci/sync/SyncDAppErrorLogTest.scala | 5 +-
 .../diffs/smart/eth/EthereumInvokeTest.scala | 4 +-
 .../smart/predef/MatcherBlockchainTest.scala | 5 +
 .../smart/predef/ScriptVersionsTest.scala | 2 +-
 .../smart/scenarios/BalancesV4Test.scala | 59 +-
 .../wavesplatform/state/utils/package.scala | 29 +-
 .../wavesplatform/test/DomainPresets.scala | 35 +-
 .../com/wavesplatform/test/SharedDomain.scala | 24 +-
 .../transaction/ChainIdSpecification.scala | 507 ++++++------
 ...InvokeScriptTransactionSpecification.scala | 4 +-
 .../IssueTransactionV2Specification.scala | 4 +-
 .../assets/exchange/EthOrderSpec.scala | 27 +
 .../exchange/OrderJsonSpecification.scala | 2 +-
 .../assets/exchange/OrderSpecification.scala | 27 +-
 .../smart/EthereumTransactionSpec.scala | 25 +
 .../wavesplatform/utils/DiffMatchers.scala | 2 +-
 .../wavesplatform/utils/EmptyBlockchain.scala | 12 +-
 .../ObservedLoadingCacheSpecification.scala | 10 +-
 .../utx/UtxPoolSpecification.scala | 481 ++++++------
 project/Dependencies.scala | 64 +-
 project/plugins.sbt | 6 +-
 210 files changed, 6243 insertions(+), 4241 deletions(-)
 delete mode 100644
benchmark/src/test/scala/com/wavesplatform/state/BloomFilterBenchmark.scala create mode 100644 benchmark/src/test/scala/com/wavesplatform/state/RocksDBGetBenchmark.scala create mode 100644 benchmark/src/test/scala/com/wavesplatform/state/RocksDBIteratorBenchmark.scala create mode 100644 benchmark/src/test/scala/com/wavesplatform/state/RocksDBSeekForPrevBenchmark.scala create mode 100644 benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriteBatchBenchmark.scala rename benchmark/src/test/scala/com/wavesplatform/state/{LevelDBWriterBenchmark.scala => RocksDBWriterBenchmark.scala} (79%) create mode 100644 grpc-server/src/test/resources/application.conf delete mode 100644 grpc-server/src/test/scala/com/wavesplatform/events/InterferableDB.scala create mode 100644 node/src/main/scala/com/wavesplatform/api/http/StreamSerializerUtils.scala create mode 100644 node/src/main/scala/com/wavesplatform/api/http/TransactionJsonSerializer.scala delete mode 100644 node/src/main/scala/com/wavesplatform/database/BloomFilter.scala delete mode 100644 node/src/main/scala/com/wavesplatform/database/LevelDBFactory.scala create mode 100644 node/src/main/scala/com/wavesplatform/database/RDB.scala rename node/src/main/scala/com/wavesplatform/database/{LevelDBWriter.scala => RocksDBWriter.scala} (58%) delete mode 100644 node/src/main/scala/com/wavesplatform/database/SortedBatch.scala delete mode 100644 node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADB.scala delete mode 100644 node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADBFactory.scala create mode 100644 node/src/main/scala/com/wavesplatform/database/protobuf/package.scala rename node/src/main/scala/com/wavesplatform/metrics/{LevelDBStats.scala => RocksDBStats.scala} (89%) create mode 100644 node/src/main/scala/com/wavesplatform/settings/RocksDBSettings.scala create mode 100644 node/src/main/scala/com/wavesplatform/state/ParSignatureChecker.scala delete mode 100644 node/src/test/scala/com/wavesplatform/WithDB.scala create mode 100644 node/src/test/scala/com/wavesplatform/WithNewDBForEachTest.scala rename node/src/test/scala/com/wavesplatform/database/{LevelDBWriterSpec.scala => RocksDBWriterSpec.scala} (91%) diff --git a/benchmark/build.sbt b/benchmark/build.sbt index 94ceb613e1a..fb9b5a66ff7 100644 --- a/benchmark/build.sbt +++ b/benchmark/build.sbt @@ -3,7 +3,8 @@ enablePlugins(JmhPlugin) Jmh / version := "1.33" libraryDependencies ++= Seq( - "org.scodec" %% "scodec-core" % "1.11.10" + "org.scodec" %% "scodec-core" % "1.11.10", + "org.eclipse.collections" % "eclipse-collections" % "11.1.0" ) ++ Dependencies.logDeps // https://github.com/ktoso/sbt-jmh#adding-to-your-project diff --git a/benchmark/src/main/scala/com/wavesplatform/state/DBState.scala b/benchmark/src/main/scala/com/wavesplatform/state/DBState.scala index 416376c3de6..6fb7e9f9dc3 100644 --- a/benchmark/src/main/scala/com/wavesplatform/state/DBState.scala +++ b/benchmark/src/main/scala/com/wavesplatform/state/DBState.scala @@ -5,13 +5,12 @@ import java.io.File import com.wavesplatform.Application import com.wavesplatform.account.AddressScheme import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.{LevelDBWriter, openDB} +import com.wavesplatform.database.{RDB, RocksDBWriter} import com.wavesplatform.lang.directives.DirectiveSet import com.wavesplatform.settings.WavesSettings import com.wavesplatform.transaction.smart.WavesEnvironment import com.wavesplatform.utils.ScorexLogging import monix.eval.Coeval -import org.iq80.leveldb.DB import 
org.openjdk.jmh.annotations.{Param, Scope, State, TearDown} @State(Scope.Benchmark) @@ -21,12 +20,13 @@ abstract class DBState extends ScorexLogging { lazy val settings: WavesSettings = Application.loadApplicationConfig(Some(new File(configFile)).filter(_.exists())) - lazy val db: DB = openDB(settings.dbSettings.directory) + lazy val rdb: RDB = RDB.open(settings.dbSettings) - lazy val levelDBWriter: LevelDBWriter = - LevelDBWriter.readOnly( - db, - settings.copy(dbSettings = settings.dbSettings.copy(maxCacheSize = 1)) + lazy val rocksDBWriter: RocksDBWriter = + new RocksDBWriter( + rdb, + settings.blockchainSettings, + settings.dbSettings.copy(maxCacheSize = 1) ) AddressScheme.current = new AddressScheme { override val chainId: Byte = 'W' } @@ -34,8 +34,8 @@ abstract class DBState extends ScorexLogging { lazy val environment = new WavesEnvironment( AddressScheme.current.chainId, Coeval.raiseError(new NotImplementedError("`tx` is not implemented")), - Coeval(levelDBWriter.height), - levelDBWriter, + Coeval(rocksDBWriter.height), + rocksDBWriter, null, DirectiveSet.contractDirectiveSet, ByteStr.empty @@ -43,6 +43,6 @@ abstract class DBState extends ScorexLogging { @TearDown def close(): Unit = { - db.close() + rdb.close() } } diff --git a/benchmark/src/test/scala/com/wavesplatform/RollbackBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/RollbackBenchmark.scala index 7023f0f3537..d6900a4527d 100644 --- a/benchmark/src/test/scala/com/wavesplatform/RollbackBenchmark.scala +++ b/benchmark/src/test/scala/com/wavesplatform/RollbackBenchmark.scala @@ -1,29 +1,29 @@ package com.wavesplatform import java.io.File + import com.google.common.primitives.Ints import com.google.protobuf.ByteString import com.wavesplatform.account.{Address, AddressScheme, KeyPair} import com.wavesplatform.block.Block import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.* -import com.wavesplatform.database.{LevelDBWriter, openDB} +import com.wavesplatform.database.{RDB, RocksDBWriter} import com.wavesplatform.protobuf.transaction.PBRecipients import com.wavesplatform.state.{Diff, Portfolio} -import com.wavesplatform.transaction.{GenesisTransaction, Proofs, TxDecimals, TxPositiveAmount} import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.assets.IssueTransaction +import com.wavesplatform.transaction.{GenesisTransaction, Proofs, TxDecimals, TxPositiveAmount} import com.wavesplatform.utils.{NTP, ScorexLogging} -import monix.reactive.Observer import scala.collection.immutable.VectorMap object RollbackBenchmark extends ScorexLogging { def main(args: Array[String]): Unit = { val settings = Application.loadApplicationConfig(Some(new File(args(0)))) - val db = openDB(settings.dbSettings.directory) + val rdb = RDB.open(settings.dbSettings) val time = new NTP(settings.ntpServer) - val levelDBWriter = LevelDBWriter(db, Observer.stopped, settings) + val rocksDBWriter = new RocksDBWriter(rdb, settings.blockchainSettings, settings.dbSettings) val issuer = KeyPair(new Array[Byte](32)) @@ -73,7 +73,7 @@ object RollbackBenchmark extends ScorexLogging { } yield address -> Portfolio(assets = map) log.info("Appending genesis block") - levelDBWriter.append( + rocksDBWriter.append( Diff(portfolios = portfolios.toMap), 0, 0, @@ -89,13 +89,13 @@ object RollbackBenchmark extends ScorexLogging { val nextDiff = Diff(portfolios = addresses.map(_ -> Portfolio(1, assets = VectorMap(IssuedAsset(assets.head.id()) -> 1L))).toMap) log.info("Appending next block") - 
levelDBWriter.append(nextDiff, 0, 0, None, ByteStr.empty, nextBlock) + rocksDBWriter.append(nextDiff, 0, 0, None, ByteStr.empty, nextBlock) log.info("Rolling back") val start = System.nanoTime() - levelDBWriter.rollbackTo(1) + rocksDBWriter.rollbackTo(1) val end = System.nanoTime() log.info(f"Rollback took ${(end - start) * 1e-6}%.3f ms") - levelDBWriter.close() + rdb.close() } } diff --git a/benchmark/src/test/scala/com/wavesplatform/lang/v1/EvaluatorV2Benchmark.scala b/benchmark/src/test/scala/com/wavesplatform/lang/v1/EvaluatorV2Benchmark.scala index 93042b254d1..85b60437dc2 100644 --- a/benchmark/src/test/scala/com/wavesplatform/lang/v1/EvaluatorV2Benchmark.scala +++ b/benchmark/src/test/scala/com/wavesplatform/lang/v1/EvaluatorV2Benchmark.scala @@ -1,7 +1,6 @@ package com.wavesplatform.lang.v1 import java.util.concurrent.TimeUnit - import cats.Id import com.wavesplatform.lang.Common import com.wavesplatform.lang.directives.values.{V1, V3} @@ -9,7 +8,7 @@ import com.wavesplatform.lang.v1.EvaluatorV2Benchmark.* import com.wavesplatform.lang.v1.compiler.Terms.{EXPR, IF, TRUE} import com.wavesplatform.lang.v1.compiler.TestCompiler import com.wavesplatform.lang.v1.evaluator.EvaluatorV2 -import com.wavesplatform.lang.v1.evaluator.ctx.{EvaluationContext, LoggedEvaluationContext} +import com.wavesplatform.lang.v1.evaluator.ctx.{DisabledLogEvaluationContext, EvaluationContext} import com.wavesplatform.lang.v1.evaluator.ctx.impl.PureContext import com.wavesplatform.lang.v1.traits.Environment import org.openjdk.jmh.annotations.* @@ -20,7 +19,7 @@ import scala.annotation.tailrec object EvaluatorV2Benchmark { val pureContext: CTX[Environment] = PureContext.build(V1, useNewPowPrecision = true).withEnvironment[Environment] val pureEvalContext: EvaluationContext[Environment, Id] = pureContext.evaluationContext(Common.emptyBlockchainEnvironment()) - val evaluatorV2: EvaluatorV2 = new EvaluatorV2(LoggedEvaluationContext(_ => _ => (), pureEvalContext), V1, true, true) + val evaluatorV2: EvaluatorV2 = new EvaluatorV2(DisabledLogEvaluationContext(pureEvalContext), V1, true, true, false) } @OutputTimeUnit(TimeUnit.MILLISECONDS) diff --git a/benchmark/src/test/scala/com/wavesplatform/lang/v1/FractionIntBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/lang/v1/FractionIntBenchmark.scala index 8fd340679ea..05457b8df30 100644 --- a/benchmark/src/test/scala/com/wavesplatform/lang/v1/FractionIntBenchmark.scala +++ b/benchmark/src/test/scala/com/wavesplatform/lang/v1/FractionIntBenchmark.scala @@ -20,22 +20,22 @@ import org.openjdk.jmh.infra.Blackhole @Measurement(iterations = 10, time = 1) class FractionIntBenchmark { @Benchmark - def fraction1(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr1, LogExtraInfo(), V5, true, true)) + def fraction1(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr1, LogExtraInfo(), V5, true, true, false)) @Benchmark - def fraction2(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr2, LogExtraInfo(), V5, true, true)) + def fraction2(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr2, LogExtraInfo(), V5, true, true, false)) @Benchmark - def fraction3(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr3, LogExtraInfo(), V5, true, true)) + def fraction3(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr3, LogExtraInfo(), V5, true, true, false)) @Benchmark - def fraction1Round(bh: Blackhole, 
s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr1Round, LogExtraInfo(), V5, true, true)) + def fraction1Round(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr1Round, LogExtraInfo(), V5, true, true, false)) @Benchmark - def fraction2Round(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr2Round, LogExtraInfo(), V5, true, true)) + def fraction2Round(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr2Round, LogExtraInfo(), V5, true, true, false)) @Benchmark - def fraction3Round(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr3Round, LogExtraInfo(), V5, true, true)) + def fraction3Round(bh: Blackhole, s: St): Unit = bh.consume(EvaluatorV2.applyCompleted(s.ctx, s.expr3Round, LogExtraInfo(), V5, true, true, false)) } @State(Scope.Benchmark) diff --git a/benchmark/src/test/scala/com/wavesplatform/lang/v1/package.scala b/benchmark/src/test/scala/com/wavesplatform/lang/v1/package.scala index aeed9077baf..076708514ad 100644 --- a/benchmark/src/test/scala/com/wavesplatform/lang/v1/package.scala +++ b/benchmark/src/test/scala/com/wavesplatform/lang/v1/package.scala @@ -31,5 +31,5 @@ package object v1 { expr: EXPR, stdLibVersion: StdLibVersion ): (Log[Id], Int, Either[ExecutionError, Terms.EVALUATED]) = - EvaluatorV2.applyCompleted(ctx, expr, LogExtraInfo(), stdLibVersion, newMode = true, correctFunctionCallScope = true) + EvaluatorV2.applyCompleted(ctx, expr, LogExtraInfo(), stdLibVersion, newMode = true, correctFunctionCallScope = true, enableExecutionLog = false) } diff --git a/benchmark/src/test/scala/com/wavesplatform/state/BaseState.scala b/benchmark/src/test/scala/com/wavesplatform/state/BaseState.scala index ab3fd41cb97..2f03f5de728 100644 --- a/benchmark/src/test/scala/com/wavesplatform/state/BaseState.scala +++ b/benchmark/src/test/scala/com/wavesplatform/state/BaseState.scala @@ -3,35 +3,35 @@ package com.wavesplatform.state import java.io.File import java.nio.file.Files +import com.typesafe.config.ConfigFactory import com.wavesplatform.account.KeyPair import com.wavesplatform.block.Block import com.wavesplatform.common.utils.EitherExt2 -import com.wavesplatform.database.{LevelDBFactory, LevelDBWriter} +import com.wavesplatform.database.{RDB, RocksDBWriter} import com.wavesplatform.lagonaki.mocks.TestBlock import com.wavesplatform.mining.MiningConstraint -import com.wavesplatform.settings.FunctionalitySettings +import com.wavesplatform.settings.{FunctionalitySettings, WavesSettings, loadConfig} import com.wavesplatform.state.diffs.BlockDiffer -import com.wavesplatform.state.utils.TestLevelDB +import com.wavesplatform.state.utils.TestRocksDB import com.wavesplatform.transaction.{GenesisTransaction, Transaction} -import monix.execution.UncaughtExceptionReporter -import monix.reactive.Observer -import org.iq80.leveldb.{DB, Options} import org.openjdk.jmh.annotations.{Setup, TearDown} import org.scalacheck.{Arbitrary, Gen} trait BaseState { - import BaseState._ + import BaseState.* + val benchSettings: Settings = Settings.fromConfig(ConfigFactory.load()) + val wavesSettings: WavesSettings = { + val config = loadConfig(ConfigFactory.parseFile(new File(benchSettings.networkConfigFile))) + WavesSettings.fromRootConfig(config) + } private val fsSettings: FunctionalitySettings = updateFunctionalitySettings(FunctionalitySettings.TESTNET) - private val db: DB = { - val dir = Files.createTempDirectory("state-synthetic").toAbsolutePath.toString - val options = 
new Options() - options.createIfMissing(true) - LevelDBFactory.factory.open(new File(dir), options) + private val rdb: RDB = { + val dir = Files.createTempDirectory("state-synthetic").toAbsolutePath.toString + RDB.open(wavesSettings.dbSettings.copy(directory = dir)) } - private val portfolioChanges = Observer.empty(UncaughtExceptionReporter.default) - val state: LevelDBWriter = TestLevelDB.withFunctionalitySettings(db, portfolioChanges, fsSettings) + val state: RocksDBWriter = TestRocksDB.withFunctionalitySettings(rdb, fsSettings) private var _richAccount: KeyPair = _ def richAccount: KeyPair = _richAccount @@ -52,12 +52,11 @@ trait BaseState { transferTxs <- Gen.sequence[Vector[Transaction], Transaction]((1 to TxsInBlock).map { i => txGenP(sender, base.header.timestamp + i) }) - } yield - TestBlock.create( - time = transferTxs.last.timestamp, - ref = base.id(), - txs = transferTxs - ) + } yield TestBlock.create( + time = transferTxs.last.timestamp, + ref = base.id(), + txs = transferTxs + ) private val initGen: Gen[(KeyPair, Block)] = for { rich <- accountGen @@ -98,7 +97,7 @@ trait BaseState { @TearDown def close(): Unit = { - db.close() + rdb.close() } } diff --git a/benchmark/src/test/scala/com/wavesplatform/state/BloomFilterBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/BloomFilterBenchmark.scala deleted file mode 100644 index dbecacb7e72..00000000000 --- a/benchmark/src/test/scala/com/wavesplatform/state/BloomFilterBenchmark.scala +++ /dev/null @@ -1,72 +0,0 @@ -package com.wavesplatform.state - -import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.{Keys, LevelDBWriter} -import com.wavesplatform.transaction.assets.exchange.ExchangeTransaction -import com.wavesplatform.transaction.smart.Verifier -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra.Blackhole - -import java.util.concurrent.TimeUnit -import scala.util.Random - -@OutputTimeUnit(TimeUnit.MICROSECONDS) -@BenchmarkMode(Array(Mode.AverageTime)) -@Threads(1) -@Fork(1) -@Warmup(iterations = 100) -@Measurement(iterations = 100) -class BloomFilterBenchmark { - import BloomFilterBenchmark._ - - @Benchmark - def volumeAndFeeWithBloom(bh: Blackhole, st: St): Unit = { - bh.consume(st.levelDBWriterWithBloomFilter.filledVolumeAndFee(ByteStr(Random.nextBytes(32)))) - } - - @Benchmark - def volumeAndFeeWithoutBloom(bh: Blackhole, st: St): Unit = { - bh.consume(st.levelDBWriterWithoutBloomFilter.filledVolumeAndFee(ByteStr(Random.nextBytes(32)))) - } - - @Benchmark - def verifyExchangeTxSign(bh: Blackhole, st: St): Unit = { - bh.consume(Verifier.verifyAsEllipticCurveSignature(st.exchangeTransactions(Random.nextInt(1000)), checkWeakPk = false)) - } -} - -object BloomFilterBenchmark { - class St extends DBState { - - lazy val exchangeTransactions: List[ExchangeTransaction] = { - val txCountAtHeight = - Map.empty[Int, Int].withDefault(h => db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) - - val txs = LazyList.from(levelDBWriter.height, -1).flatMap { h => - val txCount = txCountAtHeight(h) - if (txCount == 0) - Seq.empty[ExchangeTransaction] - else - (0 until txCount).flatMap( - txNum => - db.get(Keys.transactionAt(Height(h), TxNum(txNum.toShort))) - .collect { case (m, tx: ExchangeTransaction) if m.succeeded => tx } - ) - } - - txs.take(1000).toList - } - - lazy val levelDBWriterWithBloomFilter: LevelDBWriter = - LevelDBWriter.readOnly( - db, - settings.copy(dbSettings = settings.dbSettings.copy(maxCacheSize = 1, useBloomFilter = true)) - ) - - lazy val 
levelDBWriterWithoutBloomFilter: LevelDBWriter = - LevelDBWriter.readOnly( - db, - settings.copy(dbSettings = settings.dbSettings.copy(maxCacheSize = 1, useBloomFilter = false)) - ) - } -} diff --git a/benchmark/src/test/scala/com/wavesplatform/state/RocksDBGetBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBGetBenchmark.scala new file mode 100644 index 00000000000..4281db0613c --- /dev/null +++ b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBGetBenchmark.scala @@ -0,0 +1,75 @@ +package com.wavesplatform.state + +import java.nio.file.Files +import java.util.concurrent.TimeUnit + +import com.typesafe.config.ConfigFactory +import com.wavesplatform.database.RDB +import com.wavesplatform.settings.{WavesSettings, loadConfig} +import com.wavesplatform.state.RocksDBGetBenchmark.* +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import org.rocksdb.{ReadOptions, WriteBatch, WriteOptions} +import sun.nio.ch.Util + +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@BenchmarkMode(Array(Mode.AverageTime)) +@Threads(1) +@Fork(1) +@Warmup(iterations = 10) +@Measurement(iterations = 100) +class RocksDBGetBenchmark { + @Benchmark + def simpleGet(st: BaseSt, bh: Blackhole): Unit = { + bh.consume(st.kvs.foreach { case (key, _) => + st.rdb.db.get(st.readOptions, key) + }) + } + + @Benchmark + def byteBufferGet(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + st.kvs.foreach { case (key, value) => + val keyBuffer = Util.getTemporaryDirectBuffer(key.length) + keyBuffer.put(key).flip() + val valBuffer = Util.getTemporaryDirectBuffer(value.length) + + st.rdb.db.get(st.readOptions, keyBuffer, valBuffer) + + Util.releaseTemporaryDirectBuffer(keyBuffer) + Util.releaseTemporaryDirectBuffer(valBuffer) + } + } + } +} + +object RocksDBGetBenchmark { + + @State(Scope.Benchmark) + class BaseSt { + private val wavesSettings: WavesSettings = + WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) + + val rdb: RDB = { + val dir = Files.createTempDirectory("state-synthetic").toAbsolutePath.toString + RDB.open(wavesSettings.dbSettings.copy(directory = dir)) + } + + val kvs: Map[Array[Byte], Array[Byte]] = (1 to 10000).map { idx => + s"key$idx".getBytes -> s"value$idx".getBytes + }.toMap + + val readOptions: ReadOptions = new ReadOptions() + + private val wb: WriteBatch = new WriteBatch() + kvs.foreach { case (key, value) => + wb.put(key, value) + } + rdb.db.write(new WriteOptions(), wb) + + @TearDown + def close(): Unit = { + rdb.close() + } + } +} diff --git a/benchmark/src/test/scala/com/wavesplatform/state/RocksDBIteratorBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBIteratorBenchmark.scala new file mode 100644 index 00000000000..386e69770c0 --- /dev/null +++ b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBIteratorBenchmark.scala @@ -0,0 +1,84 @@ +package com.wavesplatform.state + +import java.nio.file.Files +import java.util.concurrent.TimeUnit + +import com.google.common.primitives.Ints +import com.typesafe.config.ConfigFactory +import com.wavesplatform.database.RDB +import com.wavesplatform.settings.{WavesSettings, loadConfig} +import com.wavesplatform.state.RocksDBIteratorBenchmark.* +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import org.rocksdb.{ReadOptions, WriteBatch, WriteOptions} + +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@BenchmarkMode(Array(Mode.AverageTime)) +@Threads(1) +@Fork(1) +@Warmup(iterations = 10) +@Measurement(iterations = 100) +class RocksDBIteratorBenchmark { + 
@Benchmark + def directOrderIterator(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + val iter = st.rdb.db.newIterator(st.readOptions) + iter.seek(st.firstKey) + while (iter.isValid) { + iter.key() + iter.value() + iter.next() + } + iter.close() + } + } + + @Benchmark + def reverseOrderIterator(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + val iter = st.rdb.db.newIterator(st.readOptions) + iter.seekForPrev(st.lastKey) + while (iter.isValid) { + iter.key() + iter.value() + iter.prev() + } + iter.close() + } + } +} + +object RocksDBIteratorBenchmark { + + @State(Scope.Benchmark) + class BaseSt { + private val wavesSettings: WavesSettings = + WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) + + val rdb: RDB = { + val dir = Files.createTempDirectory("state-synthetic").toAbsolutePath.toString + RDB.open(wavesSettings.dbSettings.copy(directory = dir)) + } + + val keysPrefix = "keysPrefix" + val firstKey: Array[Byte] = keysPrefix.getBytes ++ Ints.toByteArray(1) + val lastKey: Array[Byte] = keysPrefix.getBytes ++ Ints.toByteArray(10000) + + val kvs: Map[Array[Byte], Array[Byte]] = (1 to 10000).map { idx => + (keysPrefix.getBytes ++ Ints.toByteArray(idx)) -> s"value$idx".getBytes + }.toMap + + val readOptions: ReadOptions = new ReadOptions().setTotalOrderSeek(false).setPrefixSameAsStart(true) + + private val wb: WriteBatch = new WriteBatch() + kvs.foreach { case (key, value) => + wb.put(key, value) + } + rdb.db.write(new WriteOptions(), wb) + + @TearDown + def close(): Unit = { + rdb.close() + } + } +} diff --git a/benchmark/src/test/scala/com/wavesplatform/state/RocksDBSeekForPrevBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBSeekForPrevBenchmark.scala new file mode 100644 index 00000000000..a5b284e3021 --- /dev/null +++ b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBSeekForPrevBenchmark.scala @@ -0,0 +1,90 @@ +package com.wavesplatform.state + +import java.nio.file.Files +import java.util.concurrent.TimeUnit + +import com.google.common.primitives.{Bytes, Shorts} +import com.typesafe.config.ConfigFactory +import com.wavesplatform.account.Address +import com.wavesplatform.database.{ + AddressId, + CurrentData, + DataNode, + KeyTags, + Keys, + RDB, + readCurrentData, + readDataNode, + writeCurrentData, + writeDataNode +} +import com.wavesplatform.settings.{WavesSettings, loadConfig} +import com.wavesplatform.state.RocksDBSeekForPrevBenchmark.* +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import org.rocksdb.{ReadOptions, WriteBatch, WriteOptions} + +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@BenchmarkMode(Array(Mode.AverageTime)) +@Threads(1) +@Fork(1) +@Warmup(iterations = 10) +@Measurement(iterations = 100) +class RocksDBSeekForPrevBenchmark { + @Benchmark + def seekForPrev(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + val iter = st.rdb.db.newIterator(st.readOptions) + iter.seekForPrev(st.dataNodeKey(Height(Int.MaxValue))) + if (iter.isValid && iter.key().startsWith(st.dataNodeKeyPrefix)) { + readDataNode(st.keyString)(iter.value()).prevHeight + } + iter.close() + } + } + + @Benchmark + def get(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + readCurrentData(st.keyString)(st.rdb.db.get(st.currentDataKey)).prevHeight + } + } +} + +object RocksDBSeekForPrevBenchmark { + + @State(Scope.Benchmark) + class BaseSt { + private val wavesSettings: WavesSettings = + WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) + + val rdb: RDB = { + val dir = 
Files.createTempDirectory("state-synthetic").toAbsolutePath.toString + RDB.open(wavesSettings.dbSettings.copy(directory = dir)) + } + + val address: Address = Address(Array.fill(20)(1.toByte)) + val addressId: AddressId = AddressId(1L) + + val keyString = "key" + val currentDataKey: Array[Byte] = Keys.data(address, keyString).keyBytes + val dataNodeKey: Height => Array[Byte] = Keys.dataAt(addressId, "key")(_).keyBytes + val dataNodeKeyPrefix: Array[Byte] = Bytes.concat(Shorts.toByteArray(KeyTags.DataHistory.id.toShort), addressId.toByteArray, keyString.getBytes) + + private val dataEntry: StringDataEntry = StringDataEntry(keyString, "value") + + val readOptions: ReadOptions = new ReadOptions() + + private val wb: WriteBatch = new WriteBatch() + wb.put(currentDataKey, writeCurrentData(CurrentData(dataEntry, Height(10000), Height(9999)))) + (1 to 1000).foreach { h => + wb.put(dataNodeKey(Height(h)), writeDataNode(DataNode(dataEntry, Height(h - 1)))) + } + rdb.db.write(new WriteOptions(), wb) + + @TearDown + def close(): Unit = { + rdb.close() + } + } +} diff --git a/benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriteBatchBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriteBatchBenchmark.scala new file mode 100644 index 00000000000..219e281ed5f --- /dev/null +++ b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriteBatchBenchmark.scala @@ -0,0 +1,134 @@ +package com.wavesplatform.state + +import java.nio.file.Files +import java.util.Comparator +import java.util.concurrent.TimeUnit +import java.util.function.Consumer + +import com.google.common.primitives.{Ints, UnsignedBytes} +import com.typesafe.config.ConfigFactory +import com.wavesplatform.common.ByteStrComparator +import com.wavesplatform.common.state.ByteStr +import com.wavesplatform.database.RDB +import com.wavesplatform.settings.{WavesSettings, loadConfig} +import com.wavesplatform.state.RocksDBWriteBatchBenchmark.* +import org.eclipse.collections.api.block.HashingStrategy +import org.eclipse.collections.api.tuple.Pair +import org.eclipse.collections.impl.factory.{HashingStrategyMaps, HashingStrategySets} +import org.eclipse.collections.impl.utility.MapIterate +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import org.rocksdb.{WriteBatch, WriteOptions} + +import scala.util.Random + +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@BenchmarkMode(Array(Mode.AverageTime)) +@Threads(1) +@Fork(1) +@Warmup(iterations = 10) +@Measurement(iterations = 100) +class RocksDBWriteBatchBenchmark { + + @Benchmark + def sortedBatch(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + val sortedBatch = new SortedBatch + val nativeBatch = new WriteBatch() + st.kvsShuffled.foreach { case (k, v) => + sortedBatch.put(k, v) + } + MapIterate + .toListOfPairs(sortedBatch.addedEntries) + .sortThis((o1: Pair[Array[Byte], Array[Byte]], o2: Pair[Array[Byte], Array[Byte]]) => + UnsignedBytes.lexicographicalComparator().compare(o1.getOne, o2.getOne) + ) + .forEach(new Consumer[Pair[Array[Byte], Array[Byte]]] { + override def accept(t: Pair[Array[Byte], Array[Byte]]): Unit = nativeBatch.put(t.getOne, t.getTwo) + }) + st.rdb.db.write(st.writeOptions, nativeBatch) + } + } + + @Benchmark + def notSortedBatch(st: BaseSt, bh: Blackhole): Unit = { + bh.consume { + val nativeBatch = new WriteBatch() + st.kvsShuffled.foreach { case (k, v) => + nativeBatch.put(k, v) + } + st.rdb.db.write(st.writeOptions, nativeBatch) + } + } +} + +object RocksDBWriteBatchBenchmark { + + @State(Scope.Benchmark) + class 
BaseSt { + private val wavesSettings: WavesSettings = + WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) + + val rdb: RDB = { + val dir = Files.createTempDirectory("state-synthetic").toAbsolutePath.toString + RDB.open(wavesSettings.dbSettings.copy(directory = dir)) + } + + private val minIdx = 1 + private val maxIdx = 10000 + private val firstPrefix = 'A' + private val lastPrefix = 'Z' + + private val firstKey = s"${firstPrefix}key".getBytes ++ Ints.toByteArray(minIdx) + private val lastKey = s"${lastPrefix}key".getBytes ++ Ints.toByteArray(maxIdx) + + private val kvs: Seq[(Array[Byte], Array[Byte])] = + for { + prefixChar <- firstPrefix to lastPrefix + idx <- minIdx to maxIdx + } yield { + (s"${prefixChar}key".getBytes ++ Ints.toByteArray(idx)) -> s"value$idx".getBytes + } + + Random.setSeed(42) + val kvsShuffled: Seq[(Array[Byte], Array[Byte])] = Random.shuffle(kvs) + + val writeOptions = new WriteOptions() + + @Setup(Level.Invocation) + def setup(): Unit = + rdb.db.deleteRange(firstKey, lastKey) + + @TearDown + def close(): Unit = { + writeOptions.close() + rdb.close() + } + } + + class SortedBatch extends WriteBatch { + val addedEntries = HashingStrategyMaps.mutable.`with`[Array[Byte], Array[Byte]](ByteArrayHashingStrategy) + val deletedEntries = HashingStrategySets.mutable.`with`[Array[Byte]](ByteArrayHashingStrategy) + + override def put(bytes: Array[Byte], bytes1: Array[Byte]): Unit = { + addedEntries.put(bytes, bytes1) + deletedEntries.remove(bytes) + } + + override def delete(bytes: Array[Byte]): Unit = { + addedEntries.remove(bytes) + deletedEntries.add(bytes) + } + + } + + object SortedBatch { + val byteStrComparator: Comparator[ByteStr] = (o1: ByteStr, o2: ByteStr) => ByteStrComparator.compare(o1, o2) + } + + object ByteArrayHashingStrategy extends HashingStrategy[Array[Byte]] { + override def computeHashCode(obj: Array[Byte]): Int = java.util.Arrays.hashCode(obj) + + override def equals(object1: Array[Byte], object2: Array[Byte]): Boolean = java.util.Arrays.equals(object1, object2) + } +} diff --git a/benchmark/src/test/scala/com/wavesplatform/state/LevelDBWriterBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriterBenchmark.scala similarity index 79% rename from benchmark/src/test/scala/com/wavesplatform/state/LevelDBWriterBenchmark.scala rename to benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriterBenchmark.scala index 7b45138d3ec..dbce07cc0c2 100644 --- a/benchmark/src/test/scala/com/wavesplatform/state/LevelDBWriterBenchmark.scala +++ b/benchmark/src/test/scala/com/wavesplatform/state/RocksDBWriterBenchmark.scala @@ -4,29 +4,25 @@ import java.io.File import java.util.concurrent.{ThreadLocalRandom, TimeUnit} import com.typesafe.config.ConfigFactory -import com.wavesplatform.account._ +import com.wavesplatform.account.* import com.wavesplatform.api.BlockMeta import com.wavesplatform.api.common.CommonBlocksApi import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.{Base58, EitherExt2} import com.wavesplatform.database -import com.wavesplatform.database.{DBExt, Keys, LevelDBFactory, LevelDBWriter} +import com.wavesplatform.database.{DBExt, Keys, RDB, RocksDBWriter} import com.wavesplatform.settings.{WavesSettings, loadConfig} -import com.wavesplatform.state.LevelDBWriterBenchmark._ +import com.wavesplatform.state.RocksDBWriterBenchmark.* import com.wavesplatform.transaction.Transaction -import org.iq80.leveldb.{DB, Options} -import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.annotations.* 
import org.openjdk.jmh.infra.Blackhole import scala.io.Codec -/** - * Tests over real database. How to test: - * 1. Download a database - * 2. Import it: https://github.com/wavesplatform/Waves/wiki/Export-and-import-of-the-blockchain#import-blocks-from-the-binary-file - * 3. Run ExtractInfo to collect queries for tests - * 4. Make Caches.MaxSize = 1 - * 5. Run this test +/** Tests over real database. How to test: + * 1. Download a database 2. Import it: + * https://github.com/wavesplatform/Waves/wiki/Export-and-import-of-the-blockchain#import-blocks-from-the-binary-file 3. Run ExtractInfo to + * collect queries for tests 4. Make Caches.MaxSize = 1 5. Run this test */ @OutputTimeUnit(TimeUnit.NANOSECONDS) @BenchmarkMode(Array(Mode.AverageTime)) @@ -34,7 +30,7 @@ import scala.io.Codec @Fork(1) @Warmup(iterations = 10) @Measurement(iterations = 100) -class LevelDBWriterBenchmark { +class RocksDBWriterBenchmark { @Benchmark def readFullBlock_test(st: BlocksByIdSt, bh: Blackhole): Unit = { bh.consume(st.blockById(st.allBlocks.random).get) @@ -51,7 +47,7 @@ class LevelDBWriterBenchmark { } } -object LevelDBWriterBenchmark { +object RocksDBWriterBenchmark { @State(Scope.Benchmark) class TransactionByIdSt extends BaseSt { @@ -85,20 +81,20 @@ object LevelDBWriterBenchmark { override val chainId: Byte = wavesSettings.blockchainSettings.addressSchemeCharacter.toByte } - private val rawDB: DB = { + private val rawDB: RDB = { val dir = new File(wavesSettings.dbSettings.directory) if (!dir.isDirectory) throw new IllegalArgumentException(s"Can't find directory at '${wavesSettings.dbSettings.directory}'") - LevelDBFactory.factory.open(dir, new Options) + RDB.open(wavesSettings.dbSettings) } - val db = LevelDBWriter.readOnly(rawDB, wavesSettings) + val db = new RocksDBWriter(rawDB, wavesSettings.blockchainSettings, wavesSettings.dbSettings) def loadBlockInfoAt(height: Int): Option[(BlockMeta, Seq[(TxMeta, Transaction)])] = loadBlockMetaAt(height).map { meta => - meta -> rawDB.readOnly(ro => database.loadTransactions(Height(height), ro)) + meta -> database.loadTransactions(Height(height), rawDB) } - def loadBlockMetaAt(height: Int): Option[BlockMeta] = rawDB.get(Keys.blockMetaAt(Height(height))) + def loadBlockMetaAt(height: Int): Option[BlockMeta] = rawDB.db.get(Keys.blockMetaAt(Height(height))).flatMap(BlockMeta.fromPb) val cba = CommonBlocksApi(db, loadBlockMetaAt, loadBlockInfoAt) diff --git a/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentBenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentBenchmark.scala index 533a930a8a4..edd7e82f488 100644 --- a/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentBenchmark.scala +++ b/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentBenchmark.scala @@ -8,29 +8,25 @@ import com.typesafe.config.ConfigFactory import com.wavesplatform.account.{AddressOrAlias, AddressScheme, Alias} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.{Base58, EitherExt2} -import com.wavesplatform.database.{LevelDBFactory, LevelDBWriter} +import com.wavesplatform.database.{RDB, RocksDBWriter} import com.wavesplatform.lang.directives.DirectiveSet import com.wavesplatform.lang.v1.traits.Environment import com.wavesplatform.lang.v1.traits.domain.Recipient import com.wavesplatform.settings.{WavesSettings, loadConfig} -import com.wavesplatform.state.WavesEnvironmentBenchmark._ +import com.wavesplatform.state.WavesEnvironmentBenchmark.* import com.wavesplatform.state.bench.DataTestData 
import com.wavesplatform.transaction.smart.WavesEnvironment import monix.eval.Coeval -import org.iq80.leveldb.{DB, Options} -import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.annotations.* import org.openjdk.jmh.infra.Blackhole import scodec.bits.BitVector import scala.io.Codec -/** - * Tests over real database. How to test: - * 1. Download a database - * 2. Import it: https://github.com/wavesplatform/Waves/wiki/Export-and-import-of-the-blockchain#import-blocks-from-the-binary-file - * 3. Run ExtractInfo to collect queries for tests - * 4. Make Caches.MaxSize = 1 - * 5. Run this test +/** Tests over real database. How to test: + * 1. Download a database 2. Import it: + * https://github.com/wavesplatform/Waves/wiki/Export-and-import-of-the-blockchain#import-blocks-from-the-binary-file 3. Run ExtractInfo to + * collect queries for tests 4. Make Caches.MaxSize = 1 5. Run this test */ @OutputTimeUnit(TimeUnit.NANOSECONDS) @BenchmarkMode(Array(Mode.AverageTime)) @@ -133,14 +129,14 @@ object WavesEnvironmentBenchmark { override val chainId: Byte = wavesSettings.blockchainSettings.addressSchemeCharacter.toByte } - private val db: DB = { + private val rdb: RDB = { val dir = new File(wavesSettings.dbSettings.directory) if (!dir.isDirectory) throw new IllegalArgumentException(s"Can't find directory at '${wavesSettings.dbSettings.directory}'") - LevelDBFactory.factory.open(dir, new Options) + RDB.open(wavesSettings.dbSettings) } val environment: Environment[Id] = { - val state = LevelDBWriter.readOnly(db, wavesSettings) + val state = new RocksDBWriter(rdb, wavesSettings.blockchainSettings, wavesSettings.dbSettings) new WavesEnvironment( AddressScheme.current.chainId, Coeval.raiseError(new NotImplementedError("`tx` is not implemented")), @@ -154,7 +150,7 @@ object WavesEnvironmentBenchmark { @TearDown def close(): Unit = { - db.close() + rdb.close() } protected def load[T](label: String, absolutePath: String)(f: String => T): Vector[T] = { diff --git a/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentRebenchmark.scala b/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentRebenchmark.scala index b6e50a431f7..f83295e1047 100644 --- a/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentRebenchmark.scala +++ b/benchmark/src/test/scala/com/wavesplatform/state/WavesEnvironmentRebenchmark.scala @@ -11,7 +11,7 @@ import com.wavesplatform.lang.v1.traits.DataType.{Boolean, ByteArray, Long} import com.wavesplatform.lang.v1.traits.domain.Recipient import com.wavesplatform.transaction.DataTransaction import com.wavesplatform.transaction.transfer.TransferTransaction -import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.annotations.* import org.openjdk.jmh.infra.Blackhole import scala.util.Random @@ -23,7 +23,7 @@ import scala.util.Random @Warmup(iterations = 100) @Measurement(iterations = 100) class WavesEnvironmentRebenchmark { - import WavesEnvironmentRebenchmark._ + import WavesEnvironmentRebenchmark.* @Benchmark def resolveAlias(bh: Blackhole, st: St): Unit = { @@ -50,7 +50,7 @@ class WavesEnvironmentRebenchmark { @Benchmark def assetBalanceOf(bh: Blackhole, st: St): Unit = { val useUnexisting = Random.nextBoolean() - val addressNr = Random.nextInt(st.allAddresses.size) + val addressNr = Random.nextInt(st.allAddresses.size) if (useUnexisting) { bh.consume(st.environment.accountBalanceOf(st.allAddresses(addressNr), Some(Array[Byte](1, 2, 3)))) } else { @@ -138,7 +138,7 @@ object WavesEnvironmentRebenchmark { class St extends DBState { lazy val 
allAliases: Vector[Alias] = { val builder = Vector.newBuilder[Alias] - db.iterateOver(KeyTags.AddressIdOfAlias) { e => + rdb.db.iterateOver(KeyTags.AddressIdOfAlias) { e => builder += Alias.fromBytes(e.getKey.drop(2)).explicitGet() } builder.result() @@ -146,7 +146,7 @@ object WavesEnvironmentRebenchmark { lazy val allAssets: Vector[Array[Byte]] = { val builder = Vector.newBuilder[Array[Byte]] - db.iterateOver(KeyTags.AssetDetailsHistory) { e => + rdb.db.iterateOver(KeyTags.AssetDetailsHistory) { e => builder += e.getKey.drop(2) } builder.result() @@ -154,7 +154,7 @@ object WavesEnvironmentRebenchmark { lazy val allAddresses: IndexedSeq[Recipient.Address] = { val builder = Vector.newBuilder[Recipient.Address] - db.iterateOver(KeyTags.AddressId) { entry => + rdb.db.iterateOver(KeyTags.AddressId) { entry => builder += Recipient.Address(ByteStr(entry.getKey.drop(2))) } builder.result() @@ -162,7 +162,7 @@ object WavesEnvironmentRebenchmark { lazy val allTransactions: IndexedSeq[Array[Byte]] = { val txCountAtHeight = - Map.empty[Int, Int].withDefault(h => db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) + Map.empty[Int, Int].withDefault(h => rdb.db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) 1.to(environment.height.toInt, 100) .flatMap { h => @@ -170,13 +170,13 @@ object WavesEnvironmentRebenchmark { if (txCount == 0) None else - db.get(Keys.transactionAt(Height(h), TxNum(Random.nextInt(txCount).toShort))).map(_._2.id().arr) + rdb.db.get(Keys.transactionAt(Height(h), TxNum(Random.nextInt(txCount).toShort), rdb.txHandle)).map(_._2.id().arr) } } - lazy val dataEntries: IndexedSeq[(DataEntry[_], Recipient.Address)] = { + lazy val dataEntries: IndexedSeq[(DataEntry[?], Recipient.Address)] = { val txCountAtHeight = - Map.empty[Int, Int].withDefault(h => db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) + Map.empty[Int, Int].withDefault(h => rdb.db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) 1.to(environment.height.toInt, 10) .flatMap { h => @@ -184,19 +184,21 @@ object WavesEnvironmentRebenchmark { if (txCount == 0) None else - db.get(Keys.transactionAt(Height(h), TxNum(Random.nextInt(txCount).toShort))) - .collect { case (meta, dataTx: DataTransaction) if meta.succeeded && dataTx.data.nonEmpty => - ( - dataTx.data(Random.nextInt(dataTx.data.length)), - Recipient.Address(ByteStr(dataTx.sender.toAddress.bytes)) - ) + rdb.db + .get(Keys.transactionAt(Height(h), TxNum(Random.nextInt(txCount).toShort), rdb.txHandle)) + .collect { + case (meta, dataTx: DataTransaction) if meta.succeeded && dataTx.data.nonEmpty => + ( + dataTx.data(Random.nextInt(dataTx.data.length)), + Recipient.Address(ByteStr(dataTx.sender.toAddress.bytes)) + ) } } } lazy val transferTransactions: IndexedSeq[ByteStr] = { val txCountAtHeight = - Map.empty[Int, Int].withDefault(h => db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) + Map.empty[Int, Int].withDefault(h => rdb.db.get(Keys.blockMetaAt(Height(h))).fold(0)(_.transactionCount)) 1.to(environment.height.toInt, 100) .flatMap { h => @@ -204,7 +206,8 @@ object WavesEnvironmentRebenchmark { if (txCount == 0) None else - db.get(Keys.transactionAt(Height(h), TxNum(Random.nextInt(txCount).toShort))) + rdb.db + .get(Keys.transactionAt(Height(h), TxNum(Random.nextInt(txCount).toShort), rdb.txHandle)) .collect { case (meta, transferTx: TransferTransaction) if meta.succeeded => transferTx.id() } } } diff --git a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AccountsApiGrpcImpl.scala 
b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AccountsApiGrpcImpl.scala index 996db68795b..b103c675f78 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AccountsApiGrpcImpl.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AccountsApiGrpcImpl.scala @@ -44,10 +44,7 @@ class AccountsApiGrpcImpl(commonApi: CommonAccountsApi)(implicit sc: Scheduler) val responseStream = (addressOption, assetIds) match { case (Some(address), Seq()) => - // FIXME: Strict loading because of segfault in leveldb - Observable(loadWavesBalance(address)) ++ Observable.fromIterator( - commonApi.portfolio(address).map(assetBalanceResponse).toListL.map(_.iterator) - ) + Observable(loadWavesBalance(address)) ++ commonApi.portfolio(address).concatMapIterable(identity).map(assetBalanceResponse) case (Some(address), nonEmptyList) => Observable .fromIterable(nonEmptyList) @@ -75,24 +72,19 @@ class AccountsApiGrpcImpl(commonApi: CommonAccountsApi)(implicit sc: Scheduler) override def getActiveLeases(request: AccountRequest, responseObserver: StreamObserver[LeaseResponse]): Unit = responseObserver.interceptErrors { val result = - Observable.fromIterator( - commonApi - .activeLeases(request.address.toAddress) - .map { case LeaseInfo(leaseId, originTransactionId, sender, recipient, amount, height, status, _, _) => - assert(status == LeaseInfo.Status.Active) - LeaseResponse( - leaseId.toByteString, - originTransactionId.toByteString, - ByteString.copyFrom(sender.bytes), - Some(PBRecipients.create(recipient)), - amount, - height - ) - } - .toListL // FIXME: Strict loading because of segfault in leveldb - .map(_.iterator) - ) - + commonApi + .activeLeases(request.address.toAddress) + .map { case LeaseInfo(leaseId, originTransactionId, sender, recipient, amount, height, status, _, _) => + assert(status == LeaseInfo.Status.Active) + LeaseResponse( + leaseId.toByteString, + originTransactionId.toByteString, + ByteString.copyFrom(sender.bytes), + Some(PBRecipients.create(recipient)), + amount, + height + ) + } responseObserver.completeWith(result) } @@ -100,8 +92,7 @@ class AccountsApiGrpcImpl(commonApi: CommonAccountsApi)(implicit sc: Scheduler) val stream = if (request.key.nonEmpty) { Observable.fromIterable(commonApi.data(request.address.toAddress, request.key)) } else { - // FIXME: Strict loading because of segfault in leveldb - Observable.fromIterator(commonApi.dataStream(request.address.toAddress, Option(request.key).filter(_.nonEmpty)).toListL.map(_.iterator)) + commonApi.dataStream(request.address.toAddress, Option(request.key).filter(_.nonEmpty)) } responseObserver.completeWith(stream.map(de => DataEntryResponse(request.address, Some(PBTransactions.toPBDataEntry(de))))) diff --git a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AssetsApiGrpcImpl.scala b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AssetsApiGrpcImpl.scala index 0e4c1186804..5d847bea2ad 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AssetsApiGrpcImpl.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/AssetsApiGrpcImpl.scala @@ -30,16 +30,12 @@ class AssetsApiGrpcImpl(assetsApi: CommonAssetsApi, accountsApi: CommonAccountsA val responseStream = addressOption match { case Some(address) => - Observable.fromIterator( - accountsApi - .nftList(address, afterAssetId) - .map { case (a, d) => - NFTResponse(a.id.toByteString, Some(assetInfoResponse(d))) - } - .take(request.limit) - .toListL // FIXME: Strict loading because of segfault in leveldb - .map(_.iterator) - ) + 
accountsApi + .nftList(address, afterAssetId) + .concatMapIterable(_.map { case (a, d) => + NFTResponse(a.id.toByteString, Some(assetInfoResponse(d))) + }) + .take(request.limit) case _ => Observable.empty } diff --git a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/BlocksApiGrpcImpl.scala b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/BlocksApiGrpcImpl.scala index bbf6cc61f5f..ee76fd5a7b6 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/BlocksApiGrpcImpl.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/BlocksApiGrpcImpl.scala @@ -13,7 +13,6 @@ import com.wavesplatform.state.TxMeta import com.wavesplatform.transaction.Transaction import io.grpc.stub.StreamObserver import monix.execution.Scheduler -import monix.reactive.Observable import scala.concurrent.Future @@ -26,18 +25,14 @@ class BlocksApiGrpcImpl(commonApi: CommonBlocksApi)(implicit sc: Scheduler) exte override def getBlockRange(request: BlockRangeRequest, responseObserver: StreamObserver[BlockWithHeight]): Unit = responseObserver.interceptErrors { val stream = - Observable.fromIterator( - (if (request.includeTransactions) { - commonApi - .blocksRange(request.fromHeight, request.toHeight) - .map(toBlockWithHeight) - } else { - commonApi - .metaRange(request.fromHeight, request.toHeight) - .map(toBlockWithHeight) - }).toListL // FIXME: Strict loading because of segfault in leveldb - .map(_.iterator) - ) + if (request.includeTransactions) + commonApi + .blocksRange(request.fromHeight, request.toHeight) + .map(toBlockWithHeight) + else + commonApi + .metaRange(request.fromHeight, request.toHeight) + .map(toBlockWithHeight) responseObserver.completeWith(request.filter match { case Filter.GeneratorPublicKey(publicKey) => stream.filter(_.getBlock.getHeader.generator.toPublicKey == publicKey.toPublicKey) diff --git a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/TransactionsApiGrpcImpl.scala b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/TransactionsApiGrpcImpl.scala index 7277e964e0f..64cbac32dfb 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/api/grpc/TransactionsApiGrpcImpl.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/api/grpc/TransactionsApiGrpcImpl.scala @@ -32,31 +32,21 @@ class TransactionsApiGrpcImpl(blockchain: Blockchain, commonApi: CommonTransacti val maybeSender = Option(request.sender) .collect { case s if !s.isEmpty => s.toAddress } - Observable.fromIterator( - commonApi - .transactionsByAddress( - recipientAddrOrAlias, - maybeSender, - Set.empty, - None - ) - .toListL // FIXME: Strict loading because of segfault in leveldb - .map(_.iterator) + commonApi.transactionsByAddress( + recipientAddrOrAlias, + maybeSender, + Set.empty, + None ) // By sender case None if !request.sender.isEmpty => val senderAddress = request.sender.toAddress - Observable.fromIterator( - commonApi - .transactionsByAddress( - senderAddress, - Some(senderAddress), - Set.empty, - None - ) - .toListL // FIXME: Strict loading because of segfault in leveldb - .map(_.iterator) + commonApi.transactionsByAddress( + senderAddress, + Some(senderAddress), + Set.empty, + None ) // By ids diff --git a/grpc-server/src/main/scala/com/wavesplatform/events/BlockchainUpdates.scala b/grpc-server/src/main/scala/com/wavesplatform/events/BlockchainUpdates.scala index e2630eb430f..c86d0bdd7ea 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/events/BlockchainUpdates.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/events/BlockchainUpdates.scala @@ -5,7 +5,7 @@ import 
java.util.concurrent.TimeUnit import com.wavesplatform.block.{Block, MicroBlock} import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.openDB +import com.wavesplatform.database.RDB import com.wavesplatform.events.api.grpc.protobuf.BlockchainUpdatesApiGrpc import com.wavesplatform.events.settings.BlockchainUpdatesSettings import com.wavesplatform.extensions.{Context, Extension} @@ -16,10 +16,10 @@ import io.grpc.netty.NettyServerBuilder import io.grpc.{Metadata, Server, ServerStreamTracer, Status} import monix.execution.schedulers.SchedulerService import monix.execution.{ExecutionModel, Scheduler, UncaughtExceptionReporter} -import net.ceedubs.ficus.Ficus._ +import net.ceedubs.ficus.Ficus.* import scala.concurrent.Future -import scala.concurrent.duration._ +import scala.concurrent.duration.* import scala.util.Try class BlockchainUpdates(private val context: Context) extends Extension with ScorexLogging with BlockchainUpdateTriggers { @@ -32,18 +32,19 @@ class BlockchainUpdates(private val context: Context) extends Extension with Sco ) private[this] val settings = context.settings.config.as[BlockchainUpdatesSettings]("waves.blockchain-updates") - private[this] val db = openDB(context.settings.directory + "/blockchain-updates") - private[this] val repo = new Repo(db, context.blocksApi) + // todo: no need to open column families here + private[this] val rdb = RDB.open(context.settings.dbSettings.copy(directory = context.settings.directory + "/blockchain-updates")) + private[this] val repo = new Repo(rdb.db, context.blocksApi) private[this] val grpcServer: Server = NettyServerBuilder .forAddress(new InetSocketAddress("0.0.0.0", settings.grpcPort)) .permitKeepAliveTime(settings.minKeepAlive.toNanos, TimeUnit.NANOSECONDS) .addStreamTracerFactory((fullMethodName: String, headers: Metadata) => new ServerStreamTracer { - private[this] var callInfo = Option.empty[ServerStreamTracer.ServerCallInfo[_, _]] + private[this] var callInfo = Option.empty[ServerStreamTracer.ServerCallInfo[?, ?]] private[this] def callId = callInfo.fold("???")(ci => Integer.toHexString(System.identityHashCode(ci))) - override def serverCallStarted(callInfo: ServerStreamTracer.ServerCallInfo[_, _]): Unit = { + override def serverCallStarted(callInfo: ServerStreamTracer.ServerCallInfo[?, ?]): Unit = { this.callInfo = Some(callInfo) log.trace(s"[$callId] gRPC call started: $fullMethodName, headers: $headers") } @@ -89,6 +90,7 @@ class BlockchainUpdates(private val context: Context) extends Extension with Sco scheduler.shutdown() scheduler.awaitTermination(10 seconds) repo.shutdown() + rdb.close() }(Scheduler.global) override def onProcessBlock( diff --git a/grpc-server/src/main/scala/com/wavesplatform/events/Loader.scala b/grpc-server/src/main/scala/com/wavesplatform/events/Loader.scala index 81c04cc8587..d1158a2f5c9 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/events/Loader.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/events/Loader.scala @@ -2,28 +2,29 @@ package com.wavesplatform.events import com.google.common.primitives.Ints import com.wavesplatform.api.common.CommonBlocksApi -import com.wavesplatform.api.grpc._ +import com.wavesplatform.api.grpc.* import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.Base58 import com.wavesplatform.database.{DBExt, DBResource} import com.wavesplatform.events.protobuf.BlockchainUpdated.Append.Body -import com.wavesplatform.events.protobuf.{BlockchainUpdated => PBBlockchainUpdated} -import 
com.wavesplatform.protobuf._ +import com.wavesplatform.events.protobuf.BlockchainUpdated as PBBlockchainUpdated +import com.wavesplatform.protobuf.* import com.wavesplatform.protobuf.block.PBBlock import com.wavesplatform.utils.ScorexLogging import monix.reactive.Observable -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB import scala.collection.mutable.ArrayBuffer import scala.util.{Failure, Success, Try} -class Loader(db: DB, blocksApi: CommonBlocksApi, target: Option[(Int, ByteStr)], streamId: String) extends ScorexLogging { +class Loader(db: RocksDB, blocksApi: CommonBlocksApi, target: Option[(Int, ByteStr)], streamId: String) extends ScorexLogging { private def loadBatch(res: DBResource, fromHeight: Int): Try[Seq[PBBlockchainUpdated]] = Try { - res.iterator.seek(Ints.toByteArray(fromHeight)) + res.fullIterator.seek(Ints.toByteArray(fromHeight)) val buffer = ArrayBuffer[PBBlockchainUpdated]() - while (res.iterator.hasNext && buffer.size < 100 && target.forall { case (h, _) => fromHeight + buffer.size <= h }) { - buffer.append(Loader.parseUpdate(res.iterator.next().getValue, blocksApi, fromHeight + buffer.size)) + while (res.fullIterator.isValid && buffer.size < 100 && target.forall { case (h, _) => fromHeight + buffer.size <= h }) { + buffer.append(Loader.parseUpdate(res.fullIterator.value(), blocksApi, fromHeight + buffer.size)) + res.fullIterator.next() } for ((h, id) <- target if h == fromHeight + buffer.size - 1; u <- buffer.lastOption) { @@ -59,8 +60,8 @@ object Loader { _.append.update( _.body.modify { case Body.Block(value) => - Body.Block(value.copy(block = blocksApi.blockAtHeight(height).map { - case (meta, txs) => PBBlock(Some(meta.header.toPBHeader), meta.signature.toByteString, txs.map(_._2.toPB)) + Body.Block(value.copy(block = blocksApi.blockAtHeight(height).map { case (meta, txs) => + PBBlock(Some(meta.header.toPBHeader), meta.signature.toByteString, txs.map(_._2.toPB)) })) case other => other } diff --git a/grpc-server/src/main/scala/com/wavesplatform/events/Repo.scala b/grpc-server/src/main/scala/com/wavesplatform/events/Repo.scala index 9151d73ee15..d3c80a51183 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/events/Repo.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/events/Repo.scala @@ -1,6 +1,8 @@ package com.wavesplatform.events import java.nio.{ByteBuffer, ByteOrder} +import java.util.concurrent.ConcurrentHashMap + import cats.syntax.semigroup.* import com.google.common.primitives.Ints import com.wavesplatform.api.common.CommonBlocksApi @@ -9,10 +11,10 @@ import com.wavesplatform.block.{Block, MicroBlock} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.database.DBExt import com.wavesplatform.events.Repo.keyForHeight -import com.wavesplatform.events.api.grpc.protobuf.BlockchainUpdatesApiGrpc.BlockchainUpdatesApi import com.wavesplatform.events.api.grpc.protobuf.* -import com.wavesplatform.events.protobuf.serde.* +import com.wavesplatform.events.api.grpc.protobuf.BlockchainUpdatesApiGrpc.BlockchainUpdatesApi import com.wavesplatform.events.protobuf.BlockchainUpdated as PBBlockchainUpdated +import com.wavesplatform.events.protobuf.serde.* import com.wavesplatform.events.repo.LiquidState import com.wavesplatform.state.Blockchain import com.wavesplatform.state.diffs.BlockDiffer @@ -22,14 +24,16 @@ import monix.eval.Task import monix.execution.Scheduler import monix.reactive.Observable import monix.reactive.subjects.PublishToOneSubject -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB -import 
java.util.concurrent.ConcurrentHashMap import scala.concurrent.Future import scala.util.Using import scala.util.control.Exception -class Repo(db: DB, blocksApi: CommonBlocksApi)(implicit s: Scheduler) extends BlockchainUpdatesApi with BlockchainUpdateTriggers with ScorexLogging { +class Repo(db: RocksDB, blocksApi: CommonBlocksApi)(implicit s: Scheduler) + extends BlockchainUpdatesApi + with BlockchainUpdateTriggers + with ScorexLogging { private[this] val monitor = new Object private[this] var liquidState = Option.empty[LiquidState] private[this] val handlers = ConcurrentHashMap.newKeySet[Handler]() @@ -43,7 +47,6 @@ class Repo(db: DB, blocksApi: CommonBlocksApi)(implicit s: Scheduler) extends Bl def shutdown(): Unit = { shutdownHandlers() - db.close() } def height: Int = @@ -51,8 +54,9 @@ class Repo(db: DB, blocksApi: CommonBlocksApi)(implicit s: Scheduler) extends Bl var lastHeight = 0 Using(ro.newIterator) { iter => Exception.ignoring(classOf[UnsupportedOperationException])(iter.seekToLast()) - while (iter.hasNext) { - lastHeight = Ints.fromByteArray(iter.next().getKey) + while (iter.isValid) { + lastHeight = Ints.fromByteArray(iter.key()) + iter.next() } } lastHeight @@ -104,11 +108,11 @@ class Repo(db: DB, blocksApi: CommonBlocksApi)(implicit s: Scheduler) extends Bl var buf: List[BlockAppended] = Nil Using(rw.newIterator) { iter => iter.seek(keyForHeight(toHeight + 1)) - while (iter.hasNext) { - val e = iter.next() - val height = Ints.fromByteArray(e.getKey) - val stateUpdate = Loader.parseUpdate(e.getValue, blocksApi, height).vanillaAppend + while (iter.isValid) { + val height = Ints.fromByteArray(iter.key()) + val stateUpdate = Loader.parseUpdate(iter.value(), blocksApi, height).vanillaAppend buf = stateUpdate :: buf + iter.next() } } diff --git a/grpc-server/src/main/scala/com/wavesplatform/events/events.scala b/grpc-server/src/main/scala/com/wavesplatform/events/events.scala index 48d9abed37a..9f1431adb52 100644 --- a/grpc-server/src/main/scala/com/wavesplatform/events/events.scala +++ b/grpc-server/src/main/scala/com/wavesplatform/events/events.scala @@ -405,7 +405,7 @@ object StateUpdate { LeasingBalanceUpdate(address, before, leaseBalance) }.toVector - val dataEntries = diff.accountData.toSeq.flatMap { case (address, AccountDataInfo(data)) => + val dataEntries = diff.accountData.toSeq.flatMap { case (address, data) => data.toSeq.map { case (_, entry) => val prev = blockchain.accountData(address, entry.key).getOrElse(EmptyDataEntry(entry.key)) DataEntryUpdate(address, prev, entry) diff --git a/grpc-server/src/test/resources/application.conf b/grpc-server/src/test/resources/application.conf new file mode 100644 index 00000000000..d59dd0e791f --- /dev/null +++ b/grpc-server/src/test/resources/application.conf @@ -0,0 +1,8 @@ +waves { + db.rocksdb { + main-cache-size = 1K + tx-cache-size = 1K + tx-meta-cache-size = 1K + write-buffer-size = 1M + } +} diff --git a/grpc-server/src/test/scala/com/wavesplatform/api/grpc/test/GRPCBroadcastSpec.scala b/grpc-server/src/test/scala/com/wavesplatform/api/grpc/test/GRPCBroadcastSpec.scala index 74e09ceb539..80704ce0232 100644 --- a/grpc-server/src/test/scala/com/wavesplatform/api/grpc/test/GRPCBroadcastSpec.scala +++ b/grpc-server/src/test/scala/com/wavesplatform/api/grpc/test/GRPCBroadcastSpec.scala @@ -25,13 +25,7 @@ import monix.reactive.Observable import org.scalamock.scalatest.PathMockFactory import org.scalatest.BeforeAndAfterAll -class GRPCBroadcastSpec - extends FlatSpec - with BeforeAndAfterAll - with PathMockFactory - with 
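The Loader and Repo hunks above also switch iterator protocols: LevelDB's DBIterator behaves like a Java iterator (hasNext, then next() returning a key/value entry), whereas a RocksDB iterator is a cursor that is checked with isValid, read with key()/value(), and advanced explicitly with next(). A minimal sketch of the RocksDB traversal idiom against a raw org.rocksdb.RocksDB handle (simplified, not the project's DBResource wrapper):

import org.rocksdb.RocksDB
import scala.util.Using

// Seek to a start key and walk forward until the cursor runs off the end of the keyspace.
def scanFrom(db: RocksDB, start: Array[Byte])(f: (Array[Byte], Array[Byte]) => Unit): Unit =
  Using.resource(db.newIterator()) { iter =>
    iter.seek(start)
    while (iter.isValid) {
      f(iter.key(), iter.value())
      iter.next()
    }
  }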
BlockchainStubHelpers - with EthHelpers - with DiffMatchers { +class GRPCBroadcastSpec extends FlatSpec with BeforeAndAfterAll with PathMockFactory with BlockchainStubHelpers with EthHelpers with DiffMatchers { // Fake NTP time val FakeTime: TestTime = TestTime(100) @@ -42,6 +36,15 @@ class GRPCBroadcastSpec val sh = StubHelpers(blockchain) sh.creditBalance(ethBuyOrder.senderAddress, *) sh.creditBalance(ethSellOrder.senderAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns( + Map( + TxHelpers.defaultAddress -> Long.MaxValue / 3, + ethBuyOrder.senderAddress -> Long.MaxValue / 3, + ethSellOrder.senderAddress -> Long.MaxValue / 3 + ) + ) sh.issueAsset(ByteStr(EthStubBytes32)) } @@ -62,27 +65,30 @@ class GRPCBroadcastSpec intercept[Exception](blockchain.assertBroadcast(transaction)).toString should include("ETH transactions should not be broadcasted over gRPC") } - //noinspection NotImplementedCode + // noinspection NotImplementedCode implicit class BlockchainBroadcastExt(blockchain: Blockchain) { def grpcTxApi: TransactionsApiGrpcImpl = - new TransactionsApiGrpcImpl(blockchain, new CommonTransactionsApi { - def aliasesOfAddress(address: Address): Observable[(Height, CreateAliasTransaction)] = ??? - def transactionById(txId: ByteStr): Option[TransactionMeta] = ??? - def unconfirmedTransactions: Seq[Transaction] = ??? - def unconfirmedTransactionById(txId: ByteStr): Option[Transaction] = ??? - def calculateFee(tx: Transaction): Either[ValidationError, (Asset, Long, Long)] = ??? - def transactionsByAddress( - subject: Address, - sender: Option[Address], - transactionTypes: Set[TransactionType], - fromId: Option[ByteStr] - ): Observable[TransactionMeta] = ??? - def transactionProofs(transactionIds: List[ByteStr]): List[Block.TransactionProof] = ??? - def broadcastTransaction(tx: Transaction): Future[TracedResult[ValidationError, Boolean]] = { - val differ = blockchain.stub.transactionDiffer(FakeTime) - Future.successful(differ(tx).map(_ => true)) + new TransactionsApiGrpcImpl( + blockchain, + new CommonTransactionsApi { + def aliasesOfAddress(address: Address): Observable[(Height, CreateAliasTransaction)] = ??? + def transactionById(txId: ByteStr): Option[TransactionMeta] = ??? + def unconfirmedTransactions: Seq[Transaction] = ??? + def unconfirmedTransactionById(txId: ByteStr): Option[Transaction] = ??? + def calculateFee(tx: Transaction): Either[ValidationError, (Asset, Long, Long)] = ??? + def transactionsByAddress( + subject: Address, + sender: Option[Address], + transactionTypes: Set[TransactionType], + fromId: Option[ByteStr] + ): Observable[TransactionMeta] = ??? + def transactionProofs(transactionIds: List[ByteStr]): List[Block.TransactionProof] = ??? 
+ def broadcastTransaction(tx: Transaction): Future[TracedResult[ValidationError, Boolean]] = { + val differ = blockchain.stub.transactionDiffer(FakeTime) + Future.successful(differ(tx).map(_ => true)) + } } - })(Scheduler.global) + )(Scheduler.global) @throws[StatusException]("on failed broadcast") def assertBroadcast(tx: Transaction): Unit = { diff --git a/grpc-server/src/test/scala/com/wavesplatform/events/InterferableDB.scala b/grpc-server/src/test/scala/com/wavesplatform/events/InterferableDB.scala deleted file mode 100644 index b4ed830d081..00000000000 --- a/grpc-server/src/test/scala/com/wavesplatform/events/InterferableDB.scala +++ /dev/null @@ -1,45 +0,0 @@ -package com.wavesplatform.events - -import java.util.Map -import java.util.concurrent.locks.ReentrantLock - -import org.iq80.leveldb -import org.iq80.leveldb.{DB, DBIterator, ReadOptions, Snapshot, WriteBatch, WriteOptions} - -case class InterferableDB(db: DB, startRead: ReentrantLock) extends DB { - override def get(key: Array[Byte], options: ReadOptions): Array[Byte] = db.get(key, options) - override def put(key: Array[Byte], value: Array[Byte]): Unit = db.put(key, value) - override def getSnapshot: Snapshot = db.getSnapshot - override def close(): Unit = db.close() - - override def get(key: Array[Byte]): Array[Byte] = ??? - override def delete(key: Array[Byte]): Unit = ??? - override def write(updates: WriteBatch): Unit = ??? - override def createWriteBatch(): WriteBatch = ??? - override def put(key: Array[Byte], value: Array[Byte], options: WriteOptions): Snapshot = ??? - override def delete(key: Array[Byte], options: WriteOptions): Snapshot = ??? - override def write(updates: WriteBatch, options: WriteOptions): Snapshot = ??? - override def getApproximateSizes(ranges: leveldb.Range*): Array[Long] = ??? - override def getProperty(name: String): String = ??? - override def suspendCompactions(): Unit = ??? - override def resumeCompactions(): Unit = ??? - override def compactRange(begin: Array[Byte], end: Array[Byte]): Unit = ??? - override def iterator(): DBIterator = ??? - - override def iterator(options: ReadOptions): DBIterator = new DBIterator { - private val iterator = db.iterator() - startRead.lock() - - override def next(): Map.Entry[Array[Byte], Array[Byte]] = iterator.next() - override def close(): Unit = iterator.close() - override def seek(key: Array[Byte]): Unit = iterator.seek(key) - override def hasNext: Boolean = iterator.hasNext - - override def seekToFirst(): Unit = ??? - override def peekNext(): Map.Entry[Array[Byte], Array[Byte]] = ??? - override def hasPrev: Boolean = ??? - override def prev(): Map.Entry[Array[Byte], Array[Byte]] = ??? - override def peekPrev(): Map.Entry[Array[Byte], Array[Byte]] = ??? - override def seekToLast(): Unit = ??? 
- } -} diff --git a/grpc-server/src/test/scala/com/wavesplatform/events/WithBUDomain.scala b/grpc-server/src/test/scala/com/wavesplatform/events/WithBUDomain.scala index 4ed07bc21d3..0e51b75e751 100644 --- a/grpc-server/src/test/scala/com/wavesplatform/events/WithBUDomain.scala +++ b/grpc-server/src/test/scala/com/wavesplatform/events/WithBUDomain.scala @@ -11,15 +11,15 @@ import com.wavesplatform.settings.{Constants, WavesSettings} import com.wavesplatform.transaction.TxHelpers import monix.execution.Scheduler import monix.execution.Scheduler.Implicits.global +import org.rocksdb.RocksDB import monix.reactive.subjects.PublishToOneSubject -import org.iq80.leveldb.DB import org.scalatest.Suite trait WithBUDomain extends WithDomain { _: Suite => - def withDomainAndRepo(settings: WavesSettings)(f: (Domain, Repo) => Unit, wrapDB: DB => DB = identity): Unit = { + def withDomainAndRepo(settings: WavesSettings)(f: (Domain, Repo) => Unit, wrapDB: RocksDB => RocksDB = identity): Unit = { withDomain(settings) { d => - tempDb { db => - val repo = new Repo(wrapDB(db), d.blocksApi) + tempDb { rdb => + val repo = new Repo(wrapDB(rdb.db), d.blocksApi) d.triggers = Seq(repo) try f(d, repo) finally repo.shutdownHandlers() @@ -29,8 +29,8 @@ trait WithBUDomain extends WithDomain { _: Suite => def withManualHandle(settings: WavesSettings, setSendUpdate: (() => Unit) => Unit)(f: (Domain, Repo) => Unit): Unit = withDomain(settings) { d => - tempDb { db => - val repo = new Repo(db, d.blocksApi) { + tempDb { rdb => + val repo = new Repo(rdb.db, d.blocksApi) { override def newHandler( id: String, maybeLiquidState: Option[LiquidState], diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ContractEvaluator.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ContractEvaluator.scala index 570ba0e7990..531b5e6576c 100644 --- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ContractEvaluator.scala +++ b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ContractEvaluator.scala @@ -118,7 +118,8 @@ object ContractEvaluator { version: StdLibVersion, limit: Int, correctFunctionCallScope: Boolean, - newMode: Boolean + newMode: Boolean, + enableExecutionLog: Boolean ): Coeval[Either[(ExecutionError, Int, Log[Id]), (ScriptResult, Log[Id])]] = Coeval .now(buildExprFromInvocation(dApp, i, version).leftMap((_, limit, Nil))) @@ -132,7 +133,8 @@ object ContractEvaluator { i.transactionId, limit, correctFunctionCallScope, - newMode + newMode, + enableExecutionLog ) case Left(error) => Coeval.now(Left(error)) } @@ -145,10 +147,11 @@ object ContractEvaluator { transactionId: ByteStr, limit: Int, correctFunctionCallScope: Boolean, - newMode: Boolean + newMode: Boolean, + enableExecutionLog: Boolean ): Coeval[Either[(ExecutionError, Int, Log[Id]), (ScriptResult, Log[Id])]] = EvaluatorV2 - .applyLimitedCoeval(expr, logExtraInfo, limit, ctx, version, correctFunctionCallScope, newMode) + .applyLimitedCoeval(expr, logExtraInfo, limit, ctx, version, correctFunctionCallScope, newMode, enableExecutionLog = enableExecutionLog) .map(_.flatMap { case (expr, unusedComplexity, log) => val result = expr match { diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluationResult.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluationResult.scala index e01f706acde..60cb164e02c 100644 --- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluationResult.scala +++ 
b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluationResult.scala @@ -1,8 +1,8 @@ package com.wavesplatform.lang.v1.evaluator +import cats.syntax.either.* import cats.{Monad, StackSafeMonad} -import cats.syntax.either._ -import com.wavesplatform.lang.{ExecutionError, CommonError} +import com.wavesplatform.lang.{CommonError, ExecutionError} import monix.eval.Coeval case class EvaluationResult[+A](value: Coeval[Either[(ExecutionError, Int), A]]) { @@ -14,8 +14,8 @@ case class EvaluationResult[+A](value: Coeval[Either[(ExecutionError, Int), A]]) } object EvaluationResult { - def apply[A](value: A): EvaluationResult[A] = EvaluationResult(Coeval(Right(value))) - def apply[A](error: String, limit: Int): EvaluationResult[A] = EvaluationResult(Coeval(Left((CommonError(error), limit)))) + def apply[A](value: A): EvaluationResult[A] = EvaluationResult(Coeval.now(Right(value))) + def apply[A](error: String, limit: Int): EvaluationResult[A] = EvaluationResult(Coeval.now(Left((CommonError(error), limit)))) implicit val monad: Monad[EvaluationResult] = new StackSafeMonad[EvaluationResult] { override def pure[A](a: A): EvaluationResult[A] = @@ -23,7 +23,7 @@ object EvaluationResult { override def flatMap[A, B](fa: EvaluationResult[A])(f: A => EvaluationResult[B]): EvaluationResult[B] = EvaluationResult(fa.value.flatMap { - case l @ Left(_) => Coeval(l.rightCast[B]) + case l @ Left(_) => Coeval.now(l.rightCast[B]) case Right(r) => f(r).value }) } diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV1.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV1.scala index 2b9b4c12ac8..cb750530586 100644 --- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV1.scala +++ b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV1.scala @@ -8,7 +8,7 @@ import com.wavesplatform.lang.v1.compiler.Terms.* import com.wavesplatform.lang.v1.compiler.Types.{CASETYPEREF, NOTHING} import com.wavesplatform.lang.v1.evaluator.ContextfulNativeFunction.{Extended, Simple} import com.wavesplatform.lang.v1.evaluator.ctx.* -import com.wavesplatform.lang.v1.evaluator.ctx.LoggedEvaluationContext.Lenses +import com.wavesplatform.lang.v1.evaluator.ctx.EnabledLogEvaluationContext.Lenses import com.wavesplatform.lang.v1.task.imports.* import com.wavesplatform.lang.v1.traits.Environment import com.wavesplatform.lang.{CoevalF, CommonError, EvalF, ExecutionError} @@ -35,11 +35,11 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo private def evalLetBlock(let: LET, inner: EXPR): EvalM[F, C, (EvaluationContext[C, F], EVALUATED)] = for { - ctx <- get[F, LoggedEvaluationContext[C, F], ExecutionError] + ctx <- get[F, EnabledLogEvaluationContext[C, F], ExecutionError] blockEvaluation = evalExpr(let.value) lazyBlock = LazyVal(blockEvaluation.ter(ctx), ctx.l(let.name)) result <- local { - modify[F, LoggedEvaluationContext[C, F], ExecutionError](lets.modify(_)(_.updated(let.name, lazyBlock))) + modify[F, EnabledLogEvaluationContext[C, F], ExecutionError](lets.modify(_)(_.updated(let.name, lazyBlock))) .flatMap(_ => evalExprWithCtx(inner)) } } yield result @@ -49,17 +49,17 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo val function = UserFunction(func.name, 0, NOTHING, func.args.map(n => (n, NOTHING))*)(func.body) .asInstanceOf[UserFunction[C]] local { - modify[F, LoggedEvaluationContext[C, F], ExecutionError](funcs.modify(_)(_.updated(funcHeader, function))) + 
modify[F, EnabledLogEvaluationContext[C, F], ExecutionError](funcs.modify(_)(_.updated(funcHeader, function))) .flatMap(_ => evalExprWithCtx(inner)) } } private def evalRef(key: String): EvalM[F, C, (EvaluationContext[C, F], EVALUATED)] = for { - ctx <- get[F, LoggedEvaluationContext[C, F], ExecutionError] + ctx <- get[F, EnabledLogEvaluationContext[C, F], ExecutionError] r <- lets.get(ctx).get(key) match { case Some(lzy) => liftTER[F, C, EVALUATED](lzy.value) - case None => raiseError[F, LoggedEvaluationContext[C, F], ExecutionError, EVALUATED](s"A definition of '$key' not found") + case None => raiseError[F, EnabledLogEvaluationContext[C, F], ExecutionError, EVALUATED](s"A definition of '$key' not found") } } yield (ctx.ec, r) @@ -82,7 +82,7 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo private def evalFunctionCall(header: FunctionHeader, args: List[EXPR]): EvalM[F, C, (EvaluationContext[C, F], EVALUATED)] = for { - ctx <- get[F, LoggedEvaluationContext[C, F], ExecutionError] + ctx <- get[F, EnabledLogEvaluationContext[C, F], ExecutionError] result <- funcs .get(ctx) .get(header) @@ -93,7 +93,8 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo r + (argName -> LazyVal.fromEvaluated(argValue, ctx.l(s"$argName"))) } local { - val newState: EvalM[F, C, Unit] = set[F, LoggedEvaluationContext[C, F], ExecutionError](lets.set(ctx)(letDefsWithArgs)).map(_.pure[F]) + val newState: EvalM[F, C, Unit] = + set[F, EnabledLogEvaluationContext[C, F], ExecutionError](lets.set(ctx)(letDefsWithArgs)).map(_.pure[F]) Monad[EvalM[F, C, *]].flatMap(newState)(_ => evalExpr(func.ev(ctx.ec.environment, args))) } }: EvalM[F, C, EVALUATED] @@ -125,7 +126,7 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo case _ => None } ) - .getOrElse(raiseError[F, LoggedEvaluationContext[C, F], ExecutionError, EVALUATED](s"function '$header' not found")) + .getOrElse(raiseError[F, EnabledLogEvaluationContext[C, F], ExecutionError, EVALUATED](s"function '$header' not found")) } yield (ctx.ec, result) private def evalExprWithCtx(t: EXPR): EvalM[F, C, (EvaluationContext[C, F], EVALUATED)] = @@ -138,7 +139,7 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo case _: FAILED_DEC => raiseError("Attempt to evaluate failed declaration.") } case REF(str) => evalRef(str) - case c: EVALUATED => get[F, LoggedEvaluationContext[C, F], ExecutionError].map(ctx => (ctx.ec, c)) + case c: EVALUATED => get[F, EnabledLogEvaluationContext[C, F], ExecutionError].map(ctx => (ctx.ec, c)) case IF(cond, t1, t2) => evalIF(cond, t1, t2) case GETTER(expr, field) => evalGetter(expr, field) case FUNCTION_CALL(header, args) => evalFunctionCall(header, args) @@ -150,7 +151,7 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo def applyWithLogging[A <: EVALUATED](c: EvaluationContext[C, F], expr: EXPR): F[Either[(ExecutionError, Log[F]), (A, Log[F])]] = { val log = ListBuffer[LogItem[F]]() - val lec = LoggedEvaluationContext[C, F]((str: String) => (v: LetExecResult[F]) => log.append((str, v)), c) + val lec = EnabledLogEvaluationContext[C, F]((str: String) => (v: LetExecResult[F]) => log.append((str, v)), c) val r = evalExpr(expr).map(_.asInstanceOf[A]).run(lec).value._2 r.map(_.bimap((_, log.toList), (_, log.toList))) } @@ -160,7 +161,7 @@ class EvaluatorV1[F[_]: Monad, C[_[_]]](implicit ev: Monad[EvalF[F, *]], ev2: Mo def applyWithCtx(c: EvaluationContext[C, F], expr: EXPR): 
F[Either[ExecutionError, (EvaluationContext[C, F], EVALUATED)]] = evalExprWithCtx(expr) - .run(LoggedEvaluationContext(_ => _ => (), c)) + .run(EnabledLogEvaluationContext(_ => _ => (), c)) .value ._2 } diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV2.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV2.scala index 0f8ad79c266..aed5720e7c4 100644 --- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV2.scala +++ b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/EvaluatorV2.scala @@ -9,11 +9,18 @@ import com.wavesplatform.lang.v1.FunctionHeader import com.wavesplatform.lang.v1.compiler.Terms.* import com.wavesplatform.lang.v1.compiler.Types.CASETYPEREF import com.wavesplatform.lang.v1.evaluator.ContextfulNativeFunction.{Extended, Simple} +import com.wavesplatform.lang.v1.evaluator.ctx.{ + DisabledLogEvaluationContext, + EnabledLogEvaluationContext, + EvaluationContext, + LoggedEvaluationContext, + NativeFunction, + UserFunction +} import com.wavesplatform.lang.v1.evaluator.ContractEvaluator.LogExtraInfo import com.wavesplatform.lang.v1.evaluator.EvaluatorV2.logFunc import com.wavesplatform.lang.v1.evaluator.EvaluatorV2.LogKeys.* import com.wavesplatform.lang.v1.evaluator.ctx.impl.waves.Bindings -import com.wavesplatform.lang.v1.evaluator.ctx.{EvaluationContext, LoggedEvaluationContext, NativeFunction, UserFunction} import com.wavesplatform.lang.v1.traits.Environment import com.wavesplatform.lang.{CommonError, ExecutionError} import monix.eval.Coeval @@ -27,6 +34,7 @@ class EvaluatorV2( val stdLibVersion: StdLibVersion, val correctFunctionCallScope: Boolean, val newMode: Boolean, + val enableExecutionLog: Boolean, val checkConstructorArgsTypes: Boolean = false ) { private val overheadCost: Int = if (newMode) 0 else 1 @@ -258,7 +266,7 @@ class EvaluatorV2( .flatMap { unusedArgsComplexity => val argsEvaluated = fc.args.forall(_.isInstanceOf[EVALUATED]) if (argsEvaluated && unusedArgsComplexity > 0) { - logFunc(fc, ctx, stdLibVersion, unusedArgsComplexity) + logFunc(fc, ctx, stdLibVersion, unusedArgsComplexity, enableExecutionLog) evaluateFunction(fc, startArgs, unusedArgsComplexity) } else EvaluationResult(unusedArgsComplexity) @@ -350,13 +358,19 @@ object EvaluatorV2 { stdLibVersion: StdLibVersion, correctFunctionCallScope: Boolean, newMode: Boolean, - checkConstructorArgsTypes: Boolean = false + checkConstructorArgsTypes: Boolean = false, + enableExecutionLog: Boolean = false ): Coeval[Either[(ExecutionError, Int, Log[Id]), (EXPR, Int, Log[Id])]] = { - val log = ListBuffer[LogItem[Id]]() - val loggedCtx = LoggedEvaluationContext[Environment, Id](name => value => log.append((name, value)), ctx) - var ref = expr.deepCopy.value - logCall(loggedCtx, logExtraInfo, ref) - new EvaluatorV2(loggedCtx, stdLibVersion, correctFunctionCallScope, newMode, checkConstructorArgsTypes) + val log = ListBuffer[LogItem[Id]]() + + val loggedCtx = if (enableExecutionLog) { + EnabledLogEvaluationContext[Environment, Id](name => value => log.append((name, value)), ctx) + } else { + DisabledLogEvaluationContext[Environment, Id](ctx) + } + var ref = expr.deepCopy.value + logCall(loggedCtx, logExtraInfo, ref, enableExecutionLog) + new EvaluatorV2(loggedCtx, stdLibVersion, correctFunctionCallScope, newMode, enableExecutionLog, checkConstructorArgsTypes) .root(ref, v => EvaluationResult { ref = v }, limit, Nil) .map((ref, _)) .value @@ -374,10 +388,20 @@ object EvaluatorV2 { complexityLimit: Int, 
correctFunctionCallScope: Boolean, newMode: Boolean, - handleExpr: EXPR => Either[ExecutionError, EVALUATED] + handleExpr: EXPR => Either[ExecutionError, EVALUATED], + enableExecutionLog: Boolean ): (Log[Id], Int, Either[ExecutionError, EVALUATED]) = EvaluatorV2 - .applyLimitedCoeval(expr, logExtraInfo, complexityLimit, ctx, stdLibVersion, correctFunctionCallScope, newMode) + .applyLimitedCoeval( + expr, + logExtraInfo, + complexityLimit, + ctx, + stdLibVersion, + correctFunctionCallScope, + newMode, + enableExecutionLog = enableExecutionLog + ) .value() .fold( { case (error, complexity, log) => (log, complexity, Left(error)) }, @@ -395,7 +419,8 @@ object EvaluatorV2 { logExtraInfo: LogExtraInfo, stdLibVersion: StdLibVersion, correctFunctionCallScope: Boolean, - newMode: Boolean + newMode: Boolean, + enableExecutionLog: Boolean ): (Log[Id], Int, Either[ExecutionError, EVALUATED]) = applyOrDefault( ctx, @@ -405,10 +430,16 @@ object EvaluatorV2 { Int.MaxValue, correctFunctionCallScope, newMode, - expr => Left(s"Unexpected incomplete evaluation result $expr") + expr => Left(s"Unexpected incomplete evaluation result $expr"), + enableExecutionLog ) - private def logCall(loggedCtx: LoggedEvaluationContext[Environment, Id], logExtraInfo: LogExtraInfo, exprCopy: EXPR): Unit = { + private def logCall( + loggedCtx: LoggedEvaluationContext[Environment, Id], + logExtraInfo: LogExtraInfo, + exprCopy: EXPR, + enableExecutionLog: Boolean + ): Unit = { @tailrec def findInvArgLet(expr: EXPR, let: LET): Option[LET] = { expr match { @@ -417,35 +448,46 @@ object EvaluatorV2 { case _ => None } } - logExtraInfo.dAppAddress.foreach { addr => - val addrObj = Bindings.senderObject(addr) - loggedCtx.log(LET(InvokedDApp, addrObj), addrObj.asRight[ExecutionError]) - } - logExtraInfo.invokedFuncName.foreach { funcName => - val invokedFuncName = CONST_STRING(funcName) - invokedFuncName.foreach(name => loggedCtx.log(LET(InvokedFuncName, name), invokedFuncName)) - } + if (enableExecutionLog) { + logExtraInfo.dAppAddress.foreach { addr => + val addrObj = Bindings.senderObject(addr) + loggedCtx.log(LET(InvokedDApp, addrObj), addrObj.asRight[ExecutionError]) + } - logExtraInfo.invArg.flatMap(findInvArgLet(exprCopy, _)).foreach { - case let @ LET(_, obj: CaseObj) => loggedCtx.log(let, obj.asRight[ExecutionError]) - case _ => + logExtraInfo.invokedFuncName.foreach { funcName => + val invokedFuncName = CONST_STRING(funcName) + invokedFuncName.foreach(name => loggedCtx.log(LET(InvokedFuncName, name), invokedFuncName)) + } + + logExtraInfo.invArg.flatMap(findInvArgLet(exprCopy, _)).foreach { + case let @ LET(_, obj: CaseObj) => loggedCtx.log(let, obj.asRight[ExecutionError]) + case _ => + } } } - private def logFunc(fc: FUNCTION_CALL, ctx: LoggedEvaluationContext[Environment, Id], stdLibVersion: StdLibVersion, limit: Int): Unit = { - val func = ctx.ec.functions.get(fc.function) - val funcName = func.map(_.name).getOrElse(fc.function.funcName) - func match { - case Some(f) => - val cost = f.costByLibVersion(stdLibVersion) - if (limit >= cost) { + private def logFunc( + fc: FUNCTION_CALL, + ctx: LoggedEvaluationContext[Environment, Id], + stdLibVersion: StdLibVersion, + limit: Int, + enableExecutionLog: Boolean + ): Unit = { + if (enableExecutionLog) { + val func = ctx.ec.functions.get(fc.function) + val funcName = func.map(_.name).getOrElse(fc.function.funcName) + func match { + case Some(f) => + val cost = f.costByLibVersion(stdLibVersion) + if (limit >= cost) { + logFuncArgs(fc, funcName, ctx) + 
ctx.log(LET(s"$funcName.$Complexity", TRUE), CONST_LONG(cost).asRight[ExecutionError]) + ctx.log(LET(ComplexityLimit, TRUE), CONST_LONG(limit - cost).asRight[ExecutionError]) + } + case None => logFuncArgs(fc, funcName, ctx) - ctx.log(LET(s"$funcName.$Complexity", TRUE), CONST_LONG(cost).asRight[ExecutionError]) - ctx.log(LET(ComplexityLimit, TRUE), CONST_LONG(limit - cost).asRight[ExecutionError]) - } - case None => - logFuncArgs(fc, funcName, ctx) + } } } diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/EvaluationContext.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/EvaluationContext.scala index c289844d1be..1013b3bae49 100644 --- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/EvaluationContext.scala +++ b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/EvaluationContext.scala @@ -27,11 +27,17 @@ case class EvaluationContext[C[_[_]], F[_]]( ) } -case class LoggedEvaluationContext[C[_[_]], F[_]: Monad](l: LetLogCallback[F], ec: EvaluationContext[C, F]) { +trait LoggedEvaluationContext[C[_[_]], F[_]] { + def ec: EvaluationContext[C, F] + def log(let: LET, result: LetExecResult[F]): Unit +} + +case class EnabledLogEvaluationContext[C[_[_]], F[_]: Monad](l: LetLogCallback[F], ec: EvaluationContext[C, F]) + extends LoggedEvaluationContext[C, F] { val loggedLets: util.IdentityHashMap[LET, Unit] = new util.IdentityHashMap() val loggedErrors: collection.mutable.Set[ExecutionError] = collection.mutable.Set() - def log(let: LET, result: LetExecResult[F]): F[Unit] = { + override def log(let: LET, result: LetExecResult[F]): Unit = { result.map { case Left(err) if !loggedErrors.contains(err) => loggedErrors.addOne(err) @@ -45,15 +51,21 @@ case class LoggedEvaluationContext[C[_[_]], F[_]: Monad](l: LetLogCallback[F], e loggedLets.computeIfAbsent(let, _ => l(let.name)(result)) } -object LoggedEvaluationContext { +object EnabledLogEvaluationContext { class Lenses[F[_]: Monad, C[_[_]]] { - val types: Lens[LoggedEvaluationContext[C, F], Map[String, FINAL]] = lens[LoggedEvaluationContext[C, F]] >> Symbol("ec") >> Symbol("typeDefs") - val lets: Lens[LoggedEvaluationContext[C, F], Map[String, LazyVal[F]]] = lens[LoggedEvaluationContext[C, F]] >> Symbol("ec") >> Symbol("letDefs") - val funcs: Lens[LoggedEvaluationContext[C, F], Map[FunctionHeader, BaseFunction[C]]] = - lens[LoggedEvaluationContext[C, F]] >> Symbol("ec") >> Symbol("functions") + val types: Lens[EnabledLogEvaluationContext[C, F], Map[String, FINAL]] = + lens[EnabledLogEvaluationContext[C, F]] >> Symbol("ec") >> Symbol("typeDefs") + val lets: Lens[EnabledLogEvaluationContext[C, F], Map[String, LazyVal[F]]] = + lens[EnabledLogEvaluationContext[C, F]] >> Symbol("ec") >> Symbol("letDefs") + val funcs: Lens[EnabledLogEvaluationContext[C, F], Map[FunctionHeader, BaseFunction[C]]] = + lens[EnabledLogEvaluationContext[C, F]] >> Symbol("ec") >> Symbol("functions") } } +case class DisabledLogEvaluationContext[C[_[_]], F[_]](ec: EvaluationContext[C, F]) extends LoggedEvaluationContext[C, F] { + override def log(let: LET, result: LetExecResult[F]): Unit = () +} + object EvaluationContext { val empty = EvaluationContext(Contextful.empty[Id], Map.empty, Map.empty, Map.empty) diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/package.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/package.scala index e708572fe18..6968162000a 100644 --- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/package.scala 
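The evaluator hunks above thread a new enableExecutionLog flag through ContractEvaluator and EvaluatorV2 and split the logging context into EnabledLogEvaluationContext and a no-op DisabledLogEvaluationContext, so the per-let logging cost is only paid when a trace is actually requested. A hedged caller-side sketch, where evalCtx, expr and version are placeholders for an evaluation context, a compiled expression and a StdLibVersion:

// Evaluation semantics are unchanged; the flag only decides whether log entries are collected.
val (log, unusedComplexity, result) =
  EvaluatorV2.applyCompleted(
    evalCtx,
    expr,
    LogExtraInfo(),
    version,
    correctFunctionCallScope = true,
    newMode = true,
    enableExecutionLog = false // pass true only when a debug trace is needed
  )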
+++ b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/package.scala @@ -2,15 +2,15 @@ package com.wavesplatform.lang.v1 import cats.Eval import cats.data.EitherT -import com.wavesplatform.lang.v1.evaluator.ctx.LoggedEvaluationContext +import com.wavesplatform.lang.v1.evaluator.ctx.EnabledLogEvaluationContext import com.wavesplatform.lang.v1.task.TaskMT import com.wavesplatform.lang.{EvalF, ExecutionError, TrampolinedExecResult} package object evaluator { - type EvalM[F[_], C[_[_]], A] = TaskMT[F, LoggedEvaluationContext[C, F], ExecutionError, A] + type EvalM[F[_], C[_[_]], A] = TaskMT[F, EnabledLogEvaluationContext[C, F], ExecutionError, A] implicit class EvalMOps[F[_], C[_[_]], A](ev: EvalM[F, C, A]) { - def ter(ctx: LoggedEvaluationContext[C, F]): TrampolinedExecResult[F, A] = + def ter(ctx: EnabledLogEvaluationContext[C, F]): TrampolinedExecResult[F, A] = EitherT[EvalF[F, *], ExecutionError, A](ev.run(ctx).map(_._2)) } diff --git a/lang/testkit/src/main/scala/com/wavesplatform/lang/Common.scala b/lang/testkit/src/main/scala/com/wavesplatform/lang/Common.scala index cee0a77dffa..60cdb6b3a6d 100644 --- a/lang/testkit/src/main/scala/com/wavesplatform/lang/Common.scala +++ b/lang/testkit/src/main/scala/com/wavesplatform/lang/Common.scala @@ -23,7 +23,7 @@ import scala.annotation.tailrec import scala.util.{Left, Right, Try} object Common { - import com.wavesplatform.lang.v1.evaluator.ctx.impl.converters._ + import com.wavesplatform.lang.v1.evaluator.ctx.impl.converters.* private val dataEntryValueType = UNION(LONG, BOOLEAN, BYTESTR, STRING) val dataEntryType = CASETYPEREF("DataEntry", List("key" -> STRING, "value" -> dataEntryValueType)) diff --git a/lang/tests/src/test/scala/com/wavesplatform/lang/ContractIntegrationTest.scala b/lang/tests/src/test/scala/com/wavesplatform/lang/ContractIntegrationTest.scala index 43e01827806..f4d504ccf74 100644 --- a/lang/tests/src/test/scala/com/wavesplatform/lang/ContractIntegrationTest.scala +++ b/lang/tests/src/test/scala/com/wavesplatform/lang/ContractIntegrationTest.scala @@ -174,7 +174,8 @@ class ContractIntegrationTest extends PropSpec with Inside { V3, Int.MaxValue, correctFunctionCallScope = true, - newMode = false + newMode = false, + enableExecutionLog = true ) .value() .leftMap { case (e, _, log) => (e, log) } @@ -188,7 +189,8 @@ class ContractIntegrationTest extends PropSpec with Inside { .verify( compiled.decs, compiled.verifierFuncOpt.get, - EvaluatorV2.applyCompleted(ctx.evaluationContext(environment), _, _, V3, correctFunctionCallScope = true, newMode = false), + EvaluatorV2 + .applyCompleted(ctx.evaluationContext(environment), _, _, V3, correctFunctionCallScope = true, newMode = false, enableExecutionLog = false), txObject ) ._3 diff --git a/lang/tests/src/test/scala/com/wavesplatform/lang/IntegrationTest.scala b/lang/tests/src/test/scala/com/wavesplatform/lang/IntegrationTest.scala index 16252b56dec..92f4864a3b8 100755 --- a/lang/tests/src/test/scala/com/wavesplatform/lang/IntegrationTest.scala +++ b/lang/tests/src/test/scala/com/wavesplatform/lang/IntegrationTest.scala @@ -92,7 +92,7 @@ class IntegrationTest extends PropSpec with Inside { val evalCtx = ctx.evaluationContext(env).asInstanceOf[EvaluationContext[Environment, Id]] compiled.flatMap(v => EvaluatorV2 - .applyCompleted(evalCtx, v._1, LogExtraInfo(), version, correctFunctionCallScope = true, newMode = true) + .applyCompleted(evalCtx, v._1, LogExtraInfo(), version, correctFunctionCallScope = true, newMode = true, enableExecutionLog = false) ._3 .bimap(_.message, 
_.asInstanceOf[T]) ) diff --git a/lang/tests/src/test/scala/com/wavesplatform/lang/estimator/package.scala b/lang/tests/src/test/scala/com/wavesplatform/lang/estimator/package.scala index e383e311e82..505857d05da 100644 --- a/lang/tests/src/test/scala/com/wavesplatform/lang/estimator/package.scala +++ b/lang/tests/src/test/scala/com/wavesplatform/lang/estimator/package.scala @@ -21,7 +21,15 @@ package object estimator { private val environment = Common.emptyBlockchainEnvironment() private def evaluator(overhead: Boolean, expr: EXPR) = - EvaluatorV2.applyCompleted(ctx.evaluationContext(environment), expr, LogExtraInfo(), V3, correctFunctionCallScope = true, overhead) + EvaluatorV2.applyCompleted( + ctx.evaluationContext(environment), + expr, + LogExtraInfo(), + V3, + correctFunctionCallScope = true, + overhead, + enableExecutionLog = false + ) def evaluatorV2AsEstimator(overhead: Boolean): ScriptEstimator = new ScriptEstimator { override val version: Int = 0 diff --git a/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorSpec.scala b/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorSpec.scala index 036265f70ad..8a56d49c8e4 100644 --- a/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorSpec.scala +++ b/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorSpec.scala @@ -75,7 +75,7 @@ abstract class EvaluatorSpec extends PropSpec with ScriptGen with Inside { private def evalExpr(expr: EXPR, version: StdLibVersion, useNewPowPrecision: Boolean): (Log[Id], Int, Either[ExecutionError, EVALUATED]) = { val ctx = lazyContexts((DirectiveSet(version, Account, Expression).explicitGet(), useNewPowPrecision, true)).value() val evalCtx = ctx.evaluationContext(Common.emptyBlockchainEnvironment()) - EvaluatorV2.applyCompleted(evalCtx, expr, LogExtraInfo(), version, correctFunctionCallScope = true, newMode = true) + EvaluatorV2.applyCompleted(evalCtx, expr, LogExtraInfo(), version, correctFunctionCallScope = true, newMode = true, enableExecutionLog = false) } def compile(code: String, version: StdLibVersion): Either[String, EXPR] = { diff --git a/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorV1V2Test.scala b/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorV1V2Test.scala index 61bfbed6f7c..b6943d8fd98 100644 --- a/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorV1V2Test.scala +++ b/lang/tests/src/test/scala/com/wavesplatform/lang/evaluator/EvaluatorV1V2Test.scala @@ -67,7 +67,15 @@ class EvaluatorV1V2Test extends PropSpec with EitherValues { private def evalV2[T <: EVALUATED](context: EvaluationContext[Environment, Id], expr: EXPR): Either[ExecutionError, T] = EvaluatorV2 - .applyCompleted(context, expr, LogExtraInfo(), implicitly[StdLibVersion], correctFunctionCallScope = true, newMode = true) + .applyCompleted( + context, + expr, + LogExtraInfo(), + implicitly[StdLibVersion], + correctFunctionCallScope = true, + newMode = true, + enableExecutionLog = false + ) ._3 .asInstanceOf[Either[ExecutionError, T]] @@ -91,7 +99,8 @@ class EvaluatorV1V2Test extends PropSpec with EitherValues { LogExtraInfo(), implicitly[StdLibVersion], correctFunctionCallScope = true, - newMode = true + newMode = true, + enableExecutionLog = true ) evaluatorV2Result shouldBe evaluatorV1Result.bimap(_._1, _._1) diff --git a/lang/tests/src/test/scala/com/wavesplatform/utils/MerkleTest.scala b/lang/tests/src/test/scala/com/wavesplatform/utils/MerkleTest.scala index fd5de71c9f4..23c3ae99426 100644 --- 
a/lang/tests/src/test/scala/com/wavesplatform/utils/MerkleTest.scala +++ b/lang/tests/src/test/scala/com/wavesplatform/utils/MerkleTest.scala @@ -110,7 +110,7 @@ class MerkleTest extends PropSpec { val ctx = lazyContexts((DirectiveSet(version, Account, Expression).explicitGet(), true, true))() val evalCtx = ctx.evaluationContext[Id](Common.emptyBlockchainEnvironment()) val typed = ExpressionCompiler(ctx.compilerContext, untyped) - typed.flatMap(v => EvaluatorV2.applyCompleted(evalCtx, v._1, LogExtraInfo(), version, true, true)._3.leftMap(_.toString)) + typed.flatMap(v => EvaluatorV2.applyCompleted(evalCtx, v._1, LogExtraInfo(), version, true, true, false)._3.leftMap(_.toString)) } private def scriptSrc(root: Array[Byte], proof: Array[Byte], value: Array[Byte]): String = { diff --git a/node-it/src/test/scala/com/wavesplatform/it/BaseTargetChecker.scala b/node-it/src/test/scala/com/wavesplatform/it/BaseTargetChecker.scala index 45528d48883..ba5e7f29996 100644 --- a/node-it/src/test/scala/com/wavesplatform/it/BaseTargetChecker.scala +++ b/node-it/src/test/scala/com/wavesplatform/it/BaseTargetChecker.scala @@ -5,34 +5,34 @@ import com.wavesplatform.account.KeyPair import com.wavesplatform.block.Block import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.consensus.PoSSelector -import com.wavesplatform.database.openDB +import com.wavesplatform.database.RDB import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.history.StorageFactory -import com.wavesplatform.settings._ +import com.wavesplatform.settings.* import com.wavesplatform.transaction.Asset.Waves import com.wavesplatform.utils.NTP -import monix.execution.UncaughtExceptionReporter -import monix.reactive.Observer -import net.ceedubs.ficus.Ficus._ +import net.ceedubs.ficus.Ficus.* object BaseTargetChecker { def main(args: Array[String]): Unit = { - implicit val reporter: UncaughtExceptionReporter = UncaughtExceptionReporter.default - val sharedConfig = Docker.genesisOverride() + val sharedConfig = Docker + .genesisOverride() .withFallback(Docker.configTemplate) .withFallback(defaultApplication()) .withFallback(defaultReference()) .resolve() + val settings = WavesSettings.fromRootConfig(sharedConfig) - val db = openDB("/tmp/tmp-db") + val db = RDB.open(settings.dbSettings.copy(directory = "/tmp/tmp-db")) val ntpTime = new NTP("ntp.pool.org") - val (blockchainUpdater, _) = StorageFactory(settings, db, ntpTime, Observer.empty, BlockchainUpdateTriggers.noop) + val (blockchainUpdater, _) = StorageFactory(settings, db, ntpTime, BlockchainUpdateTriggers.noop) val poSSelector = PoSSelector(blockchainUpdater, settings.synchronizationSettings.maxBaseTarget) try { - val genesisBlock = Block.genesis(settings.blockchainSettings.genesisSettings, blockchainUpdater.isFeatureActivated(BlockchainFeatures.RideV6)).explicitGet() + val genesisBlock = + Block.genesis(settings.blockchainSettings.genesisSettings, blockchainUpdater.isFeatureActivated(BlockchainFeatures.RideV6)).explicitGet() blockchainUpdater.processBlock(genesisBlock, genesisBlock.header.generationSignature) NodeConfigs.Default.map(_.withFallback(sharedConfig)).collect { diff --git a/node-it/src/test/scala/com/wavesplatform/it/sync/transactions/IssueNFTSuite.scala b/node-it/src/test/scala/com/wavesplatform/it/sync/transactions/IssueNFTSuite.scala index 236c67f0d4b..c7141e50253 100644 --- a/node-it/src/test/scala/com/wavesplatform/it/sync/transactions/IssueNFTSuite.scala +++ 
b/node-it/src/test/scala/com/wavesplatform/it/sync/transactions/IssueNFTSuite.scala @@ -19,9 +19,9 @@ class IssueNFTSuite extends BaseTransactionSuite with TableDrivenPropertyChecks override def nodeConfigs: Seq[Config] = NodeConfigs.newBuilder .overrideBase(_.raw("""waves { - | miner.quorum = 0 - | blockchain.custom.functionality.pre-activated-features.13 = 10 - |}""".stripMargin)) + | miner.quorum = 0 + | blockchain.custom.functionality.pre-activated-features.13 = 10 + |}""".stripMargin)) .withDefault(1) .withSpecial(_.nonMiner) .buildNonConflicting() @@ -156,20 +156,19 @@ class IssueNFTSuite extends BaseTransactionSuite with TableDrivenPropertyChecks val assetName = "NFTAsset" val assetDescription = "my asset description" - (1 to n).map( - i => - secondNode - .issue( - secondNode.keyPair, - assetName + i, - assetDescription + i, - quantity = 1, - decimals = 0, - reissuable = false, - fee = 0.001.waves, - script = None - ) - .id + (1 to n).map(i => + secondNode + .issue( + secondNode.keyPair, + assetName + i, + assetDescription + i, + quantity = 1, + decimals = 0, + reissuable = false, + fee = 0.001.waves, + script = None + ) + .id ) } } diff --git a/node-it/src/test/scala/com/wavesplatform/test/BlockchainGenerator.scala b/node-it/src/test/scala/com/wavesplatform/test/BlockchainGenerator.scala index 4e53699c1a1..e70bb9ee87b 100644 --- a/node-it/src/test/scala/com/wavesplatform/test/BlockchainGenerator.scala +++ b/node-it/src/test/scala/com/wavesplatform/test/BlockchainGenerator.scala @@ -7,13 +7,13 @@ import com.wavesplatform.block.{Block, BlockHeader} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.consensus.PoSSelector -import com.wavesplatform.database.openDB +import com.wavesplatform.database.RDB import com.wavesplatform.events.{BlockchainUpdateTriggers, UtxEvent} import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.history.StorageFactory import com.wavesplatform.lang.ValidationError import com.wavesplatform.mining.{Miner, MinerImpl} -import com.wavesplatform.settings.WavesSettings +import com.wavesplatform.settings.{DBSettings, WavesSettings} import com.wavesplatform.state.appender.BlockAppender import com.wavesplatform.test.BlockchainGenerator.{GenBlock, GenTx} import com.wavesplatform.transaction.TxValidationError.GenericError @@ -36,7 +36,6 @@ import com.wavesplatform.utx.UtxPoolImpl import com.wavesplatform.wallet.Wallet import io.netty.channel.group.DefaultChannelGroup import monix.execution.Scheduler.Implicits.global -import monix.reactive.Observer import monix.reactive.subjects.ConcurrentSubject import org.apache.commons.io.FileUtils import org.web3j.crypto.{ECKeyPair, RawTransaction} @@ -74,7 +73,7 @@ class BlockchainGenerator(wavesSettings: WavesSettings) extends ScorexLogging { private val settings: WavesSettings = wavesSettings.copy(minerSettings = wavesSettings.minerSettings.copy(quorum = 0)) def generateDb(genBlocks: Seq[GenBlock], dbDirPath: String = settings.dbSettings.directory): Unit = - generateBlockchain(genBlocks, dbDirPath) + generateBlockchain(genBlocks, settings.dbSettings.copy(directory = dbDirPath)) def generateBinaryFile(genBlocks: Seq[GenBlock]): Unit = { val targetHeight = genBlocks.size + 1 @@ -92,14 +91,18 @@ class BlockchainGenerator(wavesSettings: WavesSettings) extends ScorexLogging { } { output => Using.resource(new BufferedOutputStream(output, 10 * 1024 * 1024)) { bos => val dbDirPath = Files.createTempDirectory("generator-temp-db") - 
generateBlockchain(genBlocks, dbDirPath.toString, block => IO.exportBlockToBinary(bos, Some(block), legacy = true)) + generateBlockchain( + genBlocks, + settings.dbSettings.copy(directory = dbDirPath.toString), + block => IO.exportBlockToBinary(bos, Some(block), legacy = true) + ) log.info(s"Finished exporting $targetHeight blocks") FileUtils.deleteDirectory(dbDirPath.toFile) } } } - private def generateBlockchain(genBlocks: Seq[GenBlock], dbDirPath: String, exportToFile: Block => Unit = _ => ()): Unit = { + private def generateBlockchain(genBlocks: Seq[GenBlock], dbSettings: DBSettings, exportToFile: Block => Unit = _ => ()): Unit = { val scheduler = Schedulers.singleThread("appender") val time = new Time { val startTime: Long = settings.blockchainSettings.genesisSettings.timestamp @@ -110,9 +113,9 @@ class BlockchainGenerator(wavesSettings: WavesSettings) extends ScorexLogging { override def correctedTime(): Long = time override def getTimestamp(): Long = time } - Using.resource(openDB(dbDirPath)) { db => + Using.resource(RDB.open(dbSettings)) { db => val (blockchain, _) = - StorageFactory(settings, db, time, Observer.empty, BlockchainUpdateTriggers.noop) + StorageFactory(settings, db, time, BlockchainUpdateTriggers.noop) Using.resource(new UtxPoolImpl(time, blockchain, settings.utxSettings, settings.maxTxErrorLogSize, settings.minerSettings.enable)) { utxPool => val pos = PoSSelector(blockchain, settings.synchronizationSettings.maxBaseTarget) val extAppender = BlockAppender(blockchain, time, utxPool, pos, scheduler) _ diff --git a/node/build.sbt b/node/build.sbt index 500f025f027..2ba7441c3f8 100644 --- a/node/build.sbt +++ b/node/build.sbt @@ -105,7 +105,8 @@ inConfig(Universal)( "-J-XX:+ParallelRefProcEnabled", "-J-XX:+UseStringDeduplication", // JVM default charset for proper and deterministic getBytes behaviour - "-J-Dfile.encoding=UTF-8" + "-J-Dfile.encoding=UTF-8", + "-J--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" ) ) ) diff --git a/node/src/main/protobuf/waves/database.proto b/node/src/main/protobuf/waves/database.proto index 2b32c366885..e22778593eb 100644 --- a/node/src/main/protobuf/waves/database.proto +++ b/node/src/main/protobuf/waves/database.proto @@ -26,6 +26,8 @@ message BlockMeta { int64 total_fee_in_waves = 7; int64 reward = 8; bytes vrf = 9; + bytes total_score = 10; + int64 total_waves_amount = 11; } message TransactionMeta { @@ -34,6 +36,7 @@ message TransactionMeta { int32 type = 3; bool failed = 4; int64 spent_complexity = 5; + int32 size = 6; } message EthereumTransactionMeta { @@ -80,6 +83,7 @@ message StaticAssetInfo { bool is_nft = 4; int32 sequence_in_block = 5; int32 height = 6; + bytes id = 7; } message DataEntry { diff --git a/node/src/main/resources/application.conf b/node/src/main/resources/application.conf index e11f4e1ec08..e485e5193e2 100644 --- a/node/src/main/resources/application.conf +++ b/node/src/main/resources/application.conf @@ -23,6 +23,14 @@ waves { remember-blocks = 3h use-bloom-filter = false + + rocksdb { + main-cache-size = 512M + tx-cache-size = 16M + tx-meta-cache-size = 16M + write-buffer-size = 128M + enable-statistics = false + } } # NTP server diff --git a/node/src/main/scala/com/wavesplatform/Application.scala b/node/src/main/scala/com/wavesplatform/Application.scala index 9eea5e835ae..80b0eb5e547 100644 --- a/node/src/main/scala/com/wavesplatform/Application.scala +++ b/node/src/main/scala/com/wavesplatform/Application.scala @@ -1,5 +1,10 @@ package com.wavesplatform +import java.io.File +import java.security.Security +import 
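The generator and checker changes above illustrate the new opening convention: instead of openDB(path), storage is opened through RDB.open with a DBSettings value (typically settings.dbSettings with the directory overridden), handed to StorageFactory, and released by closing the RDB handle. A condensed sketch of that pattern, assuming the project's RDB, DBSettings and StorageFactory types, with settings, time and the temp path as placeholders:

import scala.util.Using

// Open a RocksDB-backed store under a custom directory and make sure it is closed afterwards.
val tmpDbSettings = settings.dbSettings.copy(directory = "/tmp/example-db") // illustrative path
Using.resource(RDB.open(tmpDbSettings)) { rdb =>
  val (blockchainUpdater, _) = StorageFactory(settings, rdb, time, BlockchainUpdateTriggers.noop)
  // ... use blockchainUpdater, e.g. to append generated blocks ...
}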
java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.{TimeUnit, *} + import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.Http.ServerBinding @@ -7,7 +12,7 @@ import cats.Eq import cats.instances.bigInt.* import cats.syntax.option.* import com.typesafe.config.* -import com.wavesplatform.account.{Address, AddressScheme} +import com.wavesplatform.account.AddressScheme import com.wavesplatform.actor.RootActorSystem import com.wavesplatform.api.BlockMeta import com.wavesplatform.api.common.* @@ -19,7 +24,7 @@ import com.wavesplatform.api.http.leasing.LeaseApiRoute import com.wavesplatform.api.http.utils.UtilsApiRoute import com.wavesplatform.common.state.ByteStr import com.wavesplatform.consensus.PoSSelector -import com.wavesplatform.database.{DBExt, Keys, openDB} +import com.wavesplatform.database.{DBExt, Keys, RDB} import com.wavesplatform.events.{BlockchainUpdateTriggers, UtxEvent} import com.wavesplatform.extensions.{Context, Extension} import com.wavesplatform.features.EstimatorProvider.* @@ -34,7 +39,7 @@ import com.wavesplatform.state.appender.{BlockAppender, ExtensionAppender, Micro import com.wavesplatform.state.{Blockchain, BlockchainUpdaterImpl, Diff, Height, TxMeta} import com.wavesplatform.transaction.TxValidationError.GenericError import com.wavesplatform.transaction.smart.script.trace.TracedResult -import com.wavesplatform.transaction.{Asset, DiscardedBlocks, Transaction} +import com.wavesplatform.transaction.{DiscardedBlocks, Transaction} import com.wavesplatform.utils.* import com.wavesplatform.utils.Schedulers.* import com.wavesplatform.utx.{UtxPool, UtxPoolImpl} @@ -47,17 +52,13 @@ import kamon.Kamon import kamon.instrumentation.executor.ExecutorInstrumentation import monix.eval.{Coeval, Task} import monix.execution.schedulers.{ExecutorScheduler, SchedulerService} -import monix.execution.{ExecutionModel, Scheduler, UncaughtExceptionReporter} +import monix.execution.{Scheduler, UncaughtExceptionReporter} import monix.reactive.Observable import monix.reactive.subjects.ConcurrentSubject import org.influxdb.dto.Point -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB import org.slf4j.LoggerFactory -import java.io.File -import java.security.Security -import java.util.concurrent.atomic.AtomicBoolean -import java.util.concurrent.{TimeUnit, *} import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.* import scala.concurrent.{Await, Future} @@ -69,9 +70,7 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con import Application.* import monix.execution.Scheduler.Implicits.global as scheduler - private[this] val db = openDB(settings.dbSettings.directory) - - private[this] val spendableBalanceChanged = ConcurrentSubject.publish[(Address, Asset)] + private[this] val rdb = RDB.open(settings.dbSettings) private[this] lazy val upnp = new UPnP(settings.networkSettings.uPnPSettings) // don't initialize unless enabled @@ -102,8 +101,8 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con private var triggers = Seq.empty[BlockchainUpdateTriggers] private[this] var miner: Miner & MinerDebugInfo = Miner.Disabled - private[this] val (blockchainUpdater, levelDB) = - StorageFactory(settings, db, time, spendableBalanceChanged, BlockchainUpdateTriggers.combined(triggers), bc => miner.scheduleMining(bc)) + private[this] val (blockchainUpdater, rocksDB) = + StorageFactory(settings, rdb, time, BlockchainUpdateTriggers.combined(triggers), bc => 
miner.scheduleMining(bc)) @volatile private[this] var maybeUtx: Option[UtxPool] = None @@ -179,7 +178,7 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con allChannels.broadcast(LocalScoreChanged(x)) }(scheduler) - val history = History(blockchainUpdater, blockchainUpdater.liquidBlock, blockchainUpdater.microBlock, db) + val history = History(blockchainUpdater, blockchainUpdater.liquidBlock, blockchainUpdater.microBlock, rdb) val historyReplier = new HistoryReplier(blockchainUpdater.score, history, settings.synchronizationSettings)(historyRepliesScheduler) @@ -218,23 +217,23 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con override def utx: UtxPool = utxStorage override def broadcastTransaction(tx: Transaction): TracedResult[ValidationError, Boolean] = Await.result(transactionPublisher.validateAndBroadcast(tx, None), Duration.Inf) // TODO: Replace with async if possible - override def spendableBalanceChanged: Observable[(Address, Asset)] = app.spendableBalanceChanged - override def actorSystem: ActorSystem = app.actorSystem - override def utxEvents: Observable[UtxEvent] = app.utxEvents + override def actorSystem: ActorSystem = app.actorSystem + override def utxEvents: Observable[UtxEvent] = app.utxEvents override val transactionsApi: CommonTransactionsApi = CommonTransactionsApi( blockchainUpdater.bestLiquidDiff.map(diff => Height(blockchainUpdater.height) -> diff), - db, + rdb, blockchainUpdater, utxStorage, tx => transactionPublisher.validateAndBroadcast(tx, None), - loadBlockAt(db, blockchainUpdater) + loadBlockAt(rdb, blockchainUpdater) ) override val blocksApi: CommonBlocksApi = - CommonBlocksApi(blockchainUpdater, loadBlockMetaAt(db, blockchainUpdater), loadBlockInfoAt(db, blockchainUpdater)) + CommonBlocksApi(blockchainUpdater, loadBlockMetaAt(rdb.db, blockchainUpdater), loadBlockInfoAt(rdb, blockchainUpdater)) override val accountsApi: CommonAccountsApi = - CommonAccountsApi(() => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), db, blockchainUpdater) - override val assetsApi: CommonAssetsApi = CommonAssetsApi(() => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), db, blockchainUpdater) + CommonAccountsApi(() => blockchainUpdater.getCompositeBlockchain, rdb, blockchainUpdater) + override val assetsApi: CommonAssetsApi = + CommonAssetsApi(() => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), rdb.db, blockchainUpdater) } extensions = settings.extensions.map { extensionClassName => @@ -318,13 +317,6 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con // API start if (settings.restAPISettings.enable) { - def loadBalanceHistory(address: Address): Seq[(Int, Long)] = db.readOnly { rdb => - rdb.get(Keys.addressId(address)).fold(Seq.empty[(Int, Long)]) { aid => - rdb.get(Keys.wavesBalanceHistory(aid)).map { h => - h -> rdb.get(Keys.wavesBalance(aid)(h)) - } - } - } val limitedScheduler = Schedulers.timeBoundedFixedPool( @@ -352,8 +344,7 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con val heavyRequestScheduler = Scheduler( if (settings.config.getBoolean("kamon.enable")) ExecutorInstrumentation.instrument(heavyRequestExecutor, "heavy-request-executor") - else heavyRequestExecutor, - ExecutionModel.AlwaysAsyncExecution + else heavyRequestExecutor ) val routeTimeout = new RouteTimeout( @@ -369,6 +360,7 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con extensionContext.transactionsApi, wallet, 
blockchainUpdater, + () => blockchainUpdater.getCompositeBlockchain, () => utxStorage.size, transactionPublisher, time, @@ -413,8 +405,8 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con mbSyncCacheSizes, scoreStatsReporter, configRoot, - loadBalanceHistory, - levelDB.loadStateHash, + rocksDB.loadBalanceHistory, + rocksDB.loadStateHash, () => utxStorage.priorityPool.compositeBlockchain, routeTimeout, heavyRequestScheduler @@ -424,6 +416,7 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con wallet, transactionPublisher, blockchainUpdater, + () => blockchainUpdater.getCompositeBlockchain, time, extensionContext.accountsApi, extensionContext.assetsApi, @@ -475,7 +468,6 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con def shutdown(): Unit = if (shutdownInProgress.compareAndSet(false, true)) { - spendableBalanceChanged.onComplete() maybeUtx.foreach(_.close()) log.info("Closing REST API") @@ -499,8 +491,7 @@ class Application(val actorSystem: ActorSystem, val settings: WavesSettings, con shutdownAndWait(appenderScheduler, "Appender", 5.minutes.some) log.info("Closing storage") - levelDB.close() - db.close() + rdb.close() // extensions should be shut down last, after all node functionality, to guarantee no data loss if (extensions.nonEmpty) { @@ -582,26 +573,24 @@ object Application extends ScorexLogging { settings } - private[wavesplatform] def loadBlockAt(db: DB, blockchainUpdater: BlockchainUpdaterImpl)( + private[wavesplatform] def loadBlockAt(rdb: RDB, blockchainUpdater: BlockchainUpdaterImpl)( height: Int ): Option[(BlockMeta, Seq[(TxMeta, Transaction)])] = - loadBlockInfoAt(db, blockchainUpdater)(height) + loadBlockInfoAt(rdb, blockchainUpdater)(height) - private[wavesplatform] def loadBlockInfoAt(db: DB, blockchainUpdater: BlockchainUpdaterImpl)( + private[wavesplatform] def loadBlockInfoAt(rdb: RDB, blockchainUpdater: BlockchainUpdaterImpl)( height: Int ): Option[(BlockMeta, Seq[(TxMeta, Transaction)])] = - loadBlockMetaAt(db, blockchainUpdater)(height).map { meta => + loadBlockMetaAt(rdb.db, blockchainUpdater)(height).map { meta => meta -> blockchainUpdater .liquidTransactions(meta.id) - .getOrElse(db.readOnly(ro => database.loadTransactions(Height(height), ro))) + .getOrElse(database.loadTransactions(Height(height), rdb)) } - private[wavesplatform] def loadBlockMetaAt(db: DB, blockchainUpdater: BlockchainUpdaterImpl)(height: Int): Option[BlockMeta] = { - val result = blockchainUpdater.liquidBlockMeta + private[wavesplatform] def loadBlockMetaAt(db: RocksDB, blockchainUpdater: BlockchainUpdaterImpl)(height: Int): Option[BlockMeta] = + blockchainUpdater.liquidBlockMeta .filter(_ => blockchainUpdater.height == height) - .orElse(db.get(Keys.blockMetaAt(Height(height)))) - result - } + .orElse(db.get(Keys.blockMetaAt(Height(height))).flatMap(BlockMeta.fromPb)) def main(args: Array[String]): Unit = { diff --git a/node/src/main/scala/com/wavesplatform/Explorer.scala b/node/src/main/scala/com/wavesplatform/Explorer.scala index 6ccb56617d0..419ea1d27eb 100644 --- a/node/src/main/scala/com/wavesplatform/Explorer.scala +++ b/node/src/main/scala/com/wavesplatform/Explorer.scala @@ -3,35 +3,45 @@ package com.wavesplatform import java.io.File import java.nio.ByteBuffer import java.util +import com.google.common.hash.{Funnels, BloomFilter as GBloomFilter} +import com.google.common.math.StatsAccumulator import com.google.common.primitives.Longs import com.wavesplatform.account.Address -import 
com.wavesplatform.api.common.AddressPortfolio +import com.wavesplatform.api.common.{AddressPortfolio, CommonAccountsApi} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.{Base58, Base64, EitherExt2} import com.wavesplatform.database.* +import com.wavesplatform.database.protobuf.StaticAssetInfo import com.wavesplatform.lang.script.ContractScript import com.wavesplatform.lang.script.v1.ExprScript import com.wavesplatform.settings.Constants import com.wavesplatform.state.diffs.{DiffsCommon, SetScriptTransactionDiff} -import com.wavesplatform.state.{Blockchain, Diff, Height, Portfolio} +import com.wavesplatform.state.reader.CompositeBlockchain +import com.wavesplatform.state.{Blockchain, Diff, Height, Portfolio, TransactionId} import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.utils.ScorexLogging -import org.iq80.leveldb.DB +import monix.execution.{ExecutionModel, Scheduler} +import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics +import org.rocksdb.RocksDB +import play.api.libs.json.Json import scala.annotation.tailrec import scala.collection.immutable.VectorMap import scala.collection.mutable +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Future} import scala.jdk.CollectionConverters.* +import scala.util.Using //noinspection ScalaStyle object Explorer extends ScorexLogging { case class Stats(entryCount: Long, totalKeySize: Long, totalValueSize: Long) - def portfolio(db: DB, blockchain: Blockchain, address: Address): Portfolio = + def portfolio(db: RocksDB, blockchain: Blockchain, address: Address): Portfolio = Portfolio( blockchain.balance(address), blockchain.leaseBalance(address), - db.withResource(r => AddressPortfolio.assetBalanceIterator(r, address, Diff.empty, _ => true).to(VectorMap)) + db.withResource(r => AddressPortfolio.assetBalanceIterator(r, address, Diff.empty, _ => true).flatten.to(VectorMap)) ) def main(argsRaw: Array[String]): Unit = { @@ -60,12 +70,26 @@ object Explorer extends ScorexLogging { log.info(s"Data directory: ${settings.dbSettings.directory}") - val db = openDB(settings.dbSettings.directory) - val reader = LevelDBWriter.readOnly(db, settings) + val rdb = RDB.open(settings.dbSettings) + val reader = new RocksDBWriter(rdb, settings.blockchainSettings, settings.dbSettings) val blockchainHeight = reader.height log.info(s"Blockchain height is $blockchainHeight") try { + def loadBalanceHistory(curBalanceKey: Key[CurrentBalance], balanceNodeKey: Height => Key[BalanceNode]): Seq[(Int, Long)] = rdb.db.readOnly { + db => + @tailrec + def getPrevBalances(height: Height, acc: Seq[(Int, Long)]): Seq[(Int, Long)] = { + if (height > 0) { + val balance = rdb.db.get(balanceNodeKey(height)) + getPrevBalances(balance.prevHeight, (height, balance.balance) +: acc) + } else acc + } + + val currentBalance = db.get(curBalanceKey) + (currentBalance.height, currentBalance.balance) +: getPrevBalances(currentBalance.prevHeight, Seq.empty).reverse + } + @inline def argument(i: Int, msg: => String) = args.applyOrElse(i, (_: Int) => throw new IllegalArgumentException(s"Argument #${i + 1} missing: $msg")) val flag = argument(0, "command").toUpperCase @@ -73,14 +97,14 @@ object Explorer extends ScorexLogging { flag match { case "WB" => val balances = mutable.Map[BigInt, Long]() - db.iterateOver(KeyTags.WavesBalance) { e => + rdb.db.iterateOver(KeyTags.WavesBalance) { e => val addressId = BigInt(e.getKey.drop(6)) val balance = Longs.fromByteArray(e.getValue) balances += (addressId -> 
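The loadBalanceHistory helper above reflects the new storage shape: one "current" record per key plus height-stamped nodes that each carry prevHeight, forming a linked list back through time. A standalone sketch of that walk with an assumed node shape (illustration, not the Explorer code):

import scala.annotation.tailrec

// Assumed shape, mirroring how CurrentBalance/BalanceNode are read above.
final case class Node(balance: Long, prevHeight: Int)

// Walk the prevHeight chain down to height 0 and return (height, balance) pairs newest-first.
def history(currentHeight: Int, current: Node, nodeAt: Int => Node): List[(Int, Long)] = {
  @tailrec
  def older(h: Int, acc: List[(Int, Long)]): List[(Int, Long)] =
    if (h <= 0) acc.reverse // the oldest entry ends up at the head, reverse to get newest-first
    else {
      val n = nodeAt(h)
      older(n.prevHeight, (h, n.balance) :: acc)
    }
  (currentHeight, current.balance) :: older(current.prevHeight, Nil)
}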
balance) } var actualTotalReward = 0L - db.iterateOver(KeyTags.BlockReward) { e => + rdb.db.iterateOver(KeyTags.BlockReward) { e => actualTotalReward += Longs.fromByteArray(e.getValue) } @@ -98,7 +122,7 @@ object Explorer extends ScorexLogging { case "DA" => val addressIds = mutable.Seq[(BigInt, Address)]() - db.iterateOver(KeyTags.AddressId) { e => + rdb.db.iterateOver(KeyTags.AddressId) { e => val address = Address.fromBytes(e.getKey.drop(2), settings.blockchainSettings.addressSchemeCharacter.toByte) val addressId = BigInt(e.getValue) addressIds :+ (addressId -> address) @@ -114,58 +138,63 @@ object Explorer extends ScorexLogging { val maybeBlockId = Base58.tryDecodeWithLimit(argument(1, "block id")).toOption.map(ByteStr.apply) if (maybeBlockId.isDefined) { val kBlockHeight = Keys.heightOf(maybeBlockId.get) - val blockHeightBytes = db.get(kBlockHeight.keyBytes) + val blockHeightBytes = rdb.db.get(kBlockHeight.keyBytes) val maybeBlockHeight = kBlockHeight.parse(blockHeightBytes) maybeBlockHeight.foreach { h => val kBlock = Keys.blockInfoBytesAt(Height(h)) - val blockBytes = db.get(kBlock.keyBytes) + val blockBytes = rdb.db.get(kBlock.keyBytes) log.info(s"BlockId=${maybeBlockId.get} at h=$h: ${Base64.encode(blockBytes)}") } } else log.error("No block ID was provided") case "O" => + def loadVfHistory(orderId: ByteStr): Seq[(Int, Long, Long)] = { + @tailrec + def getPrevVfs(height: Height, acc: Seq[(Int, Long, Long)]): Seq[(Int, Long, Long)] = { + if (height > 0) { + val vf = rdb.db.get(Keys.filledVolumeAndFeeAt(orderId, height)) + getPrevVfs(vf.prevHeight, (height, vf.volume, vf.fee) +: acc) + } else acc + } + + val currentVf = rdb.db.get(Keys.filledVolumeAndFee(orderId)) + (currentVf.height, currentVf.volume, currentVf.fee) +: getPrevVfs(currentVf.prevHeight, Seq.empty).reverse + } + val orderId = Base58.tryDecodeWithLimit(argument(1, "order id")).toOption.map(ByteStr.apply) if (orderId.isDefined) { - val kVolumeAndFee = Keys.filledVolumeAndFee(orderId.get)(blockchainHeight) - val bytes1 = db.get(kVolumeAndFee.keyBytes) + val kVolumeAndFee = Keys.filledVolumeAndFeeAt(orderId.get, Height(blockchainHeight)) + val bytes1 = rdb.db.get(kVolumeAndFee.keyBytes) val v = kVolumeAndFee.parse(bytes1) log.info(s"OrderId = ${Base58.encode(orderId.get.arr)}: Volume = ${v.volume}, Fee = ${v.fee}") - val kVolumeAndFeeHistory = Keys.filledVolumeAndFeeHistory(orderId.get) - val bytes2 = db.get(kVolumeAndFeeHistory.keyBytes) - val value2 = kVolumeAndFeeHistory.parse(bytes2) - val value2Str = value2.mkString("[", ", ", "]") - log.info(s"OrderId = ${Base58.encode(orderId.get.arr)}: History = $value2Str") - value2.foreach { h => - val k = Keys.filledVolumeAndFee(orderId.get)(h) - val v = k.parse(db.get(k.keyBytes)) - log.info(s"\t h = $h: Volume = ${v.volume}, Fee = ${v.fee}") + val vfHistory = loadVfHistory(orderId.get) + val heights = vfHistory.map(_._1) + val heightsStr = heights.mkString("[", ", ", "]") + log.info(s"OrderId = ${Base58.encode(orderId.get.arr)}: History = $heightsStr") + vfHistory.foreach { case (h, volume, fee) => + log.info(s"\t h = $h: Volume = $volume, Fee = $fee") } } else log.error("No order ID was provided") case "A" => val address = Address.fromString(argument(1, "address")).explicitGet() val aid = Keys.addressId(address) - val addressId = aid.parse(db.get(aid.keyBytes)).get + val addressId = aid.parse(rdb.db.get(aid.keyBytes)).get log.info(s"Address id = $addressId") - val kwbh = Keys.wavesBalanceHistory(addressId) - val wbh = kwbh.parse(db.get(kwbh.keyBytes)) - - val balances = 
wbh.map { h => - val k = Keys.wavesBalance(addressId)(h) - h -> k.parse(db.get(k.keyBytes)) + loadBalanceHistory(Keys.wavesBalance(addressId), Keys.wavesBalanceAt(addressId, _)).foreach { case (h, balance) => + log.info(s"h = $h: balance = $balance") } - balances.foreach(b => log.info(s"h = ${b._1}: balance = ${b._2}")) case "AC" => - val lastAddressId = Keys.lastAddressId.parse(db.get(Keys.lastAddressId.keyBytes)) + val lastAddressId = Keys.lastAddressId.parse(rdb.db.get(Keys.lastAddressId.keyBytes)) log.info(s"Last address id: $lastAddressId") case "AD" => val result = new util.HashMap[Address, java.lang.Integer]() - db.iterateOver(KeyTags.IdToAddress) { e => + rdb.db.iterateOver(KeyTags.IdToAddress) { e => result.compute( Address.fromBytes(e.getValue).explicitGet(), (_, prev) => @@ -184,34 +213,31 @@ object Explorer extends ScorexLogging { val address = Address.fromString(argument(1, "address")).explicitGet() val asset = IssuedAsset(ByteStr.decodeBase58(argument(2, "asset")).get) val ai = Keys.addressId(address) - val addressId = ai.parse(db.get(ai.keyBytes)).get + val addressId = ai.parse(rdb.db.get(ai.keyBytes)).get log.info(s"Address ID = $addressId") - val kabh = Keys.assetBalanceHistory(addressId, asset) - val abh = kabh.parse(db.get(kabh.keyBytes)) - - val balances = abh.map { h => - val k = Keys.assetBalance(addressId, asset)(h) - h -> k.parse(db.get(k.keyBytes)) + loadBalanceHistory(Keys.assetBalance(addressId, asset), Keys.assetBalanceAt(addressId, asset, _)).foreach { case (h, balance) => + log.info(s"h = $h: balance = $balance") } - balances.foreach(b => log.info(s"h = ${b._1}: balance = ${b._2}")) case "S" => log.info("Collecting DB stats") - val iterator = db.iterator() + val iterator = rdb.db.newIterator() val result = new util.HashMap[Short, Stats] iterator.seekToFirst() - while (iterator.hasNext) { - val entry = iterator.next() - val keyPrefix = ByteBuffer.wrap(entry.getKey).getShort + while (iterator.isValid) { + val keyPrefix = ByteBuffer.wrap(iterator.key()).getShort + val valueLength = iterator.value().length + val keyLength = iterator.key().length result.compute( keyPrefix, (_, maybePrev) => maybePrev match { - case null => Stats(1, entry.getKey.length, entry.getValue.length) - case prev => Stats(prev.entryCount + 1, prev.totalKeySize + entry.getKey.length, prev.totalValueSize + entry.getValue.length) + case null => Stats(1, keyLength, valueLength) + case prev => Stats(prev.entryCount + 1, prev.totalKeySize + keyLength, prev.totalValueSize + valueLength) } ) + iterator.next() } iterator.close() @@ -222,48 +248,41 @@ object Explorer extends ScorexLogging { case "TXBH" => val h = Height(argument(1, "height").toInt) - val txs = db.readOnly(loadTransactions(h, _)) + val txs = loadTransactions(h, rdb) println(txs.length) txs.foreach { case (_, tx) => println(tx) } case "AP" => val address = Address.fromString(argument(1, "address")).explicitGet() - val pf = portfolio(db, reader, address) + val pf = portfolio(rdb.db, reader, address) log.info(s"$address : ${pf.balance} WAVES, ${pf.lease}, ${pf.assets.size} assets") - pf.assets.toSeq.sortBy(_._1.toString) foreach { - case (assetId, balance) => log.info(s"$assetId : $balance") + pf.assets.toSeq.sortBy(_._1.toString) foreach { case (assetId, balance) => + log.info(s"$assetId : $balance") } - case "HS" => - val height = argument(1, "height").toInt - val hitSourceKey = Keys.hitSource(height) - val hitSource = db.get(hitSourceKey.keyBytes) - log.info(s"HitSource at height=$height: ${Base64.encode(hitSource)}") - case "OC" => 
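Aside on the stats loop above: RocksJava iterators are cursor-style (isValid/next) rather than hasNext/peekNext, and they must be closed. A minimal, general-purpose version of the loop this patch uses throughout (helper name is illustrative):

import org.rocksdb.RocksDB
import scala.util.Using

// Count entries whose key starts with `prefix`, closing the iterator afterwards.
def countByPrefix(db: RocksDB, prefix: Array[Byte]): Long =
  Using.resource(db.newIterator()) { iter =>
    var count = 0L
    iter.seek(prefix)
    while (iter.isValid && iter.key().startsWith(prefix)) {
      count += 1
      iter.next()
    }
    count
  }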
log.info("Counting orders") var counter = 0L - db.iterateOver(KeyTags.FilledVolumeAndFeeHistory) { _ => + rdb.db.iterateOver(KeyTags.FilledVolumeAndFeeHistory) { _ => counter += 1 } log.info(s"Found $counter orders") case "CAT" => log.info(s"Counting address transactions") - val addressCount = db.get(Keys.lastAddressId).get.toInt + val addressCount = rdb.db.get(Keys.lastAddressId).get.toInt log.info(s"Processing $addressCount addresses") val txCounts = new Array[Int](addressCount + 1) - db.iterateOver(KeyTags.AddressTransactionHeightTypeAndNums) { e => + rdb.db.iterateOver(KeyTags.AddressTransactionHeightTypeAndNums) { e => txCounts(Longs.fromByteArray(e.getKey.slice(2, 10)).toInt) += readTransactionHNSeqAndType(e.getValue)._2.size } log.info("Sorting result") - txCounts.zipWithIndex.sorted.takeRight(100).foreach { - case (count, id) => - log.info(s"${db.get(Keys.idToAddress(AddressId(id.toLong)))}: $count") + txCounts.zipWithIndex.sorted.takeRight(100).foreach { case (count, id) => + log.info(s"${rdb.db.get(Keys.idToAddress(AddressId(id.toLong)))}: $count") } case "ES" => - db.iterateOver(KeyTags.AddressScript) { e => + rdb.db.iterateOver(KeyTags.AddressScript) { e => val asi = readAccountScriptInfo(e.getValue) val estimationResult = asi.script match { case ContractScript.ContractScriptImpl(stdLibVersion, expr) => @@ -275,7 +294,7 @@ object Explorer extends ScorexLogging { estimationResult.left.foreach { error => val addressId = Longs.fromByteArray(e.getKey.drop(2).dropRight(4)) - val address = db.get(Keys.idToAddress(AddressId(addressId))) + val address = rdb.db.get(Keys.idToAddress(AddressId(addressId))) log.info(s"$address: $error") } } @@ -284,9 +303,9 @@ object Explorer extends ScorexLogging { val PrefixLength = argument(1, "prefix").toInt var prevAssetId = Array.emptyByteArray var assetCounter = 0 - db.iterateOver(KeyTags.AssetStaticInfo) { e => + rdb.db.iterateOver(KeyTags.AssetStaticInfo) { e => assetCounter += 1 - val thisAssetId = e.getKey.drop(2) + val thisAssetId = StaticAssetInfo.parseFrom(e.getValue).id.toByteArray if (prevAssetId.nonEmpty) { var counter = 0 while (counter < PrefixLength && prevAssetId(counter) == thisAssetId(counter)) counter += 1 @@ -297,7 +316,79 @@ object Explorer extends ScorexLogging { prevAssetId = thisAssetId } log.info(s"Checked $assetCounter asset(s)") + + case "LDT" => + val s = Scheduler.fixedPool("foo-bar", 8, executionModel = ExecutionModel.AlwaysAsyncExecution) + + def countEntries(): Future[Long] = { + CommonAccountsApi(() => CompositeBlockchain(reader, Diff.empty), rdb, reader) + .dataStream(Address.fromString("3PC9BfRwJWWiw9AREE2B3eWzCks3CYtg4yo").explicitGet(), None) + .countL + .runToFuture(s) + } + + import scala.concurrent.ExecutionContext.Implicits.global + + println( + Await.result( + Future.sequence(Seq.fill(16)(countEntries())), + Duration.Inf + ) + ) + + case "DDD" => + log.info(s"Collecting addresses") + var count = 0L + rdb.db.iterateOver(KeyTags.AddressId) { _ => + count += 1 + } + log.info(s"Found $count addresses") + case "TC" => + val bf = GBloomFilter.create[Array[Byte]](Funnels.byteArrayFunnel(), 200_000_000L) + log.info("Counting transactions") + var count = 0L + rdb.db.iterateOver(KeyTags.TransactionMetaById, Some(rdb.txMetaHandle.handle)) { e => + bf.put(e.getKey.drop(2)) + count += 1 + } + log.info(s"Found $count transactions") + case "SH" => + val targetHeight = argument(1, "height").toInt + log.info(s"Loading state hash at $targetHeight") + rdb.db.get(Keys.stateHash(targetHeight)).foreach { sh => + 
println(Json.toJson(sh).toString()) + } + case "BSD" => + log.info("Collecting block size distribution") + val sa = new StatsAccumulator + val ds = new DescriptiveStatistics() + rdb.db.iterateOver(KeyTags.BlockInfoAtHeight) { e => + val size = readBlockMeta(e.getValue).size + sa.add(size) + ds.addValue(size) + } + log.info(s"${sa.snapshot()}") + log.info(s"${ds.toString}") + case "CTI" => + log.info("Counting transaction IDs") + var counter = 0 + Using(rdb.db.newIterator(rdb.txMetaHandle.handle)) { iter => + iter.seekToFirst() +// iter.seek(KeyTags.TransactionMetaById.prefixBytes) + log.info(iter.key().mkString(",")) + while (iter.isValid && iter.key().startsWith(KeyTags.TransactionMetaById.prefixBytes)) { + counter += 1 + iter.next() + } + } + log.info(s"Found $counter transaction IDs") + case "TXM" => + log.info(s"TxMeta column family: ${new String(rdb.txMetaHandle.handle.getName)}/${rdb.txMetaHandle.handle.getID}") + val id = argument(1, "id") + log.info(s"Load meta for $id") + val meta = rdb.db.get(Keys.transactionMetaById(TransactionId(ByteStr.decodeBase58(id).get), rdb.txMetaHandle)) + log.info(s"Meta: $meta") } - } finally db.close() + } finally rdb.close() } } diff --git a/node/src/main/scala/com/wavesplatform/Exporter.scala b/node/src/main/scala/com/wavesplatform/Exporter.scala index ba572002bbc..d04124924c0 100644 --- a/node/src/main/scala/com/wavesplatform/Exporter.scala +++ b/node/src/main/scala/com/wavesplatform/Exporter.scala @@ -3,7 +3,7 @@ package com.wavesplatform import java.io.{BufferedOutputStream, File, FileOutputStream, OutputStream} import com.google.common.primitives.Ints import com.wavesplatform.block.Block -import com.wavesplatform.database.{DBExt, openDB} +import com.wavesplatform.database.RDB import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.history.StorageFactory import com.wavesplatform.metrics.Metrics @@ -11,8 +11,6 @@ import com.wavesplatform.protobuf.block.PBBlocks import com.wavesplatform.state.Height import com.wavesplatform.utils.* import kamon.Kamon -import monix.execution.UncaughtExceptionReporter -import monix.reactive.Observer import scopt.OParser import scala.concurrent.Await @@ -36,15 +34,13 @@ object Exporter extends ScorexLogging { // noinspection ScalaStyle def main(args: Array[String]): Unit = { OParser.parse(commandParser, args, ExporterOptions()).foreach { case ExporterOptions(configFile, outputFileNamePrefix, exportHeight, format) => - implicit val reporter: UncaughtExceptionReporter = UncaughtExceptionReporter.default - val settings = Application.loadApplicationConfig(configFile) Using.resources( new NTP(settings.ntpServer), - openDB(settings.dbSettings.directory) - ) { (time, db) => - val (blockchain, _) = StorageFactory(settings, db, time, Observer.empty, BlockchainUpdateTriggers.noop) + RDB.open(settings.dbSettings) + ) { (time, rdb) => + val (blockchain, _) = StorageFactory(settings, rdb, time, BlockchainUpdateTriggers.noop) val blockchainHeight = blockchain.height val height = Math.min(blockchainHeight, exportHeight.getOrElse(blockchainHeight)) log.info(s"Blockchain height is $blockchainHeight exporting to $height") @@ -64,7 +60,7 @@ object Exporter extends ScorexLogging { val start = System.currentTimeMillis() exportedBytes += IO.writeHeader(bos, format) (2 to height).foreach { h => - val block = db.readOnly(ro => database.loadBlock(Height(h), ro)) + val block = database.loadBlock(Height(h), rdb) exportedBytes += (if (format == "JSON") IO.exportBlockToJson(bos, block, h) else IO.exportBlockToBinary(bos, 
block, format == Formats.Binary)) if (h % (height / 10) == 0) diff --git a/node/src/main/scala/com/wavesplatform/Importer.scala b/node/src/main/scala/com/wavesplatform/Importer.scala index 9f540b8fb1c..2f37ad08066 100644 --- a/node/src/main/scala/com/wavesplatform/Importer.scala +++ b/node/src/main/scala/com/wavesplatform/Importer.scala @@ -1,46 +1,46 @@ package com.wavesplatform +import java.io.* +import java.net.{MalformedURLException, URL} import akka.actor.ActorSystem import com.google.common.io.ByteStreams import com.google.common.primitives.Ints import com.wavesplatform.Exporter.Formats -import com.wavesplatform.account.Address import com.wavesplatform.api.common.{CommonAccountsApi, CommonAssetsApi, CommonBlocksApi, CommonTransactionsApi} import com.wavesplatform.block.{Block, BlockHeader} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.consensus.PoSSelector -import com.wavesplatform.database.{DBExt, KeyTags, openDB} +import com.wavesplatform.database.{DBExt, KeyTags, RDB} import com.wavesplatform.events.{BlockchainUpdateTriggers, UtxEvent} import com.wavesplatform.extensions.{Context, Extension} import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.history.StorageFactory import com.wavesplatform.lang.ValidationError import com.wavesplatform.mining.Miner -import com.wavesplatform.protobuf.block.PBBlocks +import com.wavesplatform.protobuf.block.{PBBlocks, VanillaBlock} import com.wavesplatform.settings.WavesSettings +import com.wavesplatform.state.ParSignatureChecker.sigverify import com.wavesplatform.state.appender.BlockAppender -import com.wavesplatform.state.{Blockchain, BlockchainUpdaterImpl, Diff, Height} +import com.wavesplatform.state.{Blockchain, BlockchainUpdaterImpl, Diff, Height, ParSignatureChecker} import com.wavesplatform.transaction.TxValidationError.GenericError import com.wavesplatform.transaction.smart.script.trace.TracedResult -import com.wavesplatform.transaction.{Asset, DiscardedBlocks, Transaction} +import com.wavesplatform.transaction.{DiscardedBlocks, Transaction} import com.wavesplatform.utils.* import com.wavesplatform.utx.{UtxPool, UtxPoolImpl} import com.wavesplatform.wallet.Wallet import kamon.Kamon import monix.eval.Task import monix.execution.Scheduler -import monix.reactive.{Observable, Observer} -import org.iq80.leveldb.DB +import monix.reactive.Observable import scopt.OParser -import java.io.* -import java.net.{MalformedURLException, URL} +import scala.annotation.tailrec +import scala.collection.mutable import scala.concurrent.duration.* import scala.concurrent.{Await, Future} import scala.util.{Failure, Success, Try} object Importer extends ScorexLogging { - import monix.execution.Scheduler.Implicits.global type AppendBlock = Block => Task[Either[ValidationError, Option[BigInt]]] @@ -50,7 +50,8 @@ object Importer extends ScorexLogging { importHeight: Int = Int.MaxValue, format: String = Formats.Binary, verify: Boolean = true, - dryRun: Boolean = false + dryRun: Boolean = false, + maxQueueSize: Int = 100 ) def parseOptions(args: Array[String]): ImportOptions = { @@ -87,6 +88,10 @@ object Importer extends ScorexLogging { opt[Unit]('n', "no-verify") .text("Disable signatures verification") .action((_, c) => c.copy(verify = false)), + opt[Int]('q', "max-queue-size") + .text("Max size of blocks' queue") + .action((maxSize, c) => c.copy(maxQueueSize = maxSize)) + .validate(maxSize => if (maxSize > 0) success else failure("Max blocks' queue size must be > 0")), help("help").hidden() ) } @@ -109,7 +114,7 @@ 
object Importer extends ScorexLogging { appenderScheduler: Scheduler, extensionTime: Time, utxPool: UtxPool, - db: DB, + rdb: RDB, extensionActorSystem: ActorSystem ): Seq[Extension] = if (wavesSettings.extensions.isEmpty) Seq.empty @@ -126,24 +131,27 @@ object Importer extends ScorexLogging { override def broadcastTransaction(tx: Transaction): TracedResult[ValidationError, Boolean] = TracedResult.wrapE(Left(GenericError("Not implemented during import"))) - override def spendableBalanceChanged: Observable[(Address, Asset)] = Observable.empty - override def actorSystem: ActorSystem = extensionActorSystem - override def utxEvents: Observable[UtxEvent] = Observable.empty + override def actorSystem: ActorSystem = extensionActorSystem + override def utxEvents: Observable[UtxEvent] = Observable.empty override def transactionsApi: CommonTransactionsApi = CommonTransactionsApi( blockchainUpdater.bestLiquidDiff.map(diff => Height(blockchainUpdater.height) -> diff), - db, + rdb, blockchainUpdater, utxPool, _ => Future.successful(TracedResult.wrapE(Left(GenericError("Not implemented during import")))), - Application.loadBlockAt(db, blockchainUpdater) + Application.loadBlockAt(rdb, blockchainUpdater) ) override def blocksApi: CommonBlocksApi = - CommonBlocksApi(blockchainUpdater, Application.loadBlockMetaAt(db, blockchainUpdater), Application.loadBlockInfoAt(db, blockchainUpdater)) + CommonBlocksApi( + blockchainUpdater, + Application.loadBlockMetaAt(rdb.db, blockchainUpdater), + Application.loadBlockInfoAt(rdb, blockchainUpdater) + ) override def accountsApi: CommonAccountsApi = - CommonAccountsApi(() => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), db, blockchainUpdater) + CommonAccountsApi(() => blockchainUpdater.getCompositeBlockchain, rdb, blockchainUpdater) override def assetsApi: CommonAssetsApi = - CommonAssetsApi(() => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), db, blockchainUpdater) + CommonAssetsApi(() => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), rdb.db, blockchainUpdater) } } @@ -174,7 +182,8 @@ object Importer extends ScorexLogging { blockchain: Blockchain, appendBlock: AppendBlock, importOptions: ImportOptions, - skipBlocks: Boolean + skipBlocks: Boolean, + appender: Scheduler ): Unit = { val lenBytes = new Array[Byte](Ints.BYTES) val start = System.nanoTime() @@ -194,53 +203,73 @@ object Importer extends ScorexLogging { ) } - while (!quit && counter < blocksToApply) lock.synchronized { - val s1 = ByteStreams.read(inputStream, lenBytes, 0, Ints.BYTES) - if (s1 == Ints.BYTES) { - val blockSize = Ints.fromByteArray(lenBytes) - - lazy val blockBytes = new Array[Byte](blockSize) - val factReadSize = - if (blocksToSkip > 0) { - // File IO optimization - ByteStreams.skipFully(inputStream, blockSize) - blockSize - } else { - ByteStreams.read(inputStream, blockBytes, 0, blockSize) - } + val maxSize = importOptions.maxQueueSize + val queue = new mutable.Queue[VanillaBlock](maxSize) + + @tailrec + def readBlocks(queue: mutable.Queue[VanillaBlock], remainCount: Int, maxCount: Int): Unit = { + if (remainCount == 0) () + else { + val s1 = ByteStreams.read(inputStream, lenBytes, 0, Ints.BYTES) + if (s1 == Ints.BYTES) { + val blockSize = Ints.fromByteArray(lenBytes) + + lazy val blockBytes = new Array[Byte](blockSize) + val factReadSize = + if (blocksToSkip > 0) { + // File IO optimization + ByteStreams.skipFully(inputStream, blockSize) + blockSize + } else { + ByteStreams.read(inputStream, blockBytes, 0, blockSize) + } - if (factReadSize == blockSize) { - if 
(blocksToSkip > 0) { - blocksToSkip -= 1 - } else { - val blockV5 = blockchain.isFeatureActivated( - BlockchainFeatures.BlockV5, - blockchain.height + 1 - ) - val block = - (if (importOptions.format == Formats.Binary && !blockV5) Block.parseBytes(blockBytes) - else PBBlocks.vanilla(PBBlocks.addChainId(protobuf.block.PBBlock.parseFrom(blockBytes)), unsafe = true)).get - if (blockchain.lastBlockId.contains(block.header.reference)) { - Await.result(appendBlock(block).runAsyncLogErr, Duration.Inf) match { - case Left(ve) => - log.error(s"Error appending block: $ve") - quit = true - case _ => - counter = counter + 1 - } + if (factReadSize == blockSize) { + if (blocksToSkip > 0) { + blocksToSkip -= 1 } else { - log.warn(s"Block $block is not a child of the last block ${blockchain.lastBlockId.get}") + val blockV5 = blockchain.isFeatureActivated(BlockchainFeatures.BlockV5, blockchain.height + (maxCount - remainCount) + 1) + val rideV6 = blockchain.isFeatureActivated(BlockchainFeatures.RideV6, blockchain.height + (maxCount - remainCount) + 1) + lazy val parsedProtoBlock = PBBlocks.vanilla(PBBlocks.addChainId(protobuf.block.PBBlock.parseFrom(blockBytes)), unsafe = true) + + val block = (if (!blockV5) Block.parseBytes(blockBytes) else parsedProtoBlock).orElse(parsedProtoBlock).get + + ParSignatureChecker.checkBlockAndTxSignatures(block, rideV6) + + queue.enqueue(block) } + readBlocks(queue, remainCount - 1, maxCount) + } else { + log.info(s"$factReadSize != expected $blockSize") + quit = true } } else { - log.info(s"$factReadSize != expected $blockSize") + if (inputStream.available() > 0) log.info(s"Expecting to read ${Ints.BYTES} but got $s1 (${inputStream.available()})") quit = true } - } else { - if (inputStream.available() > 0) log.info(s"Expecting to read ${Ints.BYTES} but got $s1 (${inputStream.available()})") - quit = true } } + + while ((!quit || queue.nonEmpty) && counter < blocksToApply) + if (!quit && queue.isEmpty) { + readBlocks(queue, maxSize, maxSize) + } else { + lock.synchronized { + val block = queue.dequeue() + if (blockchain.lastBlockId.contains(block.header.reference)) { + Await.result(appendBlock(block).runAsyncLogErr(appender), Duration.Inf) match { + case Left(ve) => + log.error(s"Error appending block: $ve") + queue.clear() + quit = true + case _ => + counter = counter + 1 + } + } else { + log.warn(s"Block $block is not a child of the last block ${blockchain.lastBlockId.get}") + } + } + } } def main(args: Array[String]): Unit = { @@ -274,14 +303,14 @@ object Importer extends ScorexLogging { val time = new NTP(settings.ntpServer) val actorSystem = ActorSystem("wavesplatform-import") - val db = openDB(settings.dbSettings.directory) - val (blockchainUpdater, levelDb) = - StorageFactory(settings, db, time, Observer.empty, BlockchainUpdateTriggers.combined(triggers)) + val rdb = RDB.open(settings.dbSettings) + val (blockchainUpdater, _) = + StorageFactory(settings, rdb, time, BlockchainUpdateTriggers.combined(triggers)) val utxPool = new UtxPoolImpl(time, blockchainUpdater, settings.utxSettings, settings.maxTxErrorLogSize, settings.minerSettings.enable) val pos = PoSSelector(blockchainUpdater, settings.synchronizationSettings.maxBaseTarget) - val extAppender = BlockAppender(blockchainUpdater, time, utxPool, pos, scheduler, importOptions.verify) _ + val extAppender = BlockAppender(blockchainUpdater, time, (_: Seq[Diff]) => {}, pos, scheduler, importOptions.verify, txSignParCheck = false) _ - val extensions = initExtensions(settings, blockchainUpdater, scheduler, time, utxPool, db, 
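For the import loop above: the new flow alternates between a read-ahead phase (parse up to max-queue-size blocks and fire their signature checks eagerly via ParSignatureChecker) and a sequential append phase. A much-simplified sketch of that shape; verifySignatures stands in for ParSignatureChecker.checkBlockAndTxSignatures, whose internals are not shown in this hunk:

import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future}

// Two-phase import: refill the queue when it runs dry, otherwise append in order.
def importAll[B](readBlock: () => Option[B], append: B => Either[String, Unit], maxQueueSize: Int)(
    verifySignatures: B => Unit
)(implicit ec: ExecutionContext): Unit = {
  val queue = mutable.Queue.empty[B]
  var done  = false
  while (!done || queue.nonEmpty)
    if (!done && queue.isEmpty) {
      var read = 0
      while (read < maxQueueSize && !done)
        readBlock() match {
          case Some(b) =>
            Future(verifySignatures(b)) // warm the signature check in the background
            queue.enqueue(b)
            read += 1
          case None => done = true
        }
    } else
      append(queue.dequeue()) match {
        case Left(_)  => queue.clear(); done = true // stop on the first append error
        case Right(_) => ()
      }
}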
actorSystem) + val extensions = initExtensions(settings, blockchainUpdater, scheduler, time, utxPool, rdb, actorSystem) checkGenesis(settings, blockchainUpdater, Miner.Disabled) val importFileOffset = @@ -290,7 +319,7 @@ object Importer extends ScorexLogging { importOptions.format match { case Formats.Binary => var result = 0L - db.iterateOver(KeyTags.BlockInfoAtHeight) { e => + rdb.db.iterateOver(KeyTags.BlockInfoAtHeight) { e => e.getKey match { case Array(_, _, 0, 0, 0, 1) => // Skip genesis case _ => @@ -320,7 +349,7 @@ object Importer extends ScorexLogging { Await.result(actorSystem.terminate(), 10.second) lock.synchronized { if (blockchainUpdater.isFeatureActivated(BlockchainFeatures.NG) && blockchainUpdater.liquidBlockMeta.nonEmpty) { - // Force store liquid block in leveldb + // Force store liquid block in rocksdb val lastHeader = blockchainUpdater.lastBlockHeader.get.header val pseudoBlock = Block( BlockHeader( @@ -347,14 +376,21 @@ object Importer extends ScorexLogging { // Terminate extensions Await.ready(Future.sequence(extensions.map(_.shutdown())), settings.extensionsShutdownTimeout) + utxPool.close() blockchainUpdater.shutdown() - levelDb.close() - db.close() + rdb.close() } inputStream.close() } - startImport(inputStream, blockchainUpdater, extAppender, importOptions, importFileOffset == 0) + startImport( + inputStream, + blockchainUpdater, + extAppender, + importOptions, + importFileOffset == 0, + scheduler + ) Await.result(Kamon.stopModules(), 10.seconds) } } diff --git a/node/src/main/scala/com/wavesplatform/account/Recipient.scala b/node/src/main/scala/com/wavesplatform/account/Recipient.scala index 419cc0a7f92..081143d3992 100644 --- a/node/src/main/scala/com/wavesplatform/account/Recipient.scala +++ b/node/src/main/scala/com/wavesplatform/account/Recipient.scala @@ -1,8 +1,8 @@ package com.wavesplatform.account -import java.nio.ByteBuffer - import com.google.common.cache.{Cache, CacheBuilder} + +import java.nio.ByteBuffer import com.google.common.primitives.{Bytes, Ints} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.Base58 diff --git a/node/src/main/scala/com/wavesplatform/api/BlockMeta.scala b/node/src/main/scala/com/wavesplatform/api/BlockMeta.scala index 5b557396be1..6f8495d982a 100644 --- a/node/src/main/scala/com/wavesplatform/api/BlockMeta.scala +++ b/node/src/main/scala/com/wavesplatform/api/BlockMeta.scala @@ -4,6 +4,8 @@ import com.wavesplatform.block.Block.protoHeaderHash import com.wavesplatform.block.serialization.BlockHeaderSerializer import com.wavesplatform.block.{Block, BlockHeader, SignedBlockHeader} import com.wavesplatform.common.state.ByteStr +import com.wavesplatform.protobuf.ByteStringExt +import com.wavesplatform.protobuf.block.PBBlocks import monix.eval.Coeval import play.api.libs.json.{JsObject, Json} @@ -25,7 +27,7 @@ case class BlockMeta( BlockHeaderSerializer.toJson(header, size, transactionCount, signature) ++ Json.obj("height" -> height, "totalFee" -> totalFeeInWaves) ++ reward.fold(Json.obj())(r => Json.obj("reward" -> r)) ++ - vrf.fold(Json.obj())(v => Json.obj("VRF" -> v.toString)) ++ + vrf.fold(Json.obj())(v => Json.obj("VRF" -> v.toString)) ++ headerHash.fold(Json.obj())(h => Json.obj("id" -> h.toString)) } } @@ -42,4 +44,21 @@ object BlockMeta { reward, vrf ) + + def fromPb(pbMeta: com.wavesplatform.database.protobuf.BlockMeta): Option[BlockMeta] = { + pbMeta.header.map { pbHeader => + BlockMeta( + PBBlocks.vanilla(pbHeader), + pbMeta.signature.toByteStr, + if (pbMeta.headerHash.isEmpty) None 
else Some(pbMeta.headerHash.toByteStr), + pbMeta.height, + pbMeta.size, + pbMeta.transactionCount, + pbMeta.totalFeeInWaves, + Some(pbMeta.reward), + if (pbMeta.vrf.isEmpty) None + else Some(pbMeta.vrf.toByteStr) + ) + } + } } diff --git a/node/src/main/scala/com/wavesplatform/api/common/AddressPortfolio.scala b/node/src/main/scala/com/wavesplatform/api/common/AddressPortfolio.scala index c655eaf0623..05d48f8bda3 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/AddressPortfolio.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/AddressPortfolio.scala @@ -2,79 +2,124 @@ package com.wavesplatform.api.common import cats.syntax.semigroup.* import com.google.common.collect.AbstractIterator +import com.google.common.primitives.Ints import com.wavesplatform.account.Address +import com.wavesplatform.api.common.NFTIterator.BatchSize import com.wavesplatform.common.state.ByteStr import com.wavesplatform.crypto -import com.wavesplatform.database.{AddressId, DBResource, KeyTags, Keys, readIntSeq} +import com.wavesplatform.database.{AddressId, CurrentBalance, DBResource, Key, KeyTags, Keys, readCurrentBalance} import com.wavesplatform.state.{AssetDescription, Diff, Portfolio} import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.utils.ScorexLogging -import scala.annotation.tailrec +import java.nio.ByteBuffer +import scala.collection.mutable.ArrayBuffer import scala.jdk.CollectionConverters.* class NFTIterator(addressId: AddressId, maybeAfter: Option[IssuedAsset], resource: DBResource) - extends AbstractIterator[(IssuedAsset, Long)] + extends AbstractIterator[Seq[(IssuedAsset, Long)]] with ScorexLogging { - private val prefixBytes = KeyTags.NftPossession.prefixBytes ++ addressId.toByteArray + private val prefixBytes: Array[Byte] = KeyTags.NftPossession.prefixBytes ++ addressId.toByteArray - resource.iterator.seek(prefixBytes) + resource.withSafePrefixIterator { dbIterator => + dbIterator.seek(prefixBytes) - for (after <- maybeAfter) { - @inline - def skipEntry(key: Array[Byte]): Boolean = - key.startsWith(prefixBytes) && !key.endsWith(after.id.arr) + for (after <- maybeAfter) { + @inline + def skipEntry(key: Array[Byte]): Boolean = + !key.endsWith(after.id.arr) - while (resource.iterator.hasNext && skipEntry(resource.iterator.next().getKey)) {} - } + while (dbIterator.isValid && skipEntry(dbIterator.key())) { + dbIterator.next() + } + if (dbIterator.isValid && !skipEntry(dbIterator.key())) + dbIterator.next() + } + }(()) + + override def computeNext(): Seq[(IssuedAsset, Long)] = + resource.withSafePrefixIterator { dbIterator => + val keysBuffer = new ArrayBuffer[Key[CurrentBalance]]() + val assetsBuffer = new ArrayBuffer[IssuedAsset]() + while (dbIterator.isValid && keysBuffer.length < BatchSize) { + val assetId = IssuedAsset(ByteStr(dbIterator.key().takeRight(crypto.DigestLength))) + keysBuffer.addOne(Keys.assetBalance(addressId, assetId)) + assetsBuffer.addOne(assetId) + dbIterator.next() + } + if (keysBuffer.nonEmpty) { + resource + .multiGet(keysBuffer, 16) + .zip(assetsBuffer) + .map { case (curBalance, asset) => + asset -> curBalance.balance + } + .toSeq + } else endOfData() + }(endOfData()) +} - override def computeNext(): (IssuedAsset, Long) = - if (resource.iterator.hasNext && resource.iterator.peekNext().getKey.startsWith(prefixBytes)) { - val assetId = IssuedAsset(ByteStr(resource.iterator.next().getKey.takeRight(crypto.DigestLength))) - assetId -> (for { - lastChange <- resource.get(Keys.assetBalanceHistory(addressId, assetId)).headOption - } 
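The NFT batch above goes through DBResource.multiGet, added elsewhere in this patch, instead of one point lookup per asset. The RocksJava primitive it presumably wraps is multiGetAsList; a minimal illustration (not the DBResource code):

import org.rocksdb.RocksDB
import scala.jdk.CollectionConverters.*

// Fetch many keys in a single call; missing entries come back as null, mapped to None here.
def multiGet(db: RocksDB, keys: Seq[Array[Byte]]): Seq[Option[Array[Byte]]] =
  db.multiGetAsList(keys.asJava).asScala.toSeq.map(Option(_))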
yield resource.get(Keys.assetBalance(addressId, assetId)(lastChange))).getOrElse(0L) - } else endOfData() +object NFTIterator { + val BatchSize = 1000 } -class AssetBalanceIterator(addressId: AddressId, resource: DBResource) extends AbstractIterator[(IssuedAsset, Long)] { - private val prefixBytes = KeyTags.AssetBalanceHistory.prefixBytes ++ addressId.toByteArray +class AssetBalanceIterator(addressId: AddressId, resource: DBResource) extends AbstractIterator[Seq[(IssuedAsset, Long)]] { + private val prefixBytes: Array[Byte] = KeyTags.AssetBalance.prefixBytes ++ addressId.toByteArray - resource.iterator.seek(prefixBytes) + resource.withSafePrefixIterator(_.seek(prefixBytes))(()) - private def stillSameAddress(k: Array[Byte]): Boolean = - (k.length == (prefixBytes.length + crypto.DigestLength)) && k.startsWith(prefixBytes) + override def computeNext(): Seq[(IssuedAsset, Long)] = + resource.withSafePrefixIterator { dbIterator => + if (dbIterator.isValid) { + val assetId = IssuedAsset(ByteStr(dbIterator.key().takeRight(crypto.DigestLength))) + val curBalance = readCurrentBalance(dbIterator.value()) + dbIterator.next() + Seq(assetId -> curBalance.balance) + } else endOfData() + }(endOfData()) +} - override def computeNext(): (IssuedAsset, Long) = - if (resource.iterator.hasNext && stillSameAddress(resource.iterator.peekNext().getKey)) { - val currentEntry = resource.iterator.next() - val assetId = IssuedAsset(ByteStr(currentEntry.getKey.takeRight(crypto.DigestLength))) - val history = readIntSeq(currentEntry.getValue) - val balance = resource.get(Keys.assetBalance(addressId, assetId)(history.headOption.getOrElse(0))) - assetId -> balance - } else endOfData() +class WavesBalanceIterator(addressId: AddressId, resource: DBResource) extends AbstractIterator[(Int, Long)] { + private val prefixBytes: Array[Byte] = KeyTags.WavesBalanceHistory.prefixBytes ++ addressId.toByteArray + private val lastHeight: Int = resource.get(Keys.wavesBalance(addressId)).height.toInt + + resource.withSafePrefixIterator(_.seekForPrev(prefixBytes ++ Ints.toByteArray(lastHeight)))(()) + + override def computeNext(): (Int, Long) = + resource.withSafePrefixIterator { dbIterator => + if (dbIterator.isValid) { + val h = ByteBuffer.wrap(dbIterator.key().drop(prefixBytes.length)).getInt + val balance = ByteBuffer.wrap(dbIterator.value()).getLong + dbIterator.prev() + h -> balance + } else endOfData() + }(endOfData()) } class BalanceIterator( - underlying: Iterator[(IssuedAsset, Long)], + underlying: Iterator[Seq[(IssuedAsset, Long)]], includeAsset: IssuedAsset => Boolean, private var pendingOverrides: Map[IssuedAsset, Long] -) extends AbstractIterator[(IssuedAsset, Long)] { +) extends AbstractIterator[Seq[(IssuedAsset, Long)]] { - @tailrec private def nextOverride(): (IssuedAsset, Long) = + private def nextOverride(): Seq[(IssuedAsset, Long)] = if (pendingOverrides.isEmpty) endOfData() else { - val head @ (asset, _) = pendingOverrides.head - pendingOverrides -= asset - if (includeAsset(asset)) head else nextOverride() + val balances = pendingOverrides.collect { + case (asset, balance) if includeAsset(asset) => + asset -> balance + }.toSeq + pendingOverrides = Map.empty + balances } - override def computeNext(): (IssuedAsset, Long) = + override def computeNext(): Seq[(IssuedAsset, Long)] = if (underlying.hasNext) { - val (asset, balanceFromHistory) = underlying.next() - val balanceFromDiff = pendingOverrides.getOrElse(asset, 0L) - pendingOverrides -= asset - asset -> (balanceFromDiff |+| balanceFromHistory) + 
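The WavesBalanceIterator above introduces the seekForPrev pattern: position the cursor at or just before prefix ++ height, then walk backwards with prev() to read history newest-first. A standalone sketch under the assumed key layout (prefix, then a 4-byte big-endian height, with an 8-byte big-endian value):

import com.google.common.primitives.{Ints, Longs}
import org.rocksdb.RocksDB
import scala.util.Using

// Read (height, value) pairs in descending height order, starting at fromHeight.
def descendingHistory(db: RocksDB, prefix: Array[Byte], fromHeight: Int): Seq[(Int, Long)] =
  Using.resource(db.newIterator()) { iter =>
    val buf = Seq.newBuilder[(Int, Long)]
    iter.seekForPrev(prefix ++ Ints.toByteArray(fromHeight))
    while (iter.isValid && iter.key().startsWith(prefix)) {
      val h       = Ints.fromByteArray(iter.key().drop(prefix.length))
      val balance = Longs.fromByteArray(iter.value())
      buf += (h -> balance)
      iter.prev()
    }
    buf.result()
  }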
underlying.next().map { case (asset, balanceFromHistory) => + val balanceFromDiff = pendingOverrides.getOrElse(asset, 0L) + pendingOverrides -= asset + asset -> (balanceFromDiff |+| balanceFromHistory) + } } else nextOverride() } @@ -85,30 +130,31 @@ object AddressPortfolio { diff: Diff, maybeAfter: Option[IssuedAsset], loadAssetDescription: IssuedAsset => Option[AssetDescription] - ): Iterator[(IssuedAsset, AssetDescription)] = + ): Iterator[Seq[(IssuedAsset, AssetDescription)]] = new BalanceIterator( resource .get(Keys.addressId(address)) - .fold[Iterator[(IssuedAsset, Long)]](Iterator())(addressId => new NFTIterator(addressId, maybeAfter, resource).asScala), + .fold[Iterator[Seq[(IssuedAsset, Long)]]](Iterator.empty)(addressId => new NFTIterator(addressId, maybeAfter, resource).asScala), asset => loadAssetDescription(asset).exists(_.nft), diff.portfolios.getOrElse(address, Portfolio.empty).assets ).asScala - .collect { case (asset, balance) if balance > 0 => asset } - .flatMap(a => loadAssetDescription(a).map(a -> _)) + .map(_.collect { case (asset, balance) if balance > 0 => asset } + .flatMap(a => loadAssetDescription(a).map(a -> _))) def assetBalanceIterator( resource: DBResource, address: Address, diff: Diff, includeAsset: IssuedAsset => Boolean - ): Iterator[(IssuedAsset, Long)] = + ): Iterator[Seq[(IssuedAsset, Long)]] = new BalanceIterator( resource .get(Keys.addressId(address)) - .fold[Iterator[(IssuedAsset, Long)]](Iterator())(addressId => new AssetBalanceIterator(addressId, resource).asScala), + .fold[Iterator[Seq[(IssuedAsset, Long)]]](Iterator.empty)(addressId => new AssetBalanceIterator(addressId, resource).asScala), includeAsset, diff.portfolios.getOrElse(address, Portfolio.empty).assets - ).asScala.filter { case (asset, balance) => - includeAsset(asset) && balance > 0 - } + ).asScala + .map(_.filter { case (asset, balance) => + includeAsset(asset) && balance > 0 + }) } diff --git a/node/src/main/scala/com/wavesplatform/api/common/AddressTransactions.scala b/node/src/main/scala/com/wavesplatform/api/common/AddressTransactions.scala index 95ec7458642..9061e8b6c1a 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/AddressTransactions.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/AddressTransactions.scala @@ -1,83 +1,105 @@ package com.wavesplatform.api.common +import com.google.common.collect.AbstractIterator import com.wavesplatform.account.Address +import com.wavesplatform.api.common.AddressTransactions.TxByAddressIterator.BatchSize import com.wavesplatform.common.state.ByteStr import com.wavesplatform.database.protobuf.EthereumTransactionMeta -import com.wavesplatform.database.{DBExt, DBResource, Keys} +import com.wavesplatform.database.{AddressId, DBExt, DBResource, Key, KeyTags, Keys, RDB, readTransactionHNSeqAndType} import com.wavesplatform.state.{Diff, Height, InvokeScriptResult, TransactionId, TxMeta, TxNum} import com.wavesplatform.transaction.{Authorized, EthereumTransaction, GenesisTransaction, Transaction, TransactionType} -import org.iq80.leveldb.DB +import monix.eval.Task +import monix.reactive.Observable +import org.rocksdb.RocksDB + +import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters.* object AddressTransactions { - private def loadTransaction(db: DB, height: Height, txNum: TxNum, sender: Option[Address]): Option[(TxMeta, Transaction)] = - db.get(Keys.transactionAt(height, txNum)) match { - case Some((m, tx: Authorized)) if sender.forall(_ == tx.sender.toAddress) => Some(m -> tx) - case Some((m, gt: 
GenesisTransaction)) if sender.isEmpty => Some(m -> gt) - case Some((m, et: EthereumTransaction)) if sender.forall(_ == et.senderAddress()) => Some(m -> et) - case _ => None - } - - private def loadInvokeScriptResult(resource: DBResource, txId: ByteStr): Option[InvokeScriptResult] = + private def loadTransactions( + db: DBResource, + keys: ArrayBuffer[Key[Option[(TxMeta, Transaction)]]], + nums: ArrayBuffer[TxNum], + sizes: ArrayBuffer[Int], + sender: Option[Address] + ): Seq[(TxMeta, Transaction, Option[TxNum])] = + db.multiGet(keys, sizes) + .zip(nums) + .flatMap { + case (Some((m, tx: Authorized)), txNum) if sender.forall(_ == tx.sender.toAddress) => Some((m, tx, Some(txNum))) + case (Some((m, gt: GenesisTransaction)), txNum) if sender.isEmpty => Some((m, gt, Some(txNum))) + case (Some((m, et: EthereumTransaction)), txNum) if sender.forall(_ == et.senderAddress()) => Some((m, et, Some(txNum))) + case _ => None + } + .toSeq + + private def loadInvokeScriptResult(resource: DBResource, txMetaHandle: RDB.TxMetaHandle, txId: ByteStr): Option[InvokeScriptResult] = for { - tm <- resource.get(Keys.transactionMetaById(TransactionId(txId))) + tm <- resource.get(Keys.transactionMetaById(TransactionId(txId), txMetaHandle)) scriptResult <- resource.get(Keys.invokeScriptResult(tm.height, TxNum(tm.num.toShort))) } yield scriptResult - def loadInvokeScriptResult(db: DB, txId: ByteStr): Option[InvokeScriptResult] = - db.withResource(r => loadInvokeScriptResult(r, txId)) + def loadInvokeScriptResult(db: RocksDB, txMetaHandle: RDB.TxMetaHandle, txId: ByteStr): Option[InvokeScriptResult] = + db.withResource(r => loadInvokeScriptResult(r, txMetaHandle, txId)) - def loadEthereumMetadata(db: DB, txId: ByteStr): Option[EthereumTransactionMeta] = db.withResource { resource => - for { - tm <- resource.get(Keys.transactionMetaById(TransactionId(txId))) - m <- resource.get(Keys.ethereumTransactionMeta(Height(tm.height), TxNum(tm.num.toShort))) - } yield m + def loadInvokeScriptResult(db: RocksDB, height: Height, txNum: TxNum): Option[InvokeScriptResult] = + db.get(Keys.invokeScriptResult(height, txNum)) + + def loadEthereumMetadata(db: RocksDB, txMetaHandle: RDB.TxMetaHandle, txId: ByteStr): Option[EthereumTransactionMeta] = db.withResource { + resource => + for { + tm <- resource.get(Keys.transactionMetaById(TransactionId(txId), txMetaHandle)) + m <- resource.get(Keys.ethereumTransactionMeta(Height(tm.height), TxNum(tm.num.toShort))) + } yield m } + def loadEthereumMetadata(db: RocksDB, height: Height, txNum: TxNum): Option[EthereumTransactionMeta] = + db.get(Keys.ethereumTransactionMeta(height, txNum)) + def allAddressTransactions( - db: DB, + rdb: RDB, maybeDiff: Option[(Height, Diff)], subject: Address, sender: Option[Address], types: Set[Transaction.Type], fromId: Option[ByteStr] - ): Iterator[(TxMeta, Transaction)] = { + ): Observable[(TxMeta, Transaction, Option[TxNum])] = { val diffTxs = transactionsFromDiff(maybeDiff, subject, sender, types, fromId) + val dbTxs = transactionsFromDB( - db, + rdb, subject, sender, types, - fromId.filter(id => maybeDiff.exists { case (_, diff) => !diff.transactions.contains(id) }) + fromId.filter(id => maybeDiff.exists { case (_, diff) => !diff.containsTransaction(id) }) ) - diffTxs.iterator ++ dbTxs.filterNot(diffTxs.contains) + Observable.fromIterable(diffTxs) ++ dbTxs.filterNot(diffTxs.contains) } - private def transactionsFromDB( - db: DB, + def transactionsFromDB( + rdb: RDB, subject: Address, sender: Option[Address], types: Set[Transaction.Type], fromId: 
Option[ByteStr] - ): Iterator[(TxMeta, Transaction)] = - db.get(Keys.addressId(subject)) - .fold(Iterable.empty[(TxMeta, Transaction)]) { addressId => + ): Observable[(TxMeta, Transaction, Option[TxNum])] = rdb.db.resourceObservable.flatMap { dbResource => + dbResource + .get(Keys.addressId(subject)) + .fold(Observable.empty[(TxMeta, Transaction, Option[TxNum])]) { addressId => val (maxHeight, maxTxNum) = fromId - .flatMap(id => db.get(Keys.transactionMetaById(TransactionId(id)))) + .flatMap(id => rdb.db.get(Keys.transactionMetaById(TransactionId(id), rdb.txMetaHandle))) .fold[(Height, TxNum)](Height(Int.MaxValue) -> TxNum(Short.MaxValue)) { tm => Height(tm.height) -> TxNum(tm.num.toShort) } - (for { - seqNr <- (db.get(Keys.addressTransactionSeqNr(addressId)) to 0 by -1).view - (height, transactionIds) <- db.get(Keys.addressTransactionHN(addressId, seqNr)).view if height <= maxHeight - (txType, txNum) <- transactionIds.view - } yield (height, txNum, txType)) - .dropWhile { case (h, txNum, _) => h > maxHeight || h == maxHeight && txNum >= maxTxNum } - .collect { case (h, txNum, txType) if types.isEmpty || types(TransactionType(txType)) => h -> txNum } - .flatMap { case (h, txNum) => loadTransaction(db, h, txNum, sender) } + Observable + .fromIterator( + Task(new TxByAddressIterator(dbResource, rdb.txHandle, addressId, maxHeight, maxTxNum, sender, types).asScala) + ) + .concatMapIterable(identity) } - .iterator + } private def transactionsFromDiff( maybeDiff: Option[(Height, Diff)], @@ -85,7 +107,7 @@ object AddressTransactions { sender: Option[Address], types: Set[Transaction.Type], fromId: Option[ByteStr] - ): Seq[(TxMeta, Transaction)] = + ): Seq[(TxMeta, Transaction, Option[TxNum])] = (for { (height, diff) <- maybeDiff.toSeq nti <- diff.transactions.toSeq.reverse @@ -94,5 +116,59 @@ object AddressTransactions { .dropWhile { case (_, tx) => fromId.isDefined && !fromId.contains(tx.id()) } .dropWhile { case (_, tx) => fromId.contains(tx.id()) } .filter { case (_, tx) => types.isEmpty || types.contains(tx.tpe) } - .collect { case v @ (_, tx: Authorized) if sender.forall(_ == tx.sender.toAddress) => v } + .collect { case (m, tx: Authorized) if sender.forall(_ == tx.sender.toAddress) => (m, tx, None) } + + class TxByAddressIterator( + db: DBResource, + txHandle: RDB.TxHandle, + addressId: AddressId, + maxHeight: Int, + maxTxNum: Int, + sender: Option[Address], + types: Set[Transaction.Type] + ) extends AbstractIterator[Seq[(TxMeta, Transaction, Option[TxNum])]] { + val prefix: Array[Byte] = KeyTags.AddressTransactionHeightTypeAndNums.prefixBytes ++ addressId.toByteArray + val seqNr: Int = db.get(Keys.addressTransactionSeqNr(addressId)) + + db.withSafePrefixIterator(_.seekForPrev(Keys.addressTransactionHN(addressId, seqNr).keyBytes))() + + final override def computeNext(): Seq[(TxMeta, Transaction, Option[TxNum])] = db.withSafePrefixIterator { dbIterator => + val keysBuffer = new ArrayBuffer[Key[Option[(TxMeta, Transaction)]]]() + val numsBuffer = new ArrayBuffer[TxNum]() + val sizesBuffer = new ArrayBuffer[Int]() + while (dbIterator.isValid && keysBuffer.length < BatchSize) { + val (height, txs) = readTransactionHNSeqAndType(dbIterator.value()) + dbIterator.prev() + if (height > maxHeight) { + () + } else if (height == maxHeight) { + txs + .dropWhile { case (_, txNum, _) => txNum >= maxTxNum } + .foreach { case (tp, txNum, size) => + if (types.isEmpty || types(TransactionType(tp))) { + keysBuffer.addOne(Keys.transactionAt(height, txNum, txHandle)) + numsBuffer.addOne(txNum) + 
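The Observable glue above (a Guava AbstractIterator that yields Seq batches, exposed through Observable.fromIterator and flattened with concatMapIterable) recurs in several APIs touched by this patch. Condensed into one helper for clarity (illustrative name):

import com.google.common.collect.AbstractIterator
import monix.eval.Task
import monix.reactive.Observable
import scala.jdk.CollectionConverters.*

// Turn a batching iterator into a flat stream of its elements.
def batchedToObservable[A](mkIterator: () => AbstractIterator[Seq[A]]): Observable[A] =
  Observable
    .fromIterator(Task(mkIterator().asScala))
    .concatMapIterable(identity)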
sizesBuffer.addOne(size) + } + } + } else { + txs.foreach { case (tp, txNum, size) => + if (types.isEmpty || types(TransactionType(tp))) { + keysBuffer.addOne(Keys.transactionAt(height, txNum, txHandle)) + numsBuffer.addOne(txNum) + sizesBuffer.addOne(size) + } + } + } + } + if (keysBuffer.nonEmpty) { + loadTransactions(db, keysBuffer, numsBuffer, sizesBuffer, sender) + } else + endOfData() + }(endOfData()) + } + + object TxByAddressIterator { + val BatchSize = 50 + } } diff --git a/node/src/main/scala/com/wavesplatform/api/common/BalanceDistribution.scala b/node/src/main/scala/com/wavesplatform/api/common/BalanceDistribution.scala index be3abebb14d..71c6219280f 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/BalanceDistribution.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/BalanceDistribution.scala @@ -18,29 +18,30 @@ object BalanceDistribution { private var pendingPortfolios: Map[Address, Portfolio] ) extends AbstractIterator[(Address, Long)] { @inline - private def stillSameAddress(expected: AddressId): Boolean = resource.iterator.hasNext && { - val maybeNext = resource.iterator.peekNext().getKey + private def stillSameAddress(expected: AddressId): Boolean = resource.fullIterator.isValid && { + val maybeNext = resource.fullIterator.key() maybeNext.startsWith(globalPrefix) && addressId(maybeNext) == expected } @tailrec private def findNextBalance(): Option[(Address, Long)] = { - if (!resource.iterator.hasNext) None + if (!resource.fullIterator.isValid) None else { - val current = resource.iterator.next() - if (!current.getKey.startsWith(globalPrefix)) None + val key = resource.fullIterator.key() + val value = resource.fullIterator.value() + if (!key.startsWith(globalPrefix)) None else { - val aid = addressId(current.getKey) + val aid = addressId(key) val address = resource.get(Keys.idToAddress(aid)) - var balance = Longs.fromByteArray(current.getValue) - var currentHeight = Ints.fromByteArray(current.getKey.takeRight(4)) + var balance = Longs.fromByteArray(value) + var currentHeight = Ints.fromByteArray(key.takeRight(4)) while (stillSameAddress(aid)) { - val next = resource.iterator.next() - val nextHeight = Ints.fromByteArray(next.getKey.takeRight(4)) + val nextHeight = Ints.fromByteArray(resource.fullIterator.key.takeRight(4)) if (nextHeight <= height) { currentHeight = nextHeight - balance = Longs.fromByteArray(next.getValue) + balance = Longs.fromByteArray(resource.fullIterator.value()) } + resource.fullIterator.next() } val adjustedBalanceE = safeSum(balance, pendingPortfolios.get(address).fold(0L)(balanceOf), "Next distribution balance") diff --git a/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala b/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala index 56952c030f8..463a5b82f53 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala @@ -1,17 +1,22 @@ package com.wavesplatform.api.common +import java.util.regex.Pattern + +import com.google.common.base.Charsets +import com.google.common.collect.AbstractIterator import com.wavesplatform.account.{Address, Alias} import com.wavesplatform.api.common.AddressPortfolio.{assetBalanceIterator, nftIterator} import com.wavesplatform.api.common.TransactionMeta.Ethereum import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 -import com.wavesplatform.database -import com.wavesplatform.database.{DBExt, KeyTags, Keys} +import 
com.wavesplatform.database.{DBExt, DBResource, KeyTags, Keys, RDB} import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.lang.ValidationError +import com.wavesplatform.protobuf.transaction.PBRecipients import com.wavesplatform.state.patch.CancelLeasesToDisabledAliases +import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.state.reader.LeaseDetails.Status -import com.wavesplatform.state.{AccountScriptInfo, AssetDescription, Blockchain, DataEntry, Diff, Height, InvokeScriptResult} +import com.wavesplatform.state.{AccountScriptInfo, AssetDescription, Blockchain, DataEntry, Height, InvokeScriptResult} import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.EthereumTransaction.Invocation import com.wavesplatform.transaction.TxValidationError.GenericError @@ -19,7 +24,8 @@ import com.wavesplatform.transaction.lease.LeaseTransaction import com.wavesplatform.transaction.{EthereumTransaction, TransactionType} import monix.eval.Task import monix.reactive.Observable -import org.iq80.leveldb.DB + +import scala.jdk.CollectionConverters.* trait CommonAccountsApi { import CommonAccountsApi.* @@ -32,9 +38,9 @@ trait CommonAccountsApi { def assetBalance(address: Address, asset: IssuedAsset): Long - def portfolio(address: Address): Observable[(IssuedAsset, Long)] + def portfolio(address: Address): Observable[Seq[(IssuedAsset, Long)]] - def nftList(address: Address, after: Option[IssuedAsset]): Observable[(IssuedAsset, AssetDescription)] + def nftList(address: Address, after: Option[IssuedAsset]): Observable[Seq[(IssuedAsset, AssetDescription)]] def script(address: Address): Option[AccountScriptInfo] @@ -50,12 +56,13 @@ trait CommonAccountsApi { } object CommonAccountsApi { - def includeNft(blockchain: Blockchain)(assetId: IssuedAsset): Boolean = - !blockchain.isFeatureActivated(BlockchainFeatures.ReduceNFTFee) || !blockchain.assetDescription(assetId).exists(_.nft) - final case class BalanceDetails(regular: Long, generating: Long, available: Long, effective: Long, leaseIn: Long, leaseOut: Long) - def apply(diff: () => Diff, db: DB, blockchain: Blockchain): CommonAccountsApi = new CommonAccountsApi { + def apply( + compositeBlockchain: () => CompositeBlockchain, + rdb: RDB, + blockchain: Blockchain + ): CommonAccountsApi = new CommonAccountsApi { override def balance(address: Address, confirmations: Int = 0): Long = blockchain.balance(address, blockchain.height, confirmations) @@ -80,17 +87,22 @@ object CommonAccountsApi { override def assetBalance(address: Address, asset: IssuedAsset): Long = blockchain.balance(address, asset) - override def portfolio(address: Address): Observable[(IssuedAsset, Long)] = { - val currentDiff = diff() - db.resourceObservable.flatMap { resource => - Observable.fromIterator(Task(assetBalanceIterator(resource, address, currentDiff, includeNft(blockchain)))) + override def portfolio(address: Address): Observable[Seq[(IssuedAsset, Long)]] = { + val featureNotActivated = !blockchain.isFeatureActivated(BlockchainFeatures.ReduceNFTFee) + val compBlockchain = compositeBlockchain() + def includeNft(assetId: IssuedAsset): Boolean = + featureNotActivated || !compBlockchain.assetDescription(assetId).exists(_.nft) + + rdb.db.resourceObservable.flatMap { resource => + Observable + .fromIterator(Task(assetBalanceIterator(resource, address, compBlockchain.diff, includeNft))) } } - override def nftList(address: Address, after: Option[IssuedAsset]): Observable[(IssuedAsset, AssetDescription)] = { - val 
currentDiff = diff() - db.resourceObservable.flatMap { resource => - Observable.fromIterator(Task(nftIterator(resource, address, currentDiff, after, blockchain.assetDescription))) + override def nftList(address: Address, after: Option[IssuedAsset]): Observable[Seq[(IssuedAsset, AssetDescription)]] = { + rdb.db.resourceObservable.flatMap { resource => + Observable + .fromIterator(Task(nftIterator(resource, address, compositeBlockchain().diff, after, blockchain.assetDescription))) } } @@ -101,35 +113,25 @@ object CommonAccountsApi { override def dataStream(address: Address, regex: Option[String]): Observable[DataEntry[?]] = Observable.defer { val pattern = regex.map(_.r.pattern) - val entriesFromDiff = diff().accountData + val entriesFromDiff = compositeBlockchain().diff.accountData .get(address) - .fold[Map[String, DataEntry[?]]](Map.empty)(_.data.filter { case (k, _) => pattern.forall(_.matcher(k).matches()) }) - - val entries = db.readOnly { ro => - ro.get(Keys.addressId(address)).fold(Seq.empty[DataEntry[?]]) { addressId => - val filteredKeys = Set.newBuilder[String] - - ro.iterateOver(KeyTags.ChangedDataKeys.prefixBytes ++ addressId.toByteArray) { e => - for (key <- database.readStrings(e.getValue) if !entriesFromDiff.contains(key) && pattern.forall(_.matcher(key).matches())) - filteredKeys += key - } + .fold(Array.empty[DataEntry[?]])(_.filter { case (k, _) => pattern.forall(_.matcher(k).matches()) }.values.toArray.sortBy(_.key)) - for { - key <- filteredKeys.result().toVector - h <- ro.get(Keys.dataHistory(address, key)).headOption - e <- ro.get(Keys.data(addressId, key)(h)) - } yield e - } + rdb.db.resourceObservable.flatMap { dbResource => + Observable + .fromIterator( + Task(new AddressDataIterator(dbResource, address, entriesFromDiff, pattern).asScala) + ) + .filterNot(_.isEmpty) } - Observable.fromIterable((entriesFromDiff.values ++ entries).filterNot(_.isEmpty)) } override def resolveAlias(alias: Alias): Either[ValidationError, Address] = blockchain.resolveAlias(alias) override def activeLeases(address: Address): Observable[LeaseInfo] = addressTransactions( - db, - Some(Height(blockchain.height) -> diff()), + rdb, + Some(Height(blockchain.height) -> compositeBlockchain().diff), address, None, Set(TransactionType.Lease, TransactionType.InvokeScript, TransactionType.InvokeExpression, TransactionType.Ethereum), @@ -202,4 +204,60 @@ object CommonAccountsApi { blockchain.leaseDetails(id).exists(_.isActive) } + class AddressDataIterator( + db: DBResource, + address: Address, + entriesFromDiff: Array[DataEntry[?]], + pattern: Option[Pattern] + ) extends AbstractIterator[DataEntry[?]] { + val prefix: Array[Byte] = KeyTags.Data.prefixBytes ++ PBRecipients.publicKeyHash(address) + + val length: Int = entriesFromDiff.length + + db.withSafePrefixIterator(_.seek(prefix))() + + var nextIndex = 0 + var nextDbEntry: Option[DataEntry[?]] = None + + def matches(key: String): Boolean = pattern.forall(_.matcher(key).matches()) + + final override def computeNext(): DataEntry[?] 
= db.withSafePrefixIterator { dbIterator => + nextDbEntry match { + case Some(dbEntry) => + if (nextIndex < length) { + val entryFromDiff = entriesFromDiff(nextIndex) + if (entryFromDiff.key < dbEntry.key) { + nextIndex += 1 + entryFromDiff + } else if (entryFromDiff.key == dbEntry.key) { + nextIndex += 1 + nextDbEntry = None + entryFromDiff + } else { + nextDbEntry = None + dbEntry + } + } else { + nextDbEntry = None + dbEntry + } + case None => + if (dbIterator.isValid) { + val key = new String(dbIterator.key().drop(2 + Address.HashLength), Charsets.UTF_8) + if (matches(key)) { + nextDbEntry = Option(dbIterator.value()).map { arr => + Keys.data(address, key).parse(arr).entry + } + } + dbIterator.next() + computeNext() + } else if (nextIndex < length) { + nextIndex += 1 + entriesFromDiff(nextIndex - 1) + } else { + endOfData() + } + } + }(endOfData()) + } } diff --git a/node/src/main/scala/com/wavesplatform/api/common/CommonAssetsApi.scala b/node/src/main/scala/com/wavesplatform/api/common/CommonAssetsApi.scala index 9039424ee81..602f9d31965 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/CommonAssetsApi.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/CommonAssetsApi.scala @@ -8,13 +8,15 @@ import com.wavesplatform.state.{AssetDescription, Blockchain, Diff, Portfolio} import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.assets.IssueTransaction import monix.reactive.Observable -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB trait CommonAssetsApi { def description(assetId: IssuedAsset): Option[AssetDescription] def fullInfo(assetId: IssuedAsset): Option[AssetInfo] + def fullInfos(assetIds: Seq[IssuedAsset]): Seq[Option[AssetInfo]] + def wavesDistribution(height: Int, after: Option[Address]): Observable[(Address, Long)] def assetDistribution(asset: IssuedAsset, height: Int, after: Option[Address]): Observable[(Address, Long)] @@ -23,7 +25,7 @@ trait CommonAssetsApi { object CommonAssetsApi { final case class AssetInfo(description: AssetDescription, issueTransaction: Option[IssueTransaction], sponsorBalance: Option[Long]) - def apply(diff: () => Diff, db: DB, blockchain: Blockchain): CommonAssetsApi = new CommonAssetsApi { + def apply(diff: () => Diff, db: RocksDB, blockchain: Blockchain): CommonAssetsApi = new CommonAssetsApi { def description(assetId: IssuedAsset): Option[AssetDescription] = blockchain.assetDescription(assetId) @@ -37,13 +39,30 @@ object CommonAssetsApi { sponsorBalance ) + override def fullInfos(assetIds: Seq[IssuedAsset]): Seq[Option[AssetInfo]] = { + blockchain + .transactionInfos(assetIds.map(_.id)) + .view + .zip(assetIds) + .map { case (tx, assetId) => + blockchain.assetDescription(assetId).map { desc => + AssetInfo( + desc, + tx.collect { case (tm, it: IssueTransaction) if tm.succeeded => it }, + if (desc.sponsorship != 0) Some(blockchain.wavesPortfolio(desc.issuer.toAddress).spendableBalance) else None + ) + } + } + .toSeq + } + override def wavesDistribution(height: Int, after: Option[Address]): Observable[(Address, Long)] = balanceDistribution( db, height, after, if (height == blockchain.height) diff().portfolios else Map.empty[Address, Portfolio], - KeyTags.WavesBalance.prefixBytes, + KeyTags.WavesBalanceHistory.prefixBytes, bs => AddressId.fromByteArray(bs.slice(2, bs.length - 4)), _.balance ) @@ -54,7 +73,7 @@ object CommonAssetsApi { height, after, if (height == blockchain.height) diff().portfolios else Map.empty[Address, Portfolio], - KeyTags.AssetBalance.prefixBytes ++ asset.id.arr, + 
KeyTags.AssetBalanceHistory.prefixBytes ++ asset.id.arr, bs => AddressId.fromByteArray(bs.slice(2 + crypto.DigestLength, bs.length - 4)), _.assets.getOrElse(asset, 0L) ) diff --git a/node/src/main/scala/com/wavesplatform/api/common/CommonTransactionsApi.scala b/node/src/main/scala/com/wavesplatform/api/common/CommonTransactionsApi.scala index a4e3315a4a3..32376f94bd9 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/CommonTransactionsApi.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/CommonTransactionsApi.scala @@ -6,6 +6,7 @@ import com.wavesplatform.block import com.wavesplatform.block.Block import com.wavesplatform.block.Block.TransactionProof import com.wavesplatform.common.state.ByteStr +import com.wavesplatform.database.RDB import com.wavesplatform.lang.ValidationError import com.wavesplatform.state.diffs.FeeValidation import com.wavesplatform.state.diffs.FeeValidation.FeeDetails @@ -15,7 +16,6 @@ import com.wavesplatform.transaction.smart.script.trace.TracedResult import com.wavesplatform.transaction.{Asset, CreateAliasTransaction, Transaction} import com.wavesplatform.utx.UtxPool import monix.reactive.Observable -import org.iq80.leveldb.DB import scala.concurrent.Future @@ -46,13 +46,14 @@ trait CommonTransactionsApi { object CommonTransactionsApi { def apply( maybeDiff: => Option[(Height, Diff)], - db: DB, + rdb: RDB, blockchain: Blockchain, utx: UtxPool, publishTransaction: Transaction => Future[TracedResult[ValidationError, Boolean]], blockAt: Int => Option[(BlockMeta, Seq[(TxMeta, Transaction)])] ): CommonTransactionsApi = new CommonTransactionsApi { - override def aliasesOfAddress(address: Address): Observable[(Height, CreateAliasTransaction)] = common.aliasesOfAddress(db, maybeDiff, address) + override def aliasesOfAddress(address: Address): Observable[(Height, CreateAliasTransaction)] = + common.aliasesOfAddress(rdb, maybeDiff, address) override def transactionsByAddress( subject: Address, @@ -60,10 +61,10 @@ object CommonTransactionsApi { transactionTypes: Set[TransactionType], fromId: Option[ByteStr] = None ): Observable[TransactionMeta] = - common.addressTransactions(db, maybeDiff, subject, sender, transactionTypes, fromId) + common.addressTransactions(rdb, maybeDiff, subject, sender, transactionTypes, fromId) override def transactionById(transactionId: ByteStr): Option[TransactionMeta] = - blockchain.transactionInfo(transactionId).map(common.loadTransactionMeta(db, maybeDiff)) + blockchain.transactionInfo(transactionId).map(common.loadTransactionMeta(rdb, maybeDiff)) override def unconfirmedTransactions: Seq[Transaction] = utx.all @@ -73,9 +74,8 @@ object CommonTransactionsApi { override def calculateFee(tx: Transaction): Either[ValidationError, (Asset, Long, Long)] = FeeValidation .getMinFee(blockchain, tx) - .map { - case FeeDetails(asset, _, feeInAsset, feeInWaves) => - (asset, feeInAsset, feeInWaves) + .map { case FeeDetails(asset, _, feeInAsset, feeInWaves) => + (asset, feeInAsset, feeInWaves) } override def broadcastTransaction(tx: Transaction): Future[TracedResult[ValidationError, Boolean]] = publishTransaction(tx) diff --git a/node/src/main/scala/com/wavesplatform/api/common/package.scala b/node/src/main/scala/com/wavesplatform/api/common/package.scala index 03baf397c3a..fdf913f2a8d 100644 --- a/node/src/main/scala/com/wavesplatform/api/common/package.scala +++ b/node/src/main/scala/com/wavesplatform/api/common/package.scala @@ -3,12 +3,12 @@ package com.wavesplatform.api import com.google.common.primitives.Longs import 
com.wavesplatform.account.Address import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.{AddressId, DBExt, Keys} +import com.wavesplatform.database.{AddressId, DBExt, Keys, RDB} import com.wavesplatform.state.{Diff, Height, Portfolio, TxMeta} import com.wavesplatform.transaction.{CreateAliasTransaction, Transaction, TransactionType} import monix.eval.Task import monix.reactive.Observable -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB import scala.jdk.CollectionConverters.* @@ -17,38 +17,36 @@ package object common { import BalanceDistribution.* def addressTransactions( - db: DB, + rdb: RDB, maybeDiff: Option[(Height, Diff)], subject: Address, sender: Option[Address], types: Set[Transaction.Type], fromId: Option[ByteStr] ): Observable[TransactionMeta] = - Observable - .fromIterator(Task(allAddressTransactions(db, maybeDiff, subject, sender, types, fromId).map { - case (m, transaction) => - def loadISR(t: Transaction) = - maybeDiff - .flatMap { case (_, diff) => diff.scriptResults.get(t.id()) } - .orElse(loadInvokeScriptResult(db, t.id())) + allAddressTransactions(rdb, maybeDiff, subject, sender, types, fromId).map { case (m, transaction, txNumOpt) => + def loadISR(t: Transaction) = + maybeDiff + .flatMap { case (_, diff) => diff.scriptResults.get(t.id()) } + .orElse(txNumOpt.flatMap(loadInvokeScriptResult(rdb.db, m.height, _))) - def loadETM(t: Transaction) = - maybeDiff - .flatMap { case (_, diff) => diff.ethereumTransactionMeta.get(t.id()) } - .orElse(loadEthereumMetadata(db, t.id())) + def loadETM(t: Transaction) = + maybeDiff + .flatMap { case (_, diff) => diff.ethereumTransactionMeta.get(t.id()) } + .orElse(txNumOpt.flatMap(loadEthereumMetadata(rdb.db, m.height, _))) - TransactionMeta.create( - m.height, - transaction, - m.succeeded, - m.spentComplexity, - loadISR, - loadETM - ) - })) + TransactionMeta.create( + m.height, + transaction, + m.succeeded, + m.spentComplexity, + loadISR, + loadETM + ) + } def balanceDistribution( - db: DB, + db: RocksDB, height: Int, after: Option[Address], overrides: Map[Address, Portfolio], @@ -58,7 +56,7 @@ package object common { ): Observable[(Address, Long)] = db.resourceObservable .flatMap { resource => - resource.iterator.seek( + resource.fullIterator.seek( globalPrefix ++ after .flatMap(address => resource.get(Keys.addressId(address))) .fold(Array.emptyByteArray)(id => Longs.toByteArray(id.toLong + 1)) @@ -66,15 +64,21 @@ package object common { Observable.fromIterator(Task(new BalanceIterator(resource, globalPrefix, addressId, balanceOf, height, overrides).asScala.filter(_._2 > 0))) } - def aliasesOfAddress(db: DB, maybeDiff: => Option[(Height, Diff)], address: Address): Observable[(Height, CreateAliasTransaction)] = { - val disabledAliases = db.get(Keys.disabledAliases) - addressTransactions(db, maybeDiff, address, Some(address), Set(TransactionType.CreateAlias), None) + def aliasesOfAddress( + rdb: RDB, + maybeDiff: => Option[(Height, Diff)], + address: Address + ): Observable[(Height, CreateAliasTransaction)] = { + val disabledAliases = rdb.db.get(Keys.disabledAliases) + addressTransactions(rdb, maybeDiff, address, Some(address), Set(TransactionType.CreateAlias), None) .collect { case TransactionMeta(height, cat: CreateAliasTransaction, true) if disabledAliases.isEmpty || !disabledAliases(cat.alias) => height -> cat } } - def loadTransactionMeta(db: DB, maybeDiff: => Option[(Int, Diff)])(tuple: (TxMeta, Transaction)): TransactionMeta = { + def loadTransactionMeta(rdb: RDB, maybeDiff: => Option[(Int, Diff)])( 
+ tuple: (TxMeta, Transaction) + ): TransactionMeta = { val (meta, transaction) = tuple TransactionMeta.create( meta.height, @@ -84,11 +88,11 @@ package object common { ist => maybeDiff .flatMap { case (_, diff) => diff.scriptResults.get(ist.id()) } - .orElse(loadInvokeScriptResult(db, ist.id())), + .orElse(loadInvokeScriptResult(rdb.db, rdb.txMetaHandle, ist.id())), et => maybeDiff .flatMap { case (_, diff) => diff.ethereumTransactionMeta.get(et.id()) } - .orElse(loadEthereumMetadata(db, et.id())) + .orElse(loadEthereumMetadata(rdb.db, rdb.txMetaHandle, et.id())) ) } } diff --git a/node/src/main/scala/com/wavesplatform/api/http/AddressApiRoute.scala b/node/src/main/scala/com/wavesplatform/api/http/AddressApiRoute.scala index d06d41b7c4f..e2a19913976 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/AddressApiRoute.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/AddressApiRoute.scala @@ -197,7 +197,8 @@ case class AddressApiRoute( def getData: Route = pathPrefix("data" / AddrSegment) { address => - implicit val jsonStreamingSupport: ToResponseMarshaller[Source[JsValue, NotUsed]] = jsonStreamMarshaller() + implicit val jsonStreamingSupport: ToResponseMarshaller[Source[DataEntry[?], NotUsed]] = + jacksonStreamMarshaller()(DataEntry.dataEntrySerializer) (path(Segment) & get) { key => complete(accountDataEntry(address, key)) @@ -209,7 +210,7 @@ case class AddressApiRoute( log.trace(s"Error compiling regex $matches: ${e.getMessage}") complete(ApiError.fromValidationError(GenericError(s"Cannot compile regex"))) }, - _ => accountData(address, Some(matches)) + _ => accountData(address, matches) ) } ~ anyParam("key", limit = settings.dataKeysRequestLimit) { keys => extractMethod.filter(_ != HttpMethods.GET || keys.nonEmpty) { _ => @@ -286,20 +287,30 @@ case class AddressApiRoute( pass } - private def accountData(address: Address, regex: Option[String] = None)(implicit m: ToResponseMarshaller[Source[JsValue, NotUsed]]) = { - routeTimeout.execute( + private def accountData(address: Address)(implicit m: ToResponseMarshaller[Source[DataEntry[?], NotUsed]]) = { + routeTimeout.executeFromObservable( commonAccountsApi - .dataStream(address, regex) - .toListL - .map(data => Source.fromIterator(() => data.sortBy(_.key).iterator.map(Json.toJson[DataEntry[?]]))) - )(_.runAsyncLogErr(_)) + .dataStream(address, None) + ) } + private def accountData(addr: Address, regex: String)(implicit m: ToResponseMarshaller[Source[DataEntry[?], NotUsed]]) = + routeTimeout.executeFromObservable( + commonAccountsApi + .dataStream(addr, Some(regex)) + ) + private def accountDataEntry(address: Address, key: String): ToResponseMarshallable = - commonAccountsApi.data(address, key).toRight(DataKeyDoesNotExist) + commonAccountsApi + .data(address, key) + .toRight(DataKeyDoesNotExist) private def accountDataList(address: Address, keys: String*) = - Source.fromIterator(() => keys.flatMap(commonAccountsApi.data(address, _)).iterator.map(Json.toJson[DataEntry[?]])) + Source.fromIterator(() => + keys + .flatMap(commonAccountsApi.data(address, _)) + .iterator + ) private def signPath(address: Address, encode: Boolean): Route = (post & entity(as[String])) { message => withAuth { diff --git a/node/src/main/scala/com/wavesplatform/api/http/ApiMarshallers.scala b/node/src/main/scala/com/wavesplatform/api/http/ApiMarshallers.scala index 333ca202d7a..f0bcf85da96 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/ApiMarshallers.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/ApiMarshallers.scala @@ -2,17 
+2,23 @@ package com.wavesplatform.api.http import akka.NotUsed import akka.http.scaladsl.marshalling.* -import akka.http.scaladsl.model.* import akka.http.scaladsl.model.MediaTypes.{`application/json`, `text/plain`} +import akka.http.scaladsl.model.* import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, PredefinedFromEntityUnmarshallers, Unmarshaller} import akka.http.scaladsl.util.FastFuture import akka.stream.scaladsl.{Flow, Source} import akka.util.ByteString +import com.fasterxml.jackson.core.util.ByteArrayBuilder +import com.fasterxml.jackson.core.JsonFactory +import com.fasterxml.jackson.databind.JsonSerializer +import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider +import com.wavesplatform.api.http.ApiMarshallers.writeToBytes import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.ValidationError import com.wavesplatform.transaction.smart.script.trace.TracedResult import play.api.libs.json.* +import scala.util.Using import scala.util.control.Exception.nonFatalCatch import scala.util.control.NoStackTrace @@ -33,12 +39,11 @@ trait ApiMarshallers extends JsonFormats { def tracedResultMarshaller[A](includeTrace: Boolean)(implicit writes: OWrites[A]): ToResponseMarshaller[TracedResult[ApiError, A]] = fromStatusCodeAndValue[StatusCode, JsValue] - .compose( - ae => - ( - ae.resultE.fold(_.code, _ => StatusCodes.OK), - ae.resultE.fold(_.json, writes.writes) ++ (if (includeTrace) Json.obj("trace" -> ae.trace.map(_.loggedJson)) else Json.obj()) - ) + .compose(ae => + ( + ae.resultE.fold(_.code, _ => StatusCodes.OK), + ae.resultE.fold(_.json, writes.writes) ++ (if (includeTrace) Json.obj("trace" -> ae.trace.map(_.loggedJson)) else Json.obj()) + ) ) private[this] lazy val jsonStringUnmarshaller = @@ -49,6 +54,12 @@ trait ApiMarshallers extends JsonFormats { case (data, charset) => data.decodeString(charset.nioCharset.name) } + private[this] lazy val jsonByteStringMarshaller = + Marshaller.byteStringMarshaller(`application/json`) + + private[this] lazy val customJsonByteStringMarshaller = + Marshaller.byteStringMarshaller(CustomJson.jsonWithNumbersAsStrings) + private[this] lazy val jsonStringMarshaller = Marshaller.stringMarshaller(`application/json`) @@ -83,6 +94,14 @@ trait ApiMarshallers extends JsonFormats { .compose(writes.writes) ) + implicit def jacksonMarshaller[A](implicit ser: Boolean => JsonSerializer[A]): ToEntityMarshaller[A] = + Marshaller.oneOf( + jsonByteStringMarshaller + .compose(v => ByteString.fromArrayUnsafe(writeToBytes[A](v)(ser(false)))), + customJsonByteStringMarshaller + .compose(v => ByteString.fromArrayUnsafe(writeToBytes[A](v)(ser(true)))) + ) + // preserve support for using plain strings as request entities implicit val stringMarshaller: ToEntityMarshaller[String] = PredefinedToEntityMarshallers.stringMarshaller(`text/plain`) @@ -98,12 +117,28 @@ trait ApiMarshallers extends JsonFormats { } } - def jsonStreamMarshaller(prefix: String = "[", delimiter: String = ",", suffix: String = "]"): ToResponseMarshaller[Source[JsValue, NotUsed]] = { - val pjm = playJsonMarshaller[JsValue].map(_.dataBytes) + def playJsonStreamMarshaller( + prefix: String = "[", + delimiter: String = ",", + suffix: String = "]" + ): ToResponseMarshaller[Source[JsValue, NotUsed]] = + jsonStreamMarshaller(playJsonMarshaller[JsValue])(prefix, delimiter, suffix) + + def jacksonStreamMarshaller[A]( + prefix: String = "[", + delimiter: String = ",", + suffix: String = "]" + )(implicit ser: Boolean => JsonSerializer[A]): ToResponseMarshaller[Source[A, 
NotUsed]] = + jsonStreamMarshaller(jacksonMarshaller[A])(prefix, delimiter, suffix) + + private def jsonStreamMarshaller[A]( + marshaller: ToEntityMarshaller[A] + )(prefix: String, delimiter: String, suffix: String): Marshaller[Source[A, NotUsed], HttpResponse] = { + val bsm = marshaller.map(_.dataBytes) val framingRenderer = Flow[ByteString].intersperse(ByteString(prefix), ByteString(delimiter), ByteString(suffix)) - Marshaller[Source[JsValue, NotUsed], HttpResponse] { implicit ec => source => + Marshaller[Source[A, NotUsed], HttpResponse] { implicit ec => source => val availableMarshallingsPerElement = source.mapAsync(1) { t => - pjm(t)(ec) + bsm(t)(ec) } FastFuture.successful(List(`application/json`, CustomJson.jsonWithNumbersAsStrings).map { contentType => Marshalling.WithFixedContentType( @@ -130,4 +165,16 @@ trait ApiMarshallers extends JsonFormats { } } -object ApiMarshallers extends ApiMarshallers +object ApiMarshallers extends ApiMarshallers { + private lazy val jsonFactory = new JsonFactory() + + def writeToBytes[A](value: A)(implicit ser: JsonSerializer[A]): Array[Byte] = { + Using.resource(new ByteArrayBuilder(jsonFactory._getBufferRecycler())) { bb => + Using.resource(jsonFactory.createGenerator(bb)) { gen => + ser.serialize(value, gen, new DefaultSerializerProvider.Impl) + gen.flush() + bb.toByteArray + } + }((bb: ByteArrayBuilder) => bb.release()) + } +} diff --git a/node/src/main/scala/com/wavesplatform/api/http/BlocksApiRoute.scala b/node/src/main/scala/com/wavesplatform/api/http/BlocksApiRoute.scala index 4731fce4792..a06e1322da1 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/BlocksApiRoute.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/BlocksApiRoute.scala @@ -7,7 +7,6 @@ import cats.syntax.either.* import com.wavesplatform.api.BlockMeta import com.wavesplatform.api.common.CommonBlocksApi import com.wavesplatform.api.http.ApiError.{BlockDoesNotExist, TooBigArrayAllocation} -import com.wavesplatform.api.http.TransactionsApiRoute.TransactionJsonSerializer import com.wavesplatform.block.Block import com.wavesplatform.settings.RestAPISettings import com.wavesplatform.state.TxMeta diff --git a/node/src/main/scala/com/wavesplatform/api/http/CustomJson.scala b/node/src/main/scala/com/wavesplatform/api/http/CustomJson.scala index 91e07e69980..50e979c63bf 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/CustomJson.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/CustomJson.scala @@ -1,7 +1,6 @@ package com.wavesplatform.api.http import java.io.IOException - import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.model.{MediaRange, MediaType} import com.fasterxml.jackson.core.io.SegmentedStringWriter @@ -9,37 +8,10 @@ import com.fasterxml.jackson.core.util.BufferRecyclers import com.fasterxml.jackson.core.{JsonGenerator, JsonProcessingException} import com.fasterxml.jackson.databind.module.SimpleModule import com.fasterxml.jackson.databind.{JsonMappingException, JsonSerializer, ObjectMapper, SerializerProvider} -import play.api.libs.json._ +import com.wavesplatform.api.http.CustomJson.lsfFieldNamesToTranslate +import play.api.libs.json.* object NumberAsStringSerializer extends JsonSerializer[JsValue] { - private val fieldNamesToTranslate = Set( - "amount", - "available", - "balance", - "buyMatcherFee", - "currentReward", - "desiredReward", - "effective", - "fee", - "feeAmount", - "generating", - "in", - "matcherFee", - "minIncrement", - "minSponsoredAssetFee", - "out", - "price", - "quantity", - 
"regular", - "reward", - "sellMatcherFee", - "sponsorBalance", - "totalAmount", - "totalFee", - "totalWavesAmount", - "value" - ) - override def serialize(value: JsValue, json: JsonGenerator, provider: SerializerProvider): Unit = value match { case JsNumber(v) => json.writeNumber(v.bigDecimal) @@ -56,7 +28,7 @@ object NumberAsStringSerializer extends JsonSerializer[JsValue] { case JsObject(values) => json.writeStartObject() values.foreach { - case (name, JsNumber(v)) if fieldNamesToTranslate(name) => + case (name, JsNumber(v)) if lsfFieldNamesToTranslate(name) => json.writeStringField(name, v.bigDecimal.toPlainString) case (name, jsv) => json.writeFieldName(name) @@ -71,6 +43,34 @@ object NumberAsStringSerializer extends JsonSerializer[JsValue] { object CustomJson { val jsonWithNumbersAsStrings: MediaType.WithFixedCharset = `application/json`.withParams(Map("large-significand-format" -> "string")) + val lsfFieldNamesToTranslate = Set( + "amount", + "available", + "balance", + "buyMatcherFee", + "currentReward", + "desiredReward", + "effective", + "fee", + "feeAmount", + "generating", + "in", + "matcherFee", + "minIncrement", + "minSponsoredAssetFee", + "out", + "price", + "quantity", + "regular", + "reward", + "sellMatcherFee", + "sponsorBalance", + "totalAmount", + "totalFee", + "totalWavesAmount", + "value" + ) + def acceptsNumbersAsStrings(mr: MediaRange): Boolean = mr match { case MediaRange.One(`jsonWithNumbersAsStrings`, _) => true case _ => false diff --git a/node/src/main/scala/com/wavesplatform/api/http/DebugApiRoute.scala b/node/src/main/scala/com/wavesplatform/api/http/DebugApiRoute.scala index 990420dd4d1..e6e92418912 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/DebugApiRoute.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/DebugApiRoute.scala @@ -9,7 +9,6 @@ import com.typesafe.config.{ConfigObject, ConfigRenderOptions} import com.wavesplatform.Version import com.wavesplatform.account.{Address, PKKeyPair} import com.wavesplatform.api.common.{CommonAccountsApi, CommonAssetsApi, CommonTransactionsApi, TransactionMeta} -import com.wavesplatform.api.http.TransactionsApiRoute.TransactionJsonSerializer import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.ValidationError import com.wavesplatform.mining.{Miner, MinerDebugInfo} @@ -224,7 +223,7 @@ case class DebugApiRoute( val tracedDiff = for { tx <- TracedResult(parsedTransaction) - diff <- TransactionDiffer.forceValidate(blockchain.lastBlockTimestamp, time.correctedTime())(blockchain, tx) + diff <- TransactionDiffer.forceValidate(blockchain.lastBlockTimestamp, time.correctedTime(), enableExecutionLog = true)(blockchain, tx) } yield (tx, diff) val error = tracedDiff.resultE match { diff --git a/node/src/main/scala/com/wavesplatform/api/http/RouteTimeout.scala b/node/src/main/scala/com/wavesplatform/api/http/RouteTimeout.scala index e13e894a7f6..8649789f35b 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/RouteTimeout.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/RouteTimeout.scala @@ -7,6 +7,7 @@ import akka.http.scaladsl.server.{ExceptionHandler, Route} import akka.stream.scaladsl.Source import monix.eval.Task import monix.execution.Scheduler +import monix.reactive.Observable import scala.concurrent.TimeoutException import scala.concurrent.duration.FiniteDuration @@ -26,6 +27,10 @@ class RouteTimeout(timeout: FiniteDuration)(implicit sc: Scheduler) extends ApiM .map(Source(_).map(f))(sc) } + def executeFromObservable[T](observable: Observable[T])(implicit m: 
ToResponseMarshaller[Source[T, NotUsed]]): Route = { + handleExceptions(handler) & complete(Source.fromPublisher(observable.toReactivePublisher(sc)).initialTimeout(timeout)) + } + def execute[T](task: Task[T])(f: (Task[T], Scheduler) => ToResponseMarshallable): Route = handleExceptions(handler) & complete(f(task.timeout(timeout), sc)) } diff --git a/node/src/main/scala/com/wavesplatform/api/http/StreamSerializerUtils.scala b/node/src/main/scala/com/wavesplatform/api/http/StreamSerializerUtils.scala new file mode 100644 index 00000000000..11b8bbff96e --- /dev/null +++ b/node/src/main/scala/com/wavesplatform/api/http/StreamSerializerUtils.scala @@ -0,0 +1,64 @@ +package com.wavesplatform.api.http + +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider} + +object StreamSerializerUtils { + implicit class JsonGeneratorOps(val gen: JsonGenerator) extends AnyVal { + def writeValueField[A](key: String, value: A)(ser: JsonSerializer[A], provider: SerializerProvider): Unit = { + gen.writeFieldName(key) + ser.serialize(value, gen, provider) + } + + def writeValueField[A](key: String)(writeF: JsonGenerator => Unit): Unit = { + gen.writeFieldName(key) + writeF(gen) + } + + def writeNumberField(key: String, value: Long, numberAsString: Boolean): Unit = { + if (numberAsString && CustomJson.lsfFieldNamesToTranslate.contains(key)) { + gen.writeStringField(key, value.toString) + } else { + gen.writeNumberField(key, value) + } + } + + def writeNumberField(key: String, value: BigDecimal, numberAsString: Boolean): Unit = { + if (numberAsString && CustomJson.lsfFieldNamesToTranslate.contains(key)) { + gen.writeStringField(key, value.bigDecimal.toPlainString) + } else { + gen.writeNumberField(key, value.bigDecimal) + } + } + + def writeNumberField(key: String, value: Int, numberAsString: Boolean): Unit = { + if (numberAsString && CustomJson.lsfFieldNamesToTranslate.contains(key)) { + gen.writeStringField(key, value.toString) + } else { + gen.writeNumberField(key, value) + } + } + + def writeNumberField(key: String, value: Byte, numberAsString: Boolean): Unit = { + if (numberAsString && CustomJson.lsfFieldNamesToTranslate.contains(key)) { + gen.writeStringField(key, value.toString) + } else { + gen.writeNumberField(key, value) + } + } + + def writeArrayField(key: String)(writeArrayElems: JsonGenerator => Unit): Unit = { + gen.writeFieldName(key) + gen.writeStartArray() + writeArrayElems(gen) + gen.writeEndArray() + } + + def writeArrayField[A](key: String, value: Seq[A])(ser: JsonSerializer[A], provider: SerializerProvider): Unit = { + gen.writeFieldName(key) + gen.writeStartArray() + value.foreach(v => ser.serialize(v, gen, provider)) + gen.writeEndArray() + } + } +} diff --git a/node/src/main/scala/com/wavesplatform/api/http/TransactionJsonSerializer.scala b/node/src/main/scala/com/wavesplatform/api/http/TransactionJsonSerializer.scala new file mode 100644 index 00000000000..cc8144bd59f --- /dev/null +++ b/node/src/main/scala/com/wavesplatform/api/http/TransactionJsonSerializer.scala @@ -0,0 +1,544 @@ +package com.wavesplatform.api.http + +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider} +import com.wavesplatform.account.{Address, AddressOrAlias} +import com.wavesplatform.api.common.{CommonTransactionsApi, TransactionMeta} +import com.wavesplatform.api.http.StreamSerializerUtils.* +import com.wavesplatform.api.http.TransactionJsonSerializer.* +import 
com.wavesplatform.api.http.TransactionsApiRoute.{ApplicationStatus, LeaseStatus, TxMetaEnriched} +import com.wavesplatform.common.state.ByteStr +import com.wavesplatform.database.protobuf.EthereumTransactionMeta +import com.wavesplatform.database.protobuf.EthereumTransactionMeta.Payload +import com.wavesplatform.features.BlockchainFeatures +import com.wavesplatform.lang.v1.compiler.Terms +import com.wavesplatform.lang.v1.compiler.Terms.{ + ARR, + CONST_BOOLEAN, + CONST_BYTESTR, + CONST_LONG, + CONST_STRING, + CaseObj, + EVALUATED, + EXPR, + FAIL, + FUNCTION_CALL +} +import com.wavesplatform.lang.v1.serialization.SerdeV1 +import com.wavesplatform.protobuf.transaction.PBAmounts +import com.wavesplatform.state.InvokeScriptResult.{ + AttachedPayment, + Burn, + Call, + ErrorMessage, + Invocation, + Issue, + Lease, + LeaseCancel, + Reissue, + SponsorFee +} +import com.wavesplatform.state.{Blockchain, DataEntry, InvokeScriptResult, TxMeta} +import com.wavesplatform.state.reader.LeaseDetails +import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves} +import com.wavesplatform.transaction.{Asset, PBSince, Transaction} +import com.wavesplatform.transaction.lease.{LeaseCancelTransaction, LeaseTransaction} +import com.wavesplatform.transaction.serialization.impl.InvokeScriptTxSerializer +import com.wavesplatform.transaction.smart.InvokeScriptTransaction +import com.wavesplatform.transaction.smart.InvokeScriptTransaction.Payment +import com.wavesplatform.transaction.transfer.MassTransferTransaction +import com.wavesplatform.utils.EthEncoding +import play.api.libs.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString, JsValue, Json, JsonConfiguration, OWrites, OptionHandlers} + +final case class TransactionJsonSerializer(blockchain: Blockchain, commonApi: CommonTransactionsApi) { + + val assetSerializer: JsonSerializer[Asset] = + (value: Asset, gen: JsonGenerator, serializers: SerializerProvider) => { + value match { + case Waves => gen.writeNull() + case IssuedAsset(id) => gen.writeString(id.toString) + } + } + + def evaluatedSerializer(numbersAsString: Boolean): JsonSerializer[EVALUATED] = + (value: EVALUATED, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + value match { + case CONST_LONG(num) => + gen.writeStringField("type", "Int") + gen.writeNumberField("value", num, numbersAsString) + case CONST_BYTESTR(bs) => + gen.writeStringField("type", "ByteVector") + gen.writeStringField("value", bs.toString) + case CONST_STRING(str) => + gen.writeStringField("type", "String") + gen.writeStringField("value", str) + case CONST_BOOLEAN(b) => + gen.writeStringField("type", "Boolean") + gen.writeBooleanField("value", b) + case CaseObj(caseType, fields) => + gen.writeStringField("type", caseType.name) + gen.writeValueField("value") { gen => + gen.writeStartObject() + fields.foreach { case (key, value) => + gen.writeValueField(key, value)(evaluatedSerializer(numbersAsString), serializers) + } + gen.writeEndObject() + } + case ARR(xs) => + gen.writeStringField("type", "Array") + gen.writeArrayField("value", xs)(evaluatedSerializer(numbersAsString), serializers) + case FAIL(reason) => + gen.writeStringField("error", reason) + case _ => + } + gen.writeEndObject() + } + + def funcCallSerializer(numbersAsString: Boolean): JsonSerializer[FUNCTION_CALL] = new JsonSerializer[FUNCTION_CALL] { + override def serialize(funcCall: FUNCTION_CALL, gen: JsonGenerator, serializers: SerializerProvider): Unit = { + gen.writeStartObject() + gen.writeStringField("function", 
funcCall.function.funcName) + gen.writeArrayField("args") { out => + funcCall.args.foreach { + case Terms.ARR(elements) => + gen.writeStartObject() + gen.writeStringField("type", "list") + out.writeArrayField("value")(out => elements.foreach(e => writeSingleArg(e, out))) + gen.writeEndObject() + case other => writeSingleArg(other, out) + } + } + gen.writeEndObject() + } + + def writeSingleArg(arg: EXPR, gen: JsonGenerator): Unit = { + gen.writeStartObject() + arg match { + case CONST_LONG(num) => + gen.writeStringField("type", "integer") + gen.writeNumberField("value", num, numbersAsString) + case CONST_BOOLEAN(bool) => + gen.writeStringField("type", "boolean") + gen.writeBooleanField("value", bool) + case CONST_BYTESTR(bytes) => + gen.writeStringField("type", "binary") + gen.writeStringField("value", bytes.base64) + case CONST_STRING(str) => + gen.writeStringField("type", "string") + gen.writeStringField("value", str) + case ARR(_) => + gen.writeStringField("type", "list") + gen.writeStringField("value", "unsupported") + case arg => throw new NotImplementedError(s"Not supported: $arg") + } + gen.writeEndObject() + } + } + + val leaseStatusSerializer: JsonSerializer[LeaseStatus] = + (status: LeaseStatus, gen: JsonGenerator, serializers: SerializerProvider) => { + if (status == LeaseStatus.active) gen.writeString("active") else gen.writeString("canceled") + } + + def leaseRefSerializer(numbersAsString: Boolean): JsonSerializer[LeaseRef] = + (l: LeaseRef, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("id", l.id.toString) + l.originTransactionId.fold(gen.writeNullField("originTransactionId"))(txId => gen.writeStringField("originTransactionId", txId.toString)) + l.sender.fold(gen.writeNullField("sender"))(sender => gen.writeStringField("sender", sender.toString)) + l.recipient.fold(gen.writeNullField("recipient"))(recipient => gen.writeStringField("recipient", recipient.toString)) + l.amount.fold(gen.writeNullField("amount"))(amount => gen.writeNumberField("amount", amount, numbersAsString)) + l.height.fold(gen.writeNullField("height"))(height => gen.writeNumberField("height", height, numbersAsString)) + gen.writeStringField("status", if (l.status == LeaseStatus.active) "active" else "canceled") + l.cancelHeight.fold(gen.writeNullField("cancelHeight"))(ch => gen.writeNumberField("cancelHeight", ch, numbersAsString)) + l.cancelTransactionId.fold(gen.writeNullField("cancelTransactionId"))(cti => gen.writeStringField("cancelTransactionId", cti.toString)) + gen.writeEndObject() + } + + def leaseSerializer(numbersAsString: Boolean): JsonSerializer[Lease] = + (l: Lease, gen: JsonGenerator, serializers: SerializerProvider) => { + leaseRefSerializer(numbersAsString).serialize(leaseIdToLeaseRef(l.id, Some(l.recipient), Some(l.amount)), gen, serializers) + } + + def leaseCancelSerializer(numbersAsString: Boolean): JsonSerializer[LeaseCancel] = + (lc: LeaseCancel, gen: JsonGenerator, serializers: SerializerProvider) => { + leaseRefSerializer(numbersAsString).serialize(leaseIdToLeaseRef(lc.id), gen, serializers) + } + + def paymentSerializer(numbersAsString: Boolean): JsonSerializer[Payment] = + (p: Payment, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeNumberField("amount", p.amount, numbersAsString) + gen.writeValueField("assetId")(assetSerializer.serialize(p.assetId, _, serializers)) + gen.writeEndObject() + } + + def attachedPaymentSerializer(numbersAsString: Boolean): 
JsonSerializer[AttachedPayment] = + (p: AttachedPayment, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeValueField("assetId")(assetSerializer.serialize(p.assetId, _, serializers)) + gen.writeNumberField("amount", p.amount, numbersAsString) + gen.writeEndObject() + } + + def isrPaymentSerializer(numbersAsString: Boolean): JsonSerializer[InvokeScriptResult.Payment] = + (p: InvokeScriptResult.Payment, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("address", p.address.toString) + gen.writeValueField("asset")(assetSerializer.serialize(p.asset, _, serializers)) + gen.writeNumberField("amount", p.amount, numbersAsString) + gen.writeEndObject() + } + + def issueSerializer(numbersAsString: Boolean): JsonSerializer[Issue] = + (issue: Issue, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("id", issue.id.toString) + issue.compiledScript.foreach(sc => gen.writeStringField("compiledScript", sc.toString)) + gen.writeNumberField("decimals", issue.decimals, numbersAsString) + gen.writeStringField("description", issue.description) + gen.writeBooleanField("isReissuable", issue.isReissuable) + gen.writeStringField("name", issue.name) + gen.writeNumberField("quantity", issue.quantity, numbersAsString) + gen.writeNumberField("nonce", issue.nonce, numbersAsString) + gen.writeEndObject() + } + + def reissueSerializer(numbersAsString: Boolean): JsonSerializer[Reissue] = + (r: Reissue, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("assetId", r.assetId.toString) + gen.writeBooleanField("isReissuable", r.isReissuable) + gen.writeNumberField("quantity", r.quantity, numbersAsString) + gen.writeEndObject() + } + + def burnSerializer(numbersAsString: Boolean): JsonSerializer[Burn] = + (b: Burn, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("assetId", b.assetId.toString) + gen.writeNumberField("quantity", b.quantity, numbersAsString) + gen.writeEndObject() + } + + def sponsorFeeSerializer(numbersAsString: Boolean): JsonSerializer[SponsorFee] = + (s: SponsorFee, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("assetId", s.assetId.toString) + s.minSponsoredAssetFee.foreach(fee => gen.writeNumberField("minSponsoredAssetFee", fee, numbersAsString)) + gen.writeEndObject() + } + + def callSerializer(numbersAsString: Boolean): JsonSerializer[Call] = + (c: Call, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("function", c.function) + gen.writeArrayField("args", c.args)(evaluatedSerializer(numbersAsString), serializers) + gen.writeEndObject() + } + + def invocationSerializer(numbersAsString: Boolean): JsonSerializer[Invocation] = + (inv: Invocation, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("dApp", inv.dApp.toString) + gen.writeValueField("call")(callSerializer(numbersAsString).serialize(inv.call, _, serializers)) + gen.writeArrayField("payments", inv.payments)(attachedPaymentSerializer(numbersAsString), serializers) + gen.writeValueField("stateChanges")(invokeScriptResultSerializer(numbersAsString).serialize(inv.stateChanges, _, serializers)) + gen.writeEndObject() + } + + val errorMessageSerializer: JsonSerializer[ErrorMessage] = + (err: ErrorMessage, gen: JsonGenerator, 
serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeNumberField("code", err.code, false) + gen.writeStringField("text", err.text) + gen.writeEndObject() + } + + def invokeScriptResultSerializer(numbersAsString: Boolean): JsonSerializer[InvokeScriptResult] = + (isr: InvokeScriptResult, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeArrayField("data", isr.data)(DataEntry.dataEntrySerializer(numbersAsString), serializers) + gen.writeArrayField("transfers", isr.transfers)(isrPaymentSerializer(numbersAsString), serializers) + gen.writeArrayField("issues", isr.issues)(issueSerializer(numbersAsString), serializers) + gen.writeArrayField("reissues", isr.reissues)(reissueSerializer(numbersAsString), serializers) + gen.writeArrayField("burns", isr.burns)(burnSerializer(numbersAsString), serializers) + gen.writeArrayField("sponsorFees", isr.sponsorFees)(sponsorFeeSerializer(numbersAsString), serializers) + gen.writeArrayField("leases", isr.leases)(leaseSerializer(numbersAsString), serializers) + gen.writeArrayField("leaseCancels", isr.leaseCancels)(leaseCancelSerializer(numbersAsString), serializers) + gen.writeArrayField("invokes", isr.invokes)(invocationSerializer(numbersAsString), serializers) + isr.error.foreach(err => gen.writeValueField("error")(errorMessageSerializer.serialize(err, _, serializers))) + gen.writeEndObject() + } + + def txMetaJsonSerializer(address: Address, isBlockV5: Int => Boolean, numbersAsString: Boolean): JsonSerializer[TxMetaEnriched] = + (txMeta: TxMetaEnriched, gen: JsonGenerator, serializers: SerializerProvider) => { + txMeta.meta match { + case TransactionMeta.Invoke(height, tx: InvokeScriptTransaction, succeeded, spentComplexity, invokeScriptResult) => + gen.writeStartObject() + gen.writeNumberField("type", tx.tpe.id, numbersAsString) + gen.writeStringField("id", tx.id().toString) + gen.writeNumberField("fee", tx.assetFee._2, numbersAsString) + tx.assetFee._1.maybeBase58Repr.foreach(gen.writeStringField("feeAssetId", _)) + gen.writeNumberField("timestamp", tx.timestamp, numbersAsString) + gen.writeNumberField("version", tx.version, numbersAsString) + if (tx.asInstanceOf[PBSince].isProtobufVersion) gen.writeNumberField("chainId", tx.chainId, numbersAsString) + gen.writeStringField("sender", tx.sender.toAddress(tx.chainId).toString) + gen.writeStringField("senderPublicKey", tx.sender.toString) + gen.writeArrayField("proofs")(gen => tx.proofs.proofs.foreach(p => gen.writeString(p.toString))) + gen.writeStringField("dApp", tx.dApp.toString) + gen.writeArrayField("payment", tx.payments)(paymentSerializer(numbersAsString), serializers) + gen.writeValueField("call")(funcCallSerializer(numbersAsString).serialize(tx.funcCall, _, serializers)) + gen.writeNumberField("height", height.toInt, numbersAsString) + val appStatus = + if (isBlockV5(height)) + if (succeeded) Some(ApplicationStatus.Succeeded) else Some(ApplicationStatus.ScriptExecutionFailed) + else + None + appStatus.foreach(s => gen.writeStringField("applicationStatus", s)) + gen.writeNumberField("spentComplexity", spentComplexity, numbersAsString) + invokeScriptResult.fold(gen.writeNullField("stateChanges"))(isr => + gen.writeValueField("stateChanges")(invokeScriptResultSerializer(numbersAsString).serialize(isr, _, serializers)) + ) + gen.writeEndObject() + case TransactionMeta.Ethereum(height, tx, succeeded, spentComplexity, Some(EthereumTransactionMeta(Payload.Invocation(i), _)), isr) => + val functionCallEi = 
SerdeV1.deserializeFunctionCall(i.functionCall.toByteArray).toOption + val payments = i.payments.map(p => InvokeScriptTransaction.Payment(p.amount, PBAmounts.toVanillaAssetId(p.assetId))) + + gen.writeStartObject() + gen.writeStringField("id", tx.id().toString) + gen.writeNumberField("fee", tx.assetFee._2, numbersAsString) + tx.assetFee._1.maybeBase58Repr.foreach(gen.writeStringField("feeAssetId", _)) + gen.writeNumberField("timestamp", tx.timestamp, numbersAsString) + gen.writeNumberField("version", tx.version, numbersAsString) + if (tx.isProtobufVersion) gen.writeNumberField("chainId", tx.chainId, numbersAsString) + gen.writeStringField("bytes", EthEncoding.toHexString(tx.bytes())) + gen.writeStringField("sender", tx.senderAddress().toString) + gen.writeStringField("senderPublicKey", tx.signerPublicKey().toString) + gen.writeNumberField("height", height.toInt, numbersAsString) + val appStatus = + if (isBlockV5(height)) + if (succeeded) Some(ApplicationStatus.Succeeded) else Some(ApplicationStatus.ScriptExecutionFailed) + else + None + appStatus.foreach(s => gen.writeStringField("applicationStatus", s)) + gen.writeNumberField("spentComplexity", spentComplexity, numbersAsString) + gen.writeStringField("type", "invocation") + gen.writeStringField("dApp", Address(EthEncoding.toBytes(tx.underlying.getTo)).toString) + functionCallEi.fold(gen.writeNullField("call"))(fc => + gen.writeValueField("call")(funcCallSerializer(numbersAsString).serialize(fc, _, serializers)) + ) + gen.writeArrayField("payment", payments)(paymentSerializer(numbersAsString), serializers) + isr.fold(gen.writeNullField("stateChanges"))(isr => + gen.writeValueField("stateChanges")(invokeScriptResultSerializer(numbersAsString).serialize(isr, _, serializers)) + ) + gen.writeEndObject() + case meta @ TransactionMeta.Default(height, mtt: MassTransferTransaction, succeeded, spentComplexity) if mtt.sender.toAddress != address => + /** Produces compact representation for large transactions by stripping unnecessary data. Currently implemented for MassTransfer transaction + * only. 
+ */ + jsObjectSerializer(numbersAsString).serialize( + mtt.compactJson(address, txMeta.aliases.getOrElse(Set.empty)) ++ transactionMetaJson(meta), + gen, + serializers + ) + case other => + jsObjectSerializer(numbersAsString).serialize(other.transaction.json() ++ transactionMetaJson(other), gen, serializers) + } + } + + def jsObjectSerializer(numbersAsString: Boolean): JsonSerializer[JsObject] = new JsonSerializer[JsObject] { + override def serialize(jsObj: JsObject, gen: JsonGenerator, serializers: SerializerProvider): Unit = { + gen.writeStartObject() + jsObj.fields.foreach { case (key, value) => encodeField(key, value, gen, serializers) } + gen.writeEndObject() + } + + private def encodeField(key: String, jsValue: JsValue, gen: JsonGenerator, serializers: SerializerProvider): Unit = { + jsValue match { + case n: JsNumber => + gen.writeNumberField(key, n.value, numbersAsString) + case b: JsBoolean => + gen.writeBooleanField(key, b.value) + case s: JsString => + gen.writeStringField(key, s.value) + case a: JsArray => + gen.writeArrayField(key)(out => a.value.foreach(encodeArrayElem(_, out, serializers))) + case o: JsObject => + gen.writeValueField(key)(serialize(o, _, serializers)) + case _ => + gen.writeNullField(key) + } + } + + private def encodeArrayElem(jsValue: JsValue, gen: JsonGenerator, serializers: SerializerProvider): Unit = { + jsValue match { + case n: JsNumber => + gen.writeNumber(n.value.bigDecimal) + case b: JsBoolean => + gen.writeBoolean(b.value) + case s: JsString => + gen.writeString(s.value) + case a: JsArray => + gen.writeStartArray() + a.value.foreach(encodeArrayElem(_, gen, serializers)) + gen.writeEndArray() + case o: JsObject => + serialize(o, gen, serializers) + case _ => + gen.writeNull() + } + } + } + + def transactionMetaJson(meta: TransactionMeta): JsObject = { + val specificInfo = meta.transaction match { + case lease: LeaseTransaction => + import com.wavesplatform.api.http.TransactionsApiRoute.LeaseStatus.* + Json.obj("status" -> (if (blockchain.leaseDetails(lease.id()).exists(_.isActive)) active else canceled)) + + case leaseCancel: LeaseCancelTransaction => + Json.obj("lease" -> leaseIdToLeaseRef(leaseCancel.leaseId)) + + case _ => JsObject.empty + } + + val stateChanges = meta match { + case i: TransactionMeta.Invoke => + Json.obj("stateChanges" -> i.invokeScriptResult) + + case e: TransactionMeta.Ethereum => + val payloadJson: JsObject = e.meta + .map(_.payload) + .collect { + case Payload.Invocation(i) => + val functionCallEi = SerdeV1.deserializeFunctionCall(i.functionCall.toByteArray).map(InvokeScriptTxSerializer.functionCallToJson) + val payments = i.payments.map(p => InvokeScriptTransaction.Payment(p.amount, PBAmounts.toVanillaAssetId(p.assetId))) + Json.obj( + "type" -> "invocation", + "dApp" -> Address(EthEncoding.toBytes(e.transaction.underlying.getTo)), + "call" -> functionCallEi.toOption, + "payment" -> payments, + "stateChanges" -> e.invokeScriptResult + ) + + case Payload.Transfer(t) => + val (asset, amount) = PBAmounts.toAssetAndAmount(t.getAmount) + Json.obj( + "type" -> "transfer", + "recipient" -> Address(t.publicKeyHash.toByteArray), + "asset" -> asset, + "amount" -> amount + ) + } + .getOrElse(JsObject.empty) + Json.obj("payload" -> payloadJson) + + case _ => JsObject.empty + } + + Seq( + TransactionJsonSerializer.height(meta.height), + metaJson(TxMeta(meta.height, meta.succeeded, meta.spentComplexity)), + stateChanges, + specificInfo + ).reduce(_ ++ _) + } + + def transactionWithMetaJson(meta: TransactionMeta): JsObject = + 
meta.transaction.json() ++ transactionMetaJson(meta) + + def unconfirmedTxExtendedJson(tx: Transaction): JsObject = tx match { + case leaseCancel: LeaseCancelTransaction => + leaseCancel.json() ++ Json.obj("lease" -> leaseIdToLeaseRef(leaseCancel.leaseId)) + + case t => t.json() + } + + def metaJson(m: TxMeta): JsObject = + TransactionJsonSerializer.applicationStatus(isBlockV5(m.height), m.succeeded) ++ Json.obj("spentComplexity" -> m.spentComplexity) + + private[this] def isBlockV5(height: Int): Boolean = blockchain.isFeatureActivated(BlockchainFeatures.BlockV5, height) + + // Extended lease format. Overrides default + private[this] def leaseIdToLeaseRef( + leaseId: ByteStr, + recipientParamOpt: Option[AddressOrAlias] = None, + amountOpt: Option[Long] = None + ): LeaseRef = { + val detailsOpt = blockchain.leaseDetails(leaseId) + val txMetaOpt = detailsOpt.flatMap(d => blockchain.transactionMeta(d.sourceId)) + val recipientOpt = recipientParamOpt.orElse(detailsOpt.map(_.recipient)) + val resolvedRecipientOpt = recipientOpt.flatMap(r => blockchain.resolveAlias(r).toOption) + + val statusOpt = detailsOpt.map(_.status) + val status = LeaseStatus(statusOpt.contains(LeaseDetails.Status.Active)) + val statusDataOpt = statusOpt.map { + case LeaseDetails.Status.Active => (None, None) + case LeaseDetails.Status.Cancelled(height, txId) => (Some(height), txId) + case LeaseDetails.Status.Expired(height) => (Some(height), None) + } + + LeaseRef( + leaseId, + detailsOpt.map(_.sourceId), + detailsOpt.map(_.sender.toAddress), + resolvedRecipientOpt, + amountOpt orElse detailsOpt.map(_.amount), + txMetaOpt.map(_.height), + status, + statusDataOpt.flatMap(_._1), + statusDataOpt.flatMap(_._2) + ) + } + + private[http] implicit val leaseWrites: OWrites[InvokeScriptResult.Lease] = + LeaseRef.jsonWrites.contramap((l: InvokeScriptResult.Lease) => leaseIdToLeaseRef(l.id, Some(l.recipient), Some(l.amount))) + + private[http] implicit val leaseCancelWrites: OWrites[InvokeScriptResult.LeaseCancel] = + LeaseRef.jsonWrites.contramap((l: InvokeScriptResult.LeaseCancel) => leaseIdToLeaseRef(l.id)) + + // To override nested InvokeScriptResult writes + private[http] implicit val invocationWrites: OWrites[InvokeScriptResult.Invocation] = (i: InvokeScriptResult.Invocation) => + Json.obj( + "dApp" -> i.dApp, + "call" -> i.call, + "payment" -> i.payments, + "stateChanges" -> invokeScriptResultWrites.writes(i.stateChanges) + ) + + private[http] implicit val invokeScriptResultWrites: OWrites[InvokeScriptResult] = { + import InvokeScriptResult.{issueFormat, reissueFormat, burnFormat, sponsorFeeFormat} + Json.writes[InvokeScriptResult] + } +} + +object TransactionJsonSerializer { + def applicationStatus(isBlockV5: Boolean, succeeded: Boolean): JsObject = + if (isBlockV5) + Json.obj("applicationStatus" -> (if (succeeded) ApplicationStatus.Succeeded else ApplicationStatus.ScriptExecutionFailed)) + else + JsObject.empty + + def height(height: Int): JsObject = + Json.obj("height" -> height) + + final case class LeaseRef( + id: ByteStr, + originTransactionId: Option[ByteStr], + sender: Option[Address], + recipient: Option[Address], + amount: Option[Long], + height: Option[Int], + status: LeaseStatus, + cancelHeight: Option[Int], + cancelTransactionId: Option[ByteStr] + ) + + object LeaseRef { + import com.wavesplatform.utils.byteStrFormat + implicit val config = JsonConfiguration(optionHandlers = OptionHandlers.WritesNull) + implicit val jsonWrites: OWrites[LeaseRef] = Json.writes[LeaseRef] + } +} diff --git 
a/node/src/main/scala/com/wavesplatform/api/http/TransactionsApiRoute.scala b/node/src/main/scala/com/wavesplatform/api/http/TransactionsApiRoute.scala index 7caf0242355..531ff7d51e0 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/TransactionsApiRoute.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/TransactionsApiRoute.scala @@ -7,35 +7,32 @@ import cats.instances.list.* import cats.syntax.alternative.* import cats.syntax.either.* import cats.syntax.traverse.* -import com.wavesplatform.account.{Address, AddressOrAlias, Alias} +import com.wavesplatform.account.{Address, Alias} import com.wavesplatform.api.common.{CommonTransactionsApi, TransactionMeta} import com.wavesplatform.api.http.ApiError.* import com.wavesplatform.block.Block import com.wavesplatform.block.Block.TransactionProof import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.Base58 -import com.wavesplatform.database.protobuf.EthereumTransactionMeta.Payload import com.wavesplatform.features.BlockchainFeatures -import com.wavesplatform.lang.v1.serialization.SerdeV1 import com.wavesplatform.network.TransactionPublisher -import com.wavesplatform.protobuf.transaction.PBAmounts import com.wavesplatform.settings.RestAPISettings -import com.wavesplatform.state.reader.LeaseDetails -import com.wavesplatform.state.{Blockchain, InvokeScriptResult, TxMeta} +import com.wavesplatform.state.reader.CompositeBlockchain +import com.wavesplatform.state.Blockchain import com.wavesplatform.transaction.* -import com.wavesplatform.transaction.lease.* -import com.wavesplatform.transaction.serialization.impl.InvokeScriptTxSerializer -import com.wavesplatform.transaction.smart.InvokeScriptTransaction -import com.wavesplatform.utils.{EthEncoding, Time} +import com.wavesplatform.transaction.transfer.MassTransferTransaction +import com.wavesplatform.utils.Time import com.wavesplatform.wallet.Wallet import monix.eval.Task import play.api.libs.json.* +import monix.reactive.Observable case class TransactionsApiRoute( settings: RestAPISettings, commonApi: CommonTransactionsApi, wallet: Wallet, blockchain: Blockchain, + compositeBlockchain: () => CompositeBlockchain, utxPoolSize: () => Int, transactionPublisher: TransactionPublisher, time: Time, @@ -55,13 +52,16 @@ case class TransactionsApiRoute( def addressWithLimit: Route = { (get & path("address" / AddrSegment / "limit" / IntNumber) & parameter("after".?)) { (address, limit, maybeAfter) => - routeTimeout.executeToFuture { - val after = - maybeAfter.map(s => ByteStr.decodeBase58(s).getOrElse(throw ApiException(CustomValidationError(s"Unable to decode transaction id $s")))) - if (limit > settings.transactionsByAddressLimit) throw ApiException(TooBigArrayAllocation) + val after = + maybeAfter.map(s => ByteStr.decodeBase58(s).getOrElse(throw ApiException(CustomValidationError(s"Unable to decode transaction id $s")))) + if (limit > settings.transactionsByAddressLimit) throw ApiException(TooBigArrayAllocation) - transactionsByAddress(address, limit, after).map(txs => List(txs)) // Double list - [ [tx1, tx2, ...] ] - } + val blockV5Activation = blockchain.activatedFeatures.get(BlockchainFeatures.BlockV5.id) + val improvedSerializer = serializer.copy(blockchain = compositeBlockchain()) + + routeTimeout.executeFromObservable { + transactionsByAddress(address, limit, after) // Double list - [ [tx1, tx2, ...] 
] + }(jacksonStreamMarshaller("[[", ",", "]]")(improvedSerializer.txMetaJsonSerializer(address, h => blockV5Activation.exists(v5h => v5h <= h), _))) } } @@ -196,7 +196,7 @@ case class TransactionsApiRoute( case (errors, _) => InvalidIds(errors) } - def transactionsByAddress(address: Address, limitParam: Int, maybeAfter: Option[ByteStr]): Task[List[JsObject]] = { + def transactionsByAddress(address: Address, limitParam: Int, maybeAfter: Option[ByteStr]): Observable[TxMetaEnriched] = { val aliasesOfAddress: Task[Set[Alias]] = commonApi .aliasesOfAddress(address) @@ -205,31 +205,28 @@ case class TransactionsApiRoute( .map(aliases => aliases.toSet) .memoize - /** Produces compact representation for large transactions by stripping unnecessary data. Currently implemented for MassTransfer transaction only. - */ - def compactJson(address: Address, meta: TransactionMeta): Task[JsObject] = { - import com.wavesplatform.transaction.transfer.* + def txMetaEnriched(address: Address, meta: TransactionMeta): Task[TxMetaEnriched] = meta.transaction match { case mtt: MassTransferTransaction if mtt.sender.toAddress != address => - (if ( - mtt.transfers.exists(pt => - pt.address match { - case address: Address => false - case a: Alias => true - } - ) - ) aliasesOfAddress.map(aliases => mtt.compactJson(address, aliases)) - else Task.now(mtt.compactJson(address, Set.empty))).map(_ ++ serializer.transactionMetaJson(meta)) - - case _ => Task.now(serializer.transactionWithMetaJson(meta)) + val aliasExists = mtt.transfers.exists(pt => + pt.address match { + case _: Address => false + case _: Alias => true + } + ) + + if (aliasExists) { + aliasesOfAddress.map(aliases => TxMetaEnriched(meta, Some(aliases))) + } else { + Task.now(TxMetaEnriched(meta)) + } + case _ => Task.now(TxMetaEnriched(meta)) } - } commonApi .transactionsByAddress(address, None, Set.empty, maybeAfter) .take(limitParam) - .mapEval(compactJson(address, _)) - .toListL + .mapEval(txMetaEnriched(address, _)) } } @@ -272,155 +269,5 @@ object TransactionsApiRoute { } yield TransactionProof(id, transactionIndex, merkleProof) } - private[http] object TransactionJsonSerializer { - def applicationStatus(isBlockV5: Boolean, succeeded: Boolean): JsObject = - if (isBlockV5) - Json.obj("applicationStatus" -> (if (succeeded) ApplicationStatus.Succeeded else ApplicationStatus.ScriptExecutionFailed)) - else - JsObject.empty - - def height(height: Int): JsObject = - Json.obj("height" -> height) - } - - private[http] final case class TransactionJsonSerializer(blockchain: Blockchain, commonApi: CommonTransactionsApi) { - def transactionMetaJson(meta: TransactionMeta): JsObject = { - val specificInfo = meta.transaction match { - case lease: LeaseTransaction => - import com.wavesplatform.api.http.TransactionsApiRoute.LeaseStatus.* - Json.obj("status" -> (if (blockchain.leaseDetails(lease.id()).exists(_.isActive)) active else canceled)) - - case leaseCancel: LeaseCancelTransaction => - Json.obj("lease" -> leaseIdToLeaseRef(leaseCancel.leaseId)) - - case _ => JsObject.empty - } - - val stateChanges = meta match { - case i: TransactionMeta.Invoke => - Json.obj("stateChanges" -> i.invokeScriptResult) - - case e: TransactionMeta.Ethereum => - val payloadJson: JsObject = e.meta - .map(_.payload) - .collect { - case Payload.Invocation(i) => - val functionCallEi = SerdeV1.deserializeFunctionCall(i.functionCall.toByteArray).map(InvokeScriptTxSerializer.functionCallToJson) - val payments = i.payments.map(p => InvokeScriptTransaction.Payment(p.amount, 
PBAmounts.toVanillaAssetId(p.assetId))) - Json.obj( - "type" -> "invocation", - "dApp" -> Address(EthEncoding.toBytes(e.transaction.underlying.getTo)), - "call" -> functionCallEi.toOption, - "payment" -> payments, - "stateChanges" -> e.invokeScriptResult - ) - - case Payload.Transfer(t) => - val (asset, amount) = PBAmounts.toAssetAndAmount(t.getAmount) - Json.obj( - "type" -> "transfer", - "recipient" -> Address(t.publicKeyHash.toByteArray), - "asset" -> asset, - "amount" -> amount - ) - } - .getOrElse(JsObject.empty) - Json.obj("payload" -> payloadJson) - - case _ => JsObject.empty - } - - Seq( - TransactionJsonSerializer.height(meta.height), - metaJson(TxMeta(meta.height, meta.succeeded, meta.spentComplexity)), - stateChanges, - specificInfo - ).reduce(_ ++ _) - } - - def transactionWithMetaJson(meta: TransactionMeta): JsObject = { - meta.transaction.json() ++ transactionMetaJson(meta) - } - - def unconfirmedTxExtendedJson(tx: Transaction): JsObject = tx match { - case leaseCancel: LeaseCancelTransaction => - leaseCancel.json() ++ Json.obj("lease" -> leaseIdToLeaseRef(leaseCancel.leaseId)) - - case t => t.json() - } - - def metaJson(m: TxMeta): JsObject = - TransactionJsonSerializer.applicationStatus(isBlockV5(m.height), m.succeeded) ++ Json.obj("spentComplexity" -> m.spentComplexity) - - private[this] def isBlockV5(height: Int): Boolean = blockchain.isFeatureActivated(BlockchainFeatures.BlockV5, height) - - // Extended lease format. Overrides default - private[this] def leaseIdToLeaseRef( - leaseId: ByteStr, - recipientParamOpt: Option[AddressOrAlias] = None, - amountOpt: Option[Long] = None - ): LeaseRef = { - val detailsOpt = blockchain.leaseDetails(leaseId) - val txMetaOpt = detailsOpt.flatMap(d => blockchain.transactionMeta(d.sourceId)) - val recipientOpt = recipientParamOpt.orElse(detailsOpt.map(_.recipient)) - val resolvedRecipientOpt = recipientOpt.flatMap(r => blockchain.resolveAlias(r).toOption) - - val statusOpt = detailsOpt.map(_.status) - val status = LeaseStatus(statusOpt.contains(LeaseDetails.Status.Active)) - val statusDataOpt = statusOpt.map { - case LeaseDetails.Status.Active => (None, None) - case LeaseDetails.Status.Cancelled(height, txId) => (Some(height), txId) - case LeaseDetails.Status.Expired(height) => (Some(height), None) - } - - LeaseRef( - leaseId, - detailsOpt.map(_.sourceId), - detailsOpt.map(_.sender.toAddress), - resolvedRecipientOpt, - amountOpt orElse detailsOpt.map(_.amount), - txMetaOpt.map(_.height), - status, - statusDataOpt.flatMap(_._1), - statusDataOpt.flatMap(_._2) - ) - } - - private[http] implicit val leaseWrites: OWrites[InvokeScriptResult.Lease] = - LeaseRef.jsonWrites.contramap((l: InvokeScriptResult.Lease) => leaseIdToLeaseRef(l.id, Some(l.recipient), Some(l.amount))) - - private[http] implicit val leaseCancelWrites: OWrites[InvokeScriptResult.LeaseCancel] = - LeaseRef.jsonWrites.contramap((l: InvokeScriptResult.LeaseCancel) => leaseIdToLeaseRef(l.id)) - - // To override nested InvokeScriptResult writes - private[http] implicit lazy val invocationWrites: OWrites[InvokeScriptResult.Invocation] = (i: InvokeScriptResult.Invocation) => - Json.obj( - "dApp" -> i.dApp, - "call" -> i.call, - "payment" -> i.payments, - "stateChanges" -> invokeScriptResultWrites.writes(i.stateChanges) - ) - - private[http] implicit lazy val invokeScriptResultWrites: OWrites[InvokeScriptResult] = { - import InvokeScriptResult.{issueFormat, reissueFormat, burnFormat, sponsorFeeFormat} - Json.writes[InvokeScriptResult] - } - } - - private[this] final case class 
LeaseRef( - id: ByteStr, - originTransactionId: Option[ByteStr], - sender: Option[Address], - recipient: Option[Address], - amount: Option[Long], - height: Option[Int], - status: LeaseStatus = LeaseStatus.active, - cancelHeight: Option[Int] = None, - cancelTransactionId: Option[ByteStr] = None - ) - private[this] object LeaseRef { - import com.wavesplatform.utils.byteStrFormat - implicit val config = JsonConfiguration(optionHandlers = OptionHandlers.WritesNull) - implicit val jsonWrites: OWrites[LeaseRef] = Json.writes[LeaseRef] - } + case class TxMetaEnriched(meta: TransactionMeta, aliases: Option[Set[Alias]] = None) } diff --git a/node/src/main/scala/com/wavesplatform/api/http/alias/AliasApiRoute.scala b/node/src/main/scala/com/wavesplatform/api/http/alias/AliasApiRoute.scala index 09eb90011cf..cd8a11a57fa 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/alias/AliasApiRoute.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/alias/AliasApiRoute.scala @@ -51,11 +51,10 @@ case class AliasApiRoute( private implicit val ess: JsonEntityStreamingSupport = EntityStreamingSupport.json() def aliasOfAddress: Route = (get & path("by-address" / AddrSegment)) { address => - routeTimeout.executeStreamed { + routeTimeout.executeFromObservable { commonApi .aliasesOfAddress(address) .map { case (_, tx) => JsString(tx.alias.toString) } - .toListL - }(identity) + } } } diff --git a/node/src/main/scala/com/wavesplatform/api/http/assets/AssetsApiRoute.scala b/node/src/main/scala/com/wavesplatform/api/http/assets/AssetsApiRoute.scala index 214a50c3d1d..553272d2ad9 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/assets/AssetsApiRoute.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/assets/AssetsApiRoute.scala @@ -11,29 +11,33 @@ import cats.instances.list.* import cats.syntax.alternative.* import cats.syntax.either.* import cats.syntax.traverse.* +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider} import com.wavesplatform.account.Address import com.wavesplatform.api.common.{CommonAccountsApi, CommonAssetsApi} import com.wavesplatform.api.http.* import com.wavesplatform.api.http.ApiError.* -import com.wavesplatform.api.http.assets.AssetsApiRoute.DistributionParams +import com.wavesplatform.api.http.assets.AssetsApiRoute.{AssetDetails, AssetInfo, DistributionParams, assetDetailsSerializer} import com.wavesplatform.api.http.requests.* +import com.wavesplatform.api.http.StreamSerializerUtils.* import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.ValidationError import com.wavesplatform.network.TransactionPublisher import com.wavesplatform.settings.RestAPISettings +import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.state.{AssetDescription, AssetScriptInfo, Blockchain} import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.EthereumTransaction.Invocation +import com.wavesplatform.transaction.{EthereumTransaction, TransactionFactory, TxTimestamp, TxVersion} import com.wavesplatform.transaction.TxValidationError.GenericError import com.wavesplatform.transaction.assets.IssueTransaction import com.wavesplatform.transaction.assets.exchange.Order import com.wavesplatform.transaction.smart.{InvokeExpressionTransaction, InvokeScriptTransaction} -import com.wavesplatform.transaction.{EthereumTransaction, TransactionFactory} import com.wavesplatform.utils.Time import com.wavesplatform.wallet.Wallet import 
io.netty.util.concurrent.DefaultThreadFactory -import monix.eval.Task import monix.execution.Scheduler +import monix.reactive.Observable import play.api.libs.json.* import java.util.concurrent.* @@ -44,6 +48,7 @@ case class AssetsApiRoute( wallet: Wallet, transactionPublisher: TransactionPublisher, blockchain: Blockchain, + compositeBlockchain: () => CompositeBlockchain, time: Time, commonAccountApi: CommonAccountsApi, commonAssetsApi: CommonAssetsApi, @@ -146,46 +151,46 @@ case class AssetsApiRoute( case (errors, _) => InvalidIds(errors) } - def fullAssetInfoJson(asset: IssuedAsset): JsObject = commonAssetsApi.fullInfo(asset) match { - case Some(CommonAssetsApi.AssetInfo(assetInfo, issueTransaction, sponsorBalance)) => - Json.obj( - "assetId" -> asset, - "reissuable" -> assetInfo.reissuable, - "minSponsoredAssetFee" -> (assetInfo.sponsorship match { - case 0 => JsNull - case sponsorship => JsNumber(sponsorship) - }), - "sponsorBalance" -> sponsorBalance, - "quantity" -> JsNumber(BigDecimal(assetInfo.totalVolume)), - "issueTransaction" -> issueTransaction.map(_.json()), - "sequenceInBlock" -> assetInfo.sequenceInBlock - ) - - case None => - Json.obj("assetId" -> asset) - } + def getFullAssetInfo(balances: Seq[(IssuedAsset, Long)]): Seq[AssetInfo] = + balances.view + .zip(commonAssetsApi.fullInfos(balances.map(_._1))) + .map { case ((asset, balance), infoOpt) => + infoOpt match { + case Some(CommonAssetsApi.AssetInfo(assetInfo, issueTransaction, sponsorBalance)) => + AssetInfo.FullAssetInfo( + assetId = asset.id.toString, + reissuable = assetInfo.reissuable, + minSponsoredAssetFee = assetInfo.sponsorship match { + case 0 => None + case sponsorship => Some(sponsorship) + }, + sponsorBalance = sponsorBalance, + quantity = BigDecimal(assetInfo.totalVolume), + issueTransaction = issueTransaction, + balance = balance, + sequenceInBlock = assetInfo.sequenceInBlock + ) + case None => AssetInfo.AssetId(asset.id.toString) + } + } + .toSeq /** @param assets * Some(assets) for specific asset balances, None for a full portfolio */ def balances(address: Address, assets: Option[Seq[IssuedAsset]] = None): Route = { - implicit val jsonStreamingSupport: ToResponseMarshaller[Source[JsObject, NotUsed]] = - jsonStreamMarshaller(s"""{"address":"$address","balances":[""", ",", "]}") + implicit val jsonStreamingSupport: ToResponseMarshaller[Source[AssetInfo, NotUsed]] = + jacksonStreamMarshaller(s"""{"address":"$address","balances":[""", ",", "]}")(AssetsApiRoute.assetInfoSerializer) - routeTimeout.executeStreamed { - assets match { + routeTimeout.executeFromObservable( + (assets match { case Some(assets) => - Task { - assets.map(asset => asset -> blockchain.balance(address, asset)) - } + Observable.eval(assets.map(asset => asset -> blockchain.balance(address, asset))) case None => commonAccountApi .portfolio(address) - .toListL // FIXME: Strict loading because of segfault in leveldb - } - } { case (assetId, balance) => - fullAssetInfoJson(assetId) ++ Json.obj("balance" -> balance) - } + }).concatMapIterable(getFullAssetInfo) + ) } def balance(address: Address, assetId: IssuedAsset): Route = complete(balanceJson(address, assetId)) @@ -242,18 +247,20 @@ case class AssetsApiRoute( if (limit > settings.transactionsByAddressLimit) complete(TooBigArrayAllocation) else { import cats.syntax.either.* - implicit val jsonStreamingSupport: ToResponseMarshaller[Source[JsValue, NotUsed]] = jsonStreamMarshaller() + implicit val jsonStreamingSupport: ToResponseMarshaller[Source[AssetDetails, NotUsed]] = 
jacksonStreamMarshaller()(assetDetailsSerializer) + val compBlockchain = compositeBlockchain() routeTimeout.executeStreamed { commonAccountApi .nftList(address, after) + .concatMapIterable { a => + AssetsApiRoute + .getAssetDetails(compBlockchain)(a, full = true) + .valueOr(err => throw new IllegalArgumentException(err)) + } .take(limit) .toListL - } { case (assetId, assetDesc) => - AssetsApiRoute - .jsonDetails(blockchain)(assetId, assetDesc, full = true) - .valueOr(err => throw new IllegalArgumentException(err)) - } + }(identity) } } @@ -323,6 +330,56 @@ object AssetsApiRoute { } yield limit } + def getAssetDetails(blockchain: Blockchain)(assets: Seq[(IssuedAsset, AssetDescription)], full: Boolean): Either[String, Seq[AssetDetails]] = { + def getTimestamps(ids: Seq[ByteStr]): Either[String, Seq[TxTimestamp]] = { + blockchain.transactionInfos(ids).traverse { infoOpt => + for { + (_, tx) <- infoOpt + .filter { case (tm, _) => tm.succeeded } + .toRight("Failed to find issue/invokeScript/invokeExpression transaction by ID") + ts <- (tx match { + case tx: IssueTransaction => Some(tx.timestamp) + case tx: InvokeScriptTransaction => Some(tx.timestamp) + case tx: InvokeExpressionTransaction => Some(tx.timestamp) + case tx @ EthereumTransaction(_: Invocation, _, _, _) => Some(tx.timestamp) + case _ => None + }).toRight("No issue/invokeScript/invokeExpression transaction found with the given asset ID") + } yield ts + } + } + + getTimestamps(assets.map { case (_, description) => description.originTransactionId }).map { infos => + assets.zip(infos).map { case ((id, description), timestamp) => + AssetDetails( + assetId = id.id.toString, + issueHeight = description.issueHeight, + issueTimestamp = timestamp, + issuer = description.issuer.toAddress.toString, + issuerPublicKey = description.issuer.toString, + name = description.name.toStringUtf8, + description = description.description.toStringUtf8, + decimals = description.decimals, + reissuable = description.reissuable, + quantity = BigDecimal(description.totalVolume), + scripted = description.script.nonEmpty, + minSponsoredAssetFee = description.sponsorship match { + case 0 => None + case sponsorship => Some(sponsorship) + }, + originTransactionId = description.originTransactionId.toString, + sequenceInBlock = description.sequenceInBlock, + scriptDetails = description.script.filter(_ => full).map { case AssetScriptInfo(script, complexity) => + AssetScriptDetails( + scriptComplexity = BigDecimal(complexity), + script = script.bytes().base64, + scriptText = script.expr.toString // [WAIT] Script.decompile(script) + ) + } + ) + } + } + } + def jsonDetails(blockchain: Blockchain)(id: IssuedAsset, description: AssetDescription, full: Boolean): Either[String, JsObject] = { // (timestamp, height) def additionalInfo(id: ByteStr): Either[String, Long] = @@ -373,4 +430,121 @@ object AssetsApiRoute { } ) } + + case class AssetScriptDetails( + scriptComplexity: BigDecimal, + script: String, + scriptText: String + ) + + case class AssetDetails( + assetId: String, + issueHeight: Int, + issueTimestamp: Long, + issuer: String, + issuerPublicKey: String, + name: String, + description: String, + decimals: Int, + reissuable: Boolean, + quantity: BigDecimal, + scripted: Boolean, + minSponsoredAssetFee: Option[Long], + originTransactionId: String, + sequenceInBlock: Int, + scriptDetails: Option[AssetScriptDetails] + ) + + sealed trait AssetInfo + object AssetInfo { + case class FullAssetInfo( + assetId: String, + reissuable: Boolean, + minSponsoredAssetFee: Option[Long], + 
sponsorBalance: Option[Long], + quantity: BigDecimal, + issueTransaction: Option[IssueTransaction], + balance: Long, + sequenceInBlock: Int + ) extends AssetInfo + + case class AssetId(assetId: String) extends AssetInfo + } + + def assetScriptDetailsSerializer(numbersAsString: Boolean): JsonSerializer[AssetScriptDetails] = + (details: AssetScriptDetails, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeNumberField("scriptComplexity", details.scriptComplexity, numbersAsString) + gen.writeStringField("script", details.script) + gen.writeStringField("scriptText", details.scriptText) + gen.writeEndObject() + } + + def assetDetailsSerializer(numbersAsString: Boolean): JsonSerializer[AssetDetails] = + (details: AssetDetails, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeStringField("assetId", details.assetId) + gen.writeNumberField("issueHeight", details.issueHeight, numbersAsString) + gen.writeNumberField("issueTimestamp", details.issueTimestamp, numbersAsString) + gen.writeStringField("issuer", details.issuer) + gen.writeStringField("issuerPublicKey", details.issuerPublicKey) + gen.writeStringField("name", details.name) + gen.writeStringField("description", details.description) + gen.writeNumberField("decimals", details.decimals, numbersAsString) + gen.writeBooleanField("reissuable", details.reissuable) + gen.writeNumberField("quantity", details.quantity, numbersAsString) + gen.writeBooleanField("scripted", details.scripted) + details.minSponsoredAssetFee.foreach(fee => gen.writeNumberField("minSponsoredAssetFee", fee, numbersAsString)) + gen.writeStringField("originTransactionId", details.originTransactionId) + gen.writeNumberField("sequenceInBlock", details.sequenceInBlock, numbersAsString) + details.scriptDetails.foreach(sd => gen.writeValueField("scriptDetails", sd)(assetScriptDetailsSerializer(numbersAsString), serializers)) + gen.writeEndObject() + } + + def issueTxSerializer(numbersAsString: Boolean): JsonSerializer[IssueTransaction] = + (tx: IssueTransaction, gen: JsonGenerator, serializers: SerializerProvider) => { + gen.writeStartObject() + gen.writeNumberField("type", tx.tpe.id, numbersAsString) + gen.writeStringField("id", tx.id().toString) + gen.writeNumberField("fee", tx.assetFee._2, numbersAsString) + tx.assetFee._1.maybeBase58Repr.fold(gen.writeNullField("feeAssetId"))(gen.writeStringField("feeAssetId", _)) + gen.writeNumberField("timestamp", tx.timestamp, numbersAsString) + gen.writeNumberField("version", tx.version, numbersAsString) + if (tx.version >= TxVersion.V2) gen.writeNumberField("chainId", tx.chainId, numbersAsString) else gen.writeNullField("chainId") + gen.writeStringField("sender", tx.sender.toAddress(tx.chainId).toString) + gen.writeStringField("senderPublicKey", tx.sender.toString) + gen.writeArrayField("proofs")(gen => tx.proofs.proofs.foreach(p => gen.writeString(p.toString))) + gen.writeStringField("assetId", tx.assetId.toString) + gen.writeStringField("name", tx.name.toStringUtf8) + gen.writeNumberField("quantity", tx.quantity.value, numbersAsString) + gen.writeBooleanField("reissuable", tx.reissuable) + gen.writeNumberField("decimals", tx.decimals.value, numbersAsString) + gen.writeStringField("description", tx.description.toStringUtf8) + if (tx.version >= TxVersion.V2) { + tx.script.map(_.bytes().base64).fold(gen.writeNullField("script"))(gen.writeStringField("script", _)) + } + if (tx.usesLegacySignature) gen.writeStringField("signature", tx.signature.toString) + 
gen.writeEndObject() + } + + def assetInfoSerializer(numbersAsString: Boolean): JsonSerializer[AssetInfo] = + (value: AssetInfo, gen: JsonGenerator, serializers: SerializerProvider) => { + value match { + case info: AssetInfo.FullAssetInfo => + gen.writeStartObject() + gen.writeStringField("assetId", info.assetId) + gen.writeBooleanField("reissuable", info.reissuable) + info.minSponsoredAssetFee.foreach(gen.writeNumberField("minSponsoredAssetFee", _, numbersAsString)) + info.sponsorBalance.foreach(gen.writeNumberField("sponsorBalance", _, numbersAsString)) + gen.writeNumberField("quantity", info.quantity, numbersAsString) + info.issueTransaction.foreach(tx => gen.writeValueField("issueTransaction", tx)(issueTxSerializer(numbersAsString), serializers)) + gen.writeNumberField("balance", info.balance, numbersAsString) + gen.writeNumberField("sequenceInBlock", info.sequenceInBlock, numbersAsString) + gen.writeEndObject() + case assetId: AssetInfo.AssetId => + gen.writeStartObject() + gen.writeStringField("assetId", assetId.assetId) + gen.writeEndObject() + } + } } diff --git a/node/src/main/scala/com/wavesplatform/api/http/utils/UtilsEvaluator.scala b/node/src/main/scala/com/wavesplatform/api/http/utils/UtilsEvaluator.scala index ab9bb21610e..07305438f29 100644 --- a/node/src/main/scala/com/wavesplatform/api/http/utils/UtilsEvaluator.scala +++ b/node/src/main/scala/com/wavesplatform/api/http/utils/UtilsEvaluator.scala @@ -91,6 +91,7 @@ object UtilsEvaluator { dAppPk, Set.empty[Address], limitedExecution = false, + enableExecutionLog = true, limit, remainingCalls = ContractLimits.MaxSyncDAppCalls(script.stdLibVersion), availableActions = ContractLimits.MaxCallableActionsAmountBeforeV6(script.stdLibVersion), @@ -114,7 +115,8 @@ object UtilsEvaluator { script.stdLibVersion, correctFunctionCallScope = blockchain.checkEstimatorSumOverflow, newMode = blockchain.newEvaluatorMode, - checkConstructorArgsTypes = true + checkConstructorArgsTypes = true, + enableExecutionLog = true ) .value() .leftMap { case (err, _, log) => InvokeRejectError(err.message, log) } @@ -127,26 +129,29 @@ object UtilsEvaluator { .bimap( _ => Right(Diff.empty), r => - InvokeDiffsCommon.processActions( - StructuredCallableActions(r.actions, blockchain), - ds.stdLibVersion, - dAppAddress, - dAppPk, - usedComplexity, - invoke, - CompositeBlockchain(blockchain, paymentsDiff), - System.currentTimeMillis(), - isSyncCall = false, - limitedExecution = false, - limit, - Nil, - log - ).resultE + InvokeDiffsCommon + .processActions( + StructuredCallableActions(r.actions, blockchain), + ds.stdLibVersion, + dAppAddress, + dAppPk, + usedComplexity, + invoke, + CompositeBlockchain(blockchain, paymentsDiff), + System.currentTimeMillis(), + isSyncCall = false, + limitedExecution = false, + limit, + Nil, + enableExecutionLog = true, + log + ) + .resultE ) .merge totalDiff <- diff.combineE(paymentsDiff) - _ <- TransactionDiffer.validateBalance(blockchain, InvokeScript, addWavesToDefaultInvoker(totalDiff)) - _ <- TransactionDiffer.assetsVerifierDiff(blockchain, invoke, verify = true, totalDiff, Int.MaxValue).resultE + _ <- TransactionDiffer.validateBalance(blockchain, InvokeScript, addWavesToDefaultInvoker(totalDiff)) + _ <- TransactionDiffer.assetsVerifierDiff(blockchain, invoke, verify = true, totalDiff, Int.MaxValue, enableExecutionLog = true).resultE rootScriptResult = diff.scriptResults.headOption.map(_._2).getOrElse(InvokeScriptResult.empty) innerScriptResult = environment.currentDiff.scriptResults.values.fold(InvokeScriptResult.empty)(_ |+| 
_) } yield (evaluated, usedComplexity, log, innerScriptResult |+| rootScriptResult) diff --git a/node/src/main/scala/com/wavesplatform/database/BloomFilter.scala b/node/src/main/scala/com/wavesplatform/database/BloomFilter.scala deleted file mode 100644 index dee56aad57b..00000000000 --- a/node/src/main/scala/com/wavesplatform/database/BloomFilter.scala +++ /dev/null @@ -1,107 +0,0 @@ -package com.wavesplatform.database - -import java.io._ - -import com.google.common.hash.{Funnels, HashFunction, Hashing, HashingInputStream, HashingOutputStream, BloomFilter => GBloomFilter} -import com.google.common.primitives.Ints -import com.wavesplatform.utils.ScorexLogging -import org.iq80.leveldb.DB - -import scala.util.Try -import scala.util.control.NonFatal - -trait BloomFilter { - def mightContain(key: Array[Byte]): Boolean - def put(key: Array[Byte]): Unit -} - -private[database] class Wrapper(underlying: GBloomFilter[Array[Byte]]) extends BloomFilter { - override def mightContain(key: Array[Byte]): Boolean = underlying.mightContain(key) - override def put(key: Array[Byte]): Unit = underlying.put(key) -} - -private[database] class BloomFilterImpl(underlying: GBloomFilter[Array[Byte]], directory: String, filterName: String, db: DB) - extends Wrapper(underlying) - with ScorexLogging { - import com.wavesplatform.database.BloomFilter._ - def save(height: Int): Unit = { - val file = filterFile(directory, filterName) - log.info(s"Saving bloom filter to ${file.getAbsolutePath}") - val out = new HashingOutputStream( - defaultHash, - new BufferedOutputStream(new FileOutputStream(file), BufferSize) - ) - try { - out.write(Ints.toByteArray(height)) - underlying.writeTo(out) - } finally out.close() - - val checksum = out.hash() - db.readWrite(_.put(Keys.bloomFilterChecksum(filterName), checksum.asBytes())) - log.info(s"Filter hash: $checksum") - } -} - -object BloomFilter extends ScorexLogging { - val BufferSize = 2 * 1024 * 1024 - val Suffix = ".bf" - def defaultHash: HashFunction = Hashing.sha256() - - private[database] object AlwaysEmpty extends BloomFilter { - override def mightContain(key: Array[Byte]): Boolean = true - override def put(key: Array[Byte]): Unit = {} - } - - private[database] def filterFile(directory: String, filterName: String): File = new File(directory, filterName + Suffix) - - def tryLoad( - db: DB, - filterName: String, - directory: String, - expectedHeight: Int - ): Try[GBloomFilter[Array[Byte]]] = Try { - val storedChecksum = db.get(Keys.bloomFilterChecksum(filterName)) - val ff = filterFile(directory, filterName) - val in = new HashingInputStream(defaultHash, new BufferedInputStream(new FileInputStream(ff), BufferSize)) - log.debug(s"Loading bloom filter from ${ff.getAbsolutePath}") - try { - val heightBytes = new Array[Byte](java.lang.Integer.BYTES) - in.read(heightBytes) - val height = Ints.fromByteArray(heightBytes) - val filter = GBloomFilter.readFrom(in, Funnels.byteArrayFunnel()) - val code = in.hash() - require(code.asBytes().sameElements(storedChecksum), "checksum mismatch") - require(height == expectedHeight, "filter is stale") - filter - } finally in.close() - } - - private def populate(db: DB, keyTag: KeyTags.KeyTag, filterName: String, expectedInsertions: Long) = { - log.info(s"Populating bloom filter for $filterName, this can take a while.") - val filter = GBloomFilter.create(Funnels.byteArrayFunnel(), expectedInsertions) - db.iterateOver(keyTag)(e => filter.put(e.getKey.drop(2))) - log.info(s"Populating bloom filter for $filterName finished.") - filter - } - - def 
loadOrPopulate( - db: DB, - directory: String, - filterName: String, - expectedHeight: Int, - keyTag: KeyTags.KeyTag, - expectedInsertions: Long - ): BloomFilterImpl = { - val ff = filterFile(directory, filterName) - val underlying = tryLoad(db, filterName, directory, expectedHeight).recover { - case _: FileNotFoundException => - log.trace(s"Filter file ${ff.getAbsoluteFile} is missing, will re-build the filter from scratch") - populate(db, keyTag, filterName, expectedInsertions) - case NonFatal(e) => - log.debug(s"Could not load bloom filter from ${ff.getAbsolutePath}", e) - populate(db, keyTag, filterName, expectedInsertions) - }.get - - new BloomFilterImpl(underlying, directory, filterName, db) - } -} diff --git a/node/src/main/scala/com/wavesplatform/database/Caches.scala b/node/src/main/scala/com/wavesplatform/database/Caches.scala index c528394d888..1e7e449094d 100644 --- a/node/src/main/scala/com/wavesplatform/database/Caches.scala +++ b/node/src/main/scala/com/wavesplatform/database/Caches.scala @@ -1,16 +1,17 @@ package com.wavesplatform.database -import java.util +import java.{lang, util} + import cats.data.Ior -import cats.syntax.monoid.* -import cats.syntax.option.* -import com.google.common.cache.* +import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} import com.google.common.collect.ArrayListMultimap +import com.google.protobuf.ByteString import com.wavesplatform.account.{Address, Alias} import com.wavesplatform.block.{Block, SignedBlockHeader} import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.protobuf.EthereumTransactionMeta -import com.wavesplatform.metrics.LevelDBStats +import com.wavesplatform.database.protobuf.{EthereumTransactionMeta, BlockMeta as PBBlockMeta} +import com.wavesplatform.protobuf.ByteStringExt +import com.wavesplatform.protobuf.block.PBBlocks import com.wavesplatform.settings.DBSettings import com.wavesplatform.state.* import com.wavesplatform.state.DiffToStateApplier.PortfolioUpdates @@ -21,90 +22,124 @@ import com.wavesplatform.utils.ObservedLoadingCache import monix.reactive.Observer import scala.collection.immutable.VectorMap -import scala.concurrent.duration.* import scala.jdk.CollectionConverters.* import scala.reflect.ClassTag -abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) extends Blockchain with Storage { +abstract class Caches extends Blockchain with Storage { import Caches.* val dbSettings: DBSettings @volatile - private var current = (loadHeight(), loadScore(), loadLastBlock()) + private var current = loadCurrentBlock() + + private def loadCurrentBlock() = { + val height = loadHeight() + CurrentBlockInfo(height, loadBlockMeta(height), loadTxs(height)) + } - protected def loadHeight(): Int - override def height: Int = current._1 + protected def loadHeight(): Height + protected def loadBlockMeta(height: Height): Option[PBBlockMeta] + protected def loadTxs(height: Height): Seq[Transaction] - protected def loadScore(): BigInt - override def score: BigInt = current._2 + override def height: Int = current.height - protected def loadLastBlock(): Option[Block] - override def lastBlock: Option[Block] = current._3 + override def score: BigInt = current.score - def loadScoreOf(blockId: ByteStr): Option[BigInt] + override def lastBlock: Option[Block] = current.block - def loadBlockInfo(height: Int): Option[SignedBlockHeader] - override def blockHeader(height: Int): Option[SignedBlockHeader] = current match { - case (`height`, _, maybeBlock) => maybeBlock.map(b => 
SignedBlockHeader(b.header, b.signature)) - case _ => loadBlockInfo(height) - } + override def blockHeader(height: Int): Option[SignedBlockHeader] = + if (current.height == height) current.signedHeader else loadBlockMeta(Height(height)).map(toSignedHeader) + + override def hitSource(height: Int): Option[ByteStr] = + if (current.height == height) current.hitSource else loadBlockMeta(Height(height)).map(toHitSource) def loadHeightOf(blockId: ByteStr): Option[Int] - override def heightOf(blockId: ByteStr): Option[Int] = current match { - case (height, _, Some(block)) if block.id() == blockId => Some(height) - case _ => loadHeightOf(blockId) - } - private val blocksTs = new util.TreeMap[Int, Long] // Height -> block timestamp, assume sorted by key. - private var oldestStoredBlockTimestamp = Long.MaxValue - private val transactionIds = new util.HashMap[ByteStr, Int]() // TransactionId -> height - protected def forgetTransaction(id: ByteStr): Unit = transactionIds.remove(id) - override def containsTransaction(tx: Transaction): Boolean = transactionIds.containsKey(tx.id()) || { - if (tx.timestamp - 2.hours.toMillis <= oldestStoredBlockTimestamp) { - LevelDBStats.miss.record(1) - transactionMeta(tx.id()).nonEmpty - } else { - false - } - } - protected def forgetBlocks(): Unit = { - val iterator = blocksTs.entrySet().iterator() - val (oldestBlock, oldestTs) = if (iterator.hasNext) { - val e = iterator.next() - e.getKey -> e.getValue - } else { - 0 -> Long.MaxValue - } - oldestStoredBlockTimestamp = oldestTs - val bts = this.lastBlock.fold(0L)(_.header.timestamp) - dbSettings.rememberBlocks.toMillis - blocksTs.entrySet().removeIf(_.getValue < bts) - transactionIds.entrySet().removeIf(_.getValue < oldestBlock) - } + override def heightOf(blockId: ByteStr): Option[Int] = if (current.id.contains(blockId)) Some(height) else loadHeightOf(blockId) - private val leaseBalanceCache: LoadingCache[Address, LeaseBalance] = cache(dbSettings.maxCacheSize, loadLeaseBalance) - protected def loadLeaseBalance(address: Address): LeaseBalance + protected val leaseBalanceCache: LoadingCache[Address, CurrentLeaseBalance] = + cache(dbSettings.maxCacheSize, loadLeaseBalance, keys => loadLeaseBalances(keys.asScala.toSeq).asJava) + protected def loadLeaseBalance(address: Address): CurrentLeaseBalance + protected def loadLeaseBalances(addresses: Seq[Address]): Map[Address, CurrentLeaseBalance] protected def discardLeaseBalance(address: Address): Unit = leaseBalanceCache.invalidate(address) - override def leaseBalance(address: Address): LeaseBalance = leaseBalanceCache.get(address) + override def leaseBalance(address: Address): LeaseBalance = { + val currentLeaseBalance = leaseBalanceCache.get(address) + LeaseBalance(currentLeaseBalance.in, currentLeaseBalance.out) + } - private val balancesCache: LoadingCache[(Address, Asset), java.lang.Long] = - observedCache(dbSettings.maxCacheSize * 16, spendableBalanceChanged, loadBalance) - protected def clearBalancesCache(): Unit = balancesCache.invalidateAll() + override def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] = { + leaseBalanceCache + .getAll(addresses.asJava) + .asScala + .view + .map { case (address, leaseBalance) => + address -> LeaseBalance(leaseBalance.in, leaseBalance.out) + } + .toMap + } + + protected val balancesCache: LoadingCache[(Address, Asset), CurrentBalance] = + cache(dbSettings.maxCacheSize * 16, loadBalance, keys => loadBalances(keys.asScala.toSeq).asJava) protected def discardBalance(key: (Address, Asset)): Unit = 
balancesCache.invalidate(key) - override def balance(address: Address, mayBeAssetId: Asset): Long = balancesCache.get(address -> mayBeAssetId) - protected def loadBalance(req: (Address, Asset)): Long + override def balance(address: Address, mayBeAssetId: Asset): Long = balancesCache.get(address -> mayBeAssetId).balance + + override def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), Long] = + balancesCache + .getAll(req.asJava) + .asScala + .view + .map { case ((address, asset), balance) => + (address, asset) -> balance.balance + } + .toMap + + def loadCacheData(addresses: Set[Address], orders: Set[ByteStr]): Unit = { + addressIdCache.getAll(addresses.asJava) + balancesCache.getAll(addresses.map(_ -> Waves).asJava) + leaseBalanceCache.getAll(addresses.asJava) + volumeAndFeeCache.getAll(orders.asJava) + } + + override def wavesBalances(addresses: Seq[Address]): Map[Address, Long] = + balancesCache + .getAll(addresses.map(_ -> Waves).asJava) + .asScala + .view + .map { case ((address, _), balance) => + address -> balance.balance + } + .toMap + protected def loadBalance(req: (Address, Asset)): CurrentBalance + protected def loadBalances(req: Seq[(Address, Asset)]): Map[(Address, Asset), CurrentBalance] + protected def loadWavesBalances(req: Seq[(Address, Asset)]): Map[(Address, Asset), CurrentBalance] private val assetDescriptionCache: LoadingCache[IssuedAsset, Option[AssetDescription]] = cache(dbSettings.maxCacheSize, loadAssetDescription) protected def loadAssetDescription(asset: IssuedAsset): Option[AssetDescription] protected def discardAssetDescription(asset: IssuedAsset): Unit = assetDescriptionCache.invalidate(asset) override def assetDescription(asset: IssuedAsset): Option[AssetDescription] = assetDescriptionCache.get(asset) - private val volumeAndFeeCache: LoadingCache[ByteStr, VolumeAndFee] = cache(dbSettings.maxCacheSize, loadVolumeAndFee) - protected def loadVolumeAndFee(orderId: ByteStr): VolumeAndFee - protected def discardVolumeAndFee(orderId: ByteStr): Unit = volumeAndFeeCache.invalidate(orderId) - override def filledVolumeAndFee(orderId: ByteStr): VolumeAndFee = volumeAndFeeCache.get(orderId) + private val volumeAndFeeCache: LoadingCache[ByteStr, CurrentVolumeAndFee] = + cache(dbSettings.maxCacheSize, loadVolumeAndFee, keys => loadVolumesAndFees(keys.asScala.toSeq).asJava) + protected def loadVolumeAndFee(orderId: ByteStr): CurrentVolumeAndFee + protected def loadVolumesAndFees(orders: Seq[ByteStr]): Map[ByteStr, CurrentVolumeAndFee] + protected def discardVolumeAndFee(orderId: ByteStr): Unit = volumeAndFeeCache.invalidate(orderId) + override def filledVolumeAndFee(orderId: ByteStr): VolumeAndFee = { + val curVf = volumeAndFeeCache.get(orderId) + VolumeAndFee(curVf.volume, curVf.fee) + } - private val scriptCache: LoadingCache[Address, Option[AccountScriptInfo]] = cache(dbSettings.maxCacheSize, loadScript) + private val scriptCache: LoadingCache[Address, Option[AccountScriptInfo]] = + CacheBuilder + .newBuilder() + .maximumWeight(128 << 20) + .weigher((_: Address, asi: Option[AccountScriptInfo]) => asi.map(_.script.bytes().size).getOrElse(0)) + .recordStats() + .build(new CacheLoader[Address, Option[AccountScriptInfo]] { + override def load(key: Address): Option[AccountScriptInfo] = loadScript(key) + override def loadAll(keys: lang.Iterable[? 
<: Address]): util.Map[Address, Option[AccountScriptInfo]] = + new util.HashMap[Address, Option[AccountScriptInfo]]() + }) protected def loadScript(address: Address): Option[AccountScriptInfo] protected def hasScriptBytes(address: Address): Boolean protected def discardScript(address: Address): Unit = scriptCache.invalidate(address) @@ -124,29 +159,42 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten private var lastAddressId = loadMaxAddressId() protected def loadMaxAddressId(): Long - private val addressIdCache: LoadingCache[Address, Option[AddressId]] = cache(dbSettings.maxCacheSize, loadAddressId) + private val addressIdCache: LoadingCache[Address, Option[AddressId]] = + cache(dbSettings.maxCacheSize, loadAddressId, keys => loadAddressIds(keys.asScala.toSeq).asJava) protected def loadAddressId(address: Address): Option[AddressId] + protected def loadAddressIds(addresses: Seq[Address]): Map[Address, Option[AddressId]] protected def addressIdWithFallback(address: Address, newAddresses: Map[Address, AddressId]): AddressId = newAddresses.getOrElse(address, addressIdCache.get(address).get) - private val accountDataCache: LoadingCache[(Address, String), Option[DataEntry[?]]] = cache( + private val accountDataCache: LoadingCache[(Address, String), CurrentData] = cache( dbSettings.maxCacheSize, { case (k, v) => loadAccountData(k, v) } ) - override def accountData(acc: Address, key: String): Option[DataEntry[?]] = accountDataCache.get((acc, key)) + override def accountData(acc: Address, key: String): Option[DataEntry[?]] = + accountDataCache.get((acc, key)).entry match { + case _: EmptyDataEntry => None + case other => Some(other) + } + protected def discardAccountData(addressWithKey: (Address, String)): Unit = accountDataCache.invalidate(addressWithKey) - protected def loadAccountData(acc: Address, key: String): Option[DataEntry[?]] + protected def loadAccountData(acc: Address, key: String): CurrentData private[database] def addressId(address: Address): Option[AddressId] = addressIdCache.get(address) + private[database] def addressIds(addresses: Seq[Address]): Map[Address, Option[AddressId]] = + addressIdCache.getAll(addresses.asJava).asScala.toMap protected val aliasCache: LoadingCache[Alias, Option[Address]] = cache(dbSettings.maxCacheSize, loadAlias) protected def loadAlias(alias: Alias): Option[Address] protected def discardAlias(alias: Alias): Unit = aliasCache.invalidate(alias) + protected val blockHeightCache: LoadingCache[ByteStr, Option[Int]] = cache(dbSettings.maxRollbackDepth + 1000, loadBlockHeight) + protected def loadBlockHeight(blockId: ByteStr): Option[Int] + protected def discardBlockHeight(blockId: ByteStr): Unit = blockHeightCache.invalidate(blockId) + @volatile protected var approvedFeaturesCache: Map[Short, Int] = loadApprovedFeatures() protected def loadApprovedFeatures(): Map[Short, Int] @@ -159,24 +207,21 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten // noinspection ScalaStyle protected def doAppend( - block: Block, + blockMeta: PBBlockMeta, carry: Long, newAddresses: Map[Address, AddressId], - balances: Map[AddressId, Map[Asset, Long]], - leaseBalances: Map[AddressId, LeaseBalance], + balances: Map[(AddressId, Asset), (CurrentBalance, BalanceNode)], + leaseBalances: Map[AddressId, (CurrentLeaseBalance, LeaseBalanceNode)], addressTransactions: util.Map[AddressId, util.Collection[TransactionId]], leaseStates: Map[ByteStr, LeaseDetails], issuedAssets: VectorMap[IssuedAsset, NewAssetInfo], 
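// Balance-like arguments in this signature are (Current*, *Node) pairs: the Current* value
// holds the latest state together with the height it was written at, while the *Node record
// chains to the previous entry via prevHeight (see CurrentBalance/BalanceNode and the related
// case classes added to Keys.scala in this patch).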
reissuedAssets: Map[IssuedAsset, Ior[AssetInfo, AssetVolumeInfo]], - filledQuantity: Map[ByteStr, VolumeAndFee], + filledQuantity: Map[ByteStr, (CurrentVolumeAndFee, VolumeAndFeeNode)], scripts: Map[AddressId, Option[AccountScriptInfo]], assetScripts: Map[IssuedAsset, Option[AssetScriptInfo]], - data: Map[Address, AccountDataInfo], + data: Map[(Address, String), (CurrentData, DataNode)], aliases: Map[Alias, AddressId], sponsorship: Map[IssuedAsset, Sponsorship], - totalFee: Long, - reward: Option[Long], - hitSource: ByteStr, scriptResults: Map[ByteStr, InvokeScriptResult], transactionMeta: Seq[(TxMeta, Transaction)], stateHash: StateHashBuilder.Result, @@ -184,7 +229,21 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten ): Unit override def append(diff: Diff, carryFee: Long, totalFee: Long, reward: Option[Long], hitSource: ByteStr, block: Block): Unit = { - val newHeight = current._1 + 1 + val newHeight = current.height + 1 + val newScore = block.blockScore() + current.score + val newMeta = PBBlockMeta( + Some(PBBlocks.protobuf(block.header)), + ByteString.copyFrom(block.signature.arr), + if (block.header.version >= Block.ProtoBlockVersion) ByteString.copyFrom(block.id().arr) else ByteString.EMPTY, + newHeight, + block.bytes().length, + block.transactionData.size, + totalFee, + reward.getOrElse(0), + if (block.header.version >= Block.ProtoBlockVersion) ByteString.copyFrom(hitSource.arr) else ByteString.EMPTY, + ByteString.copyFrom(newScore.toByteArray), + current.meta.fold(settings.genesisSettings.initialBalance)(_.totalWavesAmount) + reward.getOrElse(0L) + ) val stateHash = new StateHashBuilder @@ -202,40 +261,64 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten val PortfolioUpdates(updatedBalances, updatedLeaseBalances) = DiffToStateApplier.portfolios(this, diff) - val leaseBalances = updatedLeaseBalances.map { case (address, lb) => addressIdWithFallback(address, newAddressIds) -> lb } + val leaseBalances = updatedLeaseBalances.map { case (address, lb) => + val prevCurrentLeaseBalance = leaseBalanceCache.get(address) + address -> + ( + CurrentLeaseBalance(lb.in, lb.out, Height(newHeight), prevCurrentLeaseBalance.height), + LeaseBalanceNode(lb.in, lb.out, prevCurrentLeaseBalance.height) + ) + } val newFills = for { (orderId, fillInfo) <- diff.orderFills - } yield orderId -> volumeAndFeeCache.get(orderId).combine(fillInfo) + } yield { + val prev = volumeAndFeeCache.get(orderId) + orderId -> (CurrentVolumeAndFee(prev.volume + fillInfo.volume, prev.fee + fillInfo.fee, Height(newHeight), prev.height), VolumeAndFeeNode( + prev.volume + fillInfo.volume, + prev.fee + fillInfo.fee, + prev.height + )) + } val transactionMeta = Seq.newBuilder[(TxMeta, Transaction)] val addressTransactions = ArrayListMultimap.create[AddressId, TransactionId]() for (nti <- diff.transactions) { - transactionIds.put(nti.transaction.id(), newHeight) transactionMeta += (TxMeta(Height(newHeight), nti.applied, nti.spentComplexity) -> nti.transaction) for (addr <- nti.affected) { addressTransactions.put(addressIdWithFallback(addr, newAddressIds), TransactionId(nti.transaction.id())) } } - current = (newHeight, current._2 + block.blockScore(), Some(block)) + current = CurrentBlockInfo(Height(newHeight), Some(newMeta), block.transactionData) - for { + val updatedBalanceNodes = for { (address, assets) <- updatedBalances (asset, balance) <- assets - } asset match { - case Waves => stateHash.addWavesBalance(address, balance) - case asset: IssuedAsset => 
stateHash.addAssetBalance(address, asset, balance) + } yield { + asset match { + case Waves => stateHash.addWavesBalance(address, balance) + case asset: IssuedAsset => stateHash.addAssetBalance(address, asset, balance) + } + val key = (address, asset) + val prevCurrentBalance = balancesCache.get(key) + key -> + (CurrentBalance(balance, Height(newHeight), prevCurrentBalance.height), BalanceNode(balance, prevCurrentBalance.height)) } updatedLeaseBalances foreach { case (address, balance) => stateHash.addLeaseBalance(address, balance.in, balance.out) } - for { + val updatedData = for { (address, data) <- diff.accountData - entry <- data.data.values - } stateHash.addDataEntry(address, entry) + entry <- data.values + } yield { + stateHash.addDataEntry(address, entry) + val entryKey = (address, entry.key) + val prevHeight = accountDataCache.get(entryKey).height + entryKey -> (CurrentData(entry, Height(newHeight), prevHeight) -> DataNode(entry, prevHeight)) + } diff.aliases.foreach { case (alias, address) => stateHash.addAlias(address, alias.name) @@ -266,11 +349,11 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten } doAppend( - block, + newMeta, carryFee, newAddressIds, - updatedBalances.map { case (a, v) => addressIdWithFallback(a, newAddressIds) -> v }, - leaseBalances, + updatedBalanceNodes.map { case ((address, asset), v) => (addressIdWithFallback(address, newAddressIds), asset) -> v }, + leaseBalances.map { case (address, balance) => addressIdWithFallback(address, newAddressIds) -> balance }, addressTransactions.asMap(), diff.leaseState, diff.issuedAssets, @@ -278,48 +361,30 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten newFills, diff.scripts.map { case (address, s) => addressIdWithFallback(address, newAddressIds) -> s }, diff.assetScripts, - diff.accountData, + updatedData, diff.aliases.map { case (a, address) => a -> addressIdWithFallback(address, newAddressIds) }, diff.sponsorship, - totalFee, - reward, - hitSource, diff.scriptResults, transactionMeta.result(), stateHash.result(), diff.ethereumTransactionMeta ) - val emptyData = Map.empty[(Address, String), Option[DataEntry[?]]] - - val newData = - diff.accountData.foldLeft(emptyData) { case (data, (a, d)) => - val updData = data ++ d.data.map { case (k, v) => - (a, k) -> v.some - } - - updData - } - val assetsToInvalidate = diff.issuedAssets.keySet ++ diff.updatedAssets.keySet ++ diff.sponsorship.keySet ++ diff.assetScripts.keySet - for ((address, id) <- newAddressIds) addressIdCache.put(address, Some(id)) - for ((orderId, volumeAndFee) <- newFills) volumeAndFeeCache.put(orderId, volumeAndFee) - for ((address, assetMap) <- updatedBalances; (asset, balance) <- assetMap) balancesCache.put((address, asset), balance) - for (id <- assetsToInvalidate) assetDescriptionCache.invalidate(id) - for ((alias, address) <- diff.aliases) aliasCache.put(alias, Some(address)) - leaseBalanceCache.putAll(updatedLeaseBalances.asJava) + for ((address, id) <- newAddressIds) addressIdCache.put(address, Some(id)) + for ((orderId, (volumeAndFee, _)) <- newFills) volumeAndFeeCache.put(orderId, volumeAndFee) + for (((address, asset), (newBalance, _)) <- updatedBalanceNodes) balancesCache.put((address, asset), newBalance) + for (id <- assetsToInvalidate) assetDescriptionCache.invalidate(id) + for ((alias, address) <- diff.aliases) aliasCache.put(alias, Some(address)) + leaseBalanceCache.putAll(leaseBalances.view.mapValues(_._1).toMap.asJava) scriptCache.putAll(diff.scripts.asJava) 
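// Once doAppend has persisted the block, the per-key caches are updated in place with the
// freshly built Current* values (and stale asset descriptions invalidated), so immediate
// follow-up reads can be served from memory.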
assetScriptCache.putAll(diff.assetScripts.asJava) - blocksTs.put(newHeight, block.header.timestamp) - - accountDataCache.putAll(newData.asJava) - - forgetBlocks() + accountDataCache.putAll(updatedData.view.mapValues(_._1).toMap.asJava) } protected def doRollback(targetHeight: Int): Seq[(Block, ByteStr)] @@ -334,7 +399,7 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten ) discardedBlocks = doRollback(height) } yield { - current = (loadHeight(), loadScore(), loadLastBlock()) + current = loadCurrentBlock() activatedFeaturesCache = loadActivatedFeatures() approvedFeaturesCache = loadApprovedFeatures() @@ -344,12 +409,30 @@ abstract class Caches(spendableBalanceChanged: Observer[(Address, Asset)]) exten } object Caches { - def cache[K <: AnyRef, V <: AnyRef](maximumSize: Int, loader: K => V): LoadingCache[K, V] = + case class CurrentBlockInfo(height: Height, meta: Option[PBBlockMeta], transactions: Seq[Transaction]) { + lazy val score: BigInt = meta.filterNot(_.totalScore.isEmpty).fold(BigInt(0))(m => BigInt(m.totalScore.toByteArray)) + lazy val block: Option[Block] = signedHeader.map(h => Block(h.header, h.signature, transactions)) + lazy val signedHeader: Option[SignedBlockHeader] = meta.map(toSignedHeader) + lazy val id: Option[ByteStr] = meta.map(_.id) + lazy val hitSource: Option[ByteStr] = meta.map(toHitSource) + } + + def toHitSource(m: PBBlockMeta): ByteStr = (if (m.vrf.isEmpty) m.getHeader.generationSignature else m.vrf).toByteStr + + def toSignedHeader(m: PBBlockMeta): SignedBlockHeader = SignedBlockHeader(PBBlocks.vanilla(m.getHeader), m.signature.toByteStr) + + def cache[K <: AnyRef, V <: AnyRef]( + maximumSize: Int, + loader: K => V, + batchLoader: lang.Iterable[? <: K] => util.Map[K, V] = { _: lang.Iterable[? <: K] => new util.HashMap[K, V]() } + ): LoadingCache[K, V] = CacheBuilder .newBuilder() .maximumSize(maximumSize) + .recordStats() .build(new CacheLoader[K, V] { - override def load(key: K): V = loader(key) + override def load(key: K): V = loader(key) + override def loadAll(keys: lang.Iterable[? 
<: K]): util.Map[K, V] = batchLoader(keys) }) def observedCache[K <: AnyRef, V <: AnyRef](maximumSize: Int, changed: Observer[K], loader: K => V)(implicit ct: ClassTag[K]): LoadingCache[K, V] = diff --git a/node/src/main/scala/com/wavesplatform/database/DBResource.scala b/node/src/main/scala/com/wavesplatform/database/DBResource.scala index 5b4695a6bf0..5217cbbcffb 100644 --- a/node/src/main/scala/com/wavesplatform/database/DBResource.scala +++ b/node/src/main/scala/com/wavesplatform/database/DBResource.scala @@ -1,27 +1,57 @@ package com.wavesplatform.database -import org.iq80.leveldb.{DB, DBIterator, ReadOptions} +import org.rocksdb.{ReadOptions, RocksDB, RocksIterator} + +import scala.collection.View +import scala.collection.mutable.ArrayBuffer trait DBResource extends AutoCloseable { def get[V](key: Key[V]): V def get(key: Array[Byte]): Array[Byte] - def iterator: DBIterator // Should have a single instance + def multiGet[A](keys: ArrayBuffer[Key[A]], valBufferSizes: ArrayBuffer[Int]): View[A] + def multiGet[A](keys: ArrayBuffer[Key[A]], valBufferSize: Int): View[A] + def multiGetFlat[A](keys: ArrayBuffer[Key[Option[A]]], valBufferSizes: ArrayBuffer[Int]): Seq[A] + def prefixIterator: RocksIterator // Should have a single instance + def fullIterator: RocksIterator // Should have a single instance + def withSafePrefixIterator[A](ifNotClosed: RocksIterator => A)(ifClosed: => A = ()): A + def withSafeFullIterator[A](ifNotClosed: RocksIterator => A)(ifClosed: => A = ()): A } object DBResource { - def apply(db: DB): DBResource = new DBResource { + def apply(db: RocksDB): DBResource = new DBResource { private[this] val snapshot = db.getSnapshot - private[this] val readOptions = new ReadOptions().snapshot(snapshot) + private[this] val readOptions = new ReadOptions().setSnapshot(snapshot).setVerifyChecksums(false) + + override def get[V](key: Key[V]): V = key.parse(db.get(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), readOptions, key.keyBytes)) + + override def get(key: Array[Byte]): Array[Byte] = db.get(readOptions, key) + + override def multiGetFlat[A](keys: ArrayBuffer[Key[Option[A]]], valBufferSizes: ArrayBuffer[Int]): Seq[A] = + db.multiGetFlat(readOptions, keys, valBufferSizes) + + def multiGet[A](keys: ArrayBuffer[Key[A]], valBufferSizes: ArrayBuffer[Int]): View[A] = + db.multiGet(readOptions, keys, valBufferSizes) - override def get[V](key: Key[V]): V = key.parse(db.get(key.keyBytes, readOptions)) + def multiGet[A](keys: ArrayBuffer[Key[A]], valBufferSize: Int): View[A] = + db.multiGet(readOptions, keys, valBufferSize) - override def get(key: Array[Byte]): Array[Byte] = db.get(key, readOptions) + override lazy val prefixIterator: RocksIterator = db.newIterator(readOptions.setTotalOrderSeek(false).setPrefixSameAsStart(true)) - override lazy val iterator: DBIterator = db.iterator(readOptions) + override lazy val fullIterator: RocksIterator = db.newIterator(readOptions.setTotalOrderSeek(true)) + + override def withSafePrefixIterator[A](ifNotClosed: RocksIterator => A)(ifClosed: => A): A = prefixIterator.synchronized { + if (prefixIterator.isOwningHandle) ifNotClosed(prefixIterator) else ifClosed + } + + override def withSafeFullIterator[A](ifNotClosed: RocksIterator => A)(ifClosed: => A): A = fullIterator.synchronized { + if (fullIterator.isOwningHandle) ifNotClosed(fullIterator) else ifClosed + } override def close(): Unit = { - iterator.close() - snapshot.close() + prefixIterator.synchronized(prefixIterator.close()) + fullIterator.synchronized(fullIterator.close()) + 
db.releaseSnapshot(snapshot) + readOptions.close() } } } diff --git a/node/src/main/scala/com/wavesplatform/database/Key.scala b/node/src/main/scala/com/wavesplatform/database/Key.scala index 9d61343bd77..8b4e8133089 100644 --- a/node/src/main/scala/com/wavesplatform/database/Key.scala +++ b/node/src/main/scala/com/wavesplatform/database/Key.scala @@ -3,8 +3,9 @@ package com.wavesplatform.database import com.google.common.base.CaseFormat import com.google.common.io.BaseEncoding import com.google.common.primitives.{Bytes, Shorts} +import org.rocksdb.ColumnFamilyHandle -abstract class Key[V](prefix: Short, val name: String, val suffix: Array[Byte]) { +abstract class Key[V](prefix: Short, val name: String, val suffix: Array[Byte], val columnFamilyHandle: Option[ColumnFamilyHandle] = None) { val keyBytes: Array[Byte] = Bytes.concat(Shorts.toByteArray(prefix), suffix) def parse(bytes: Array[Byte]): V def encode(v: V): Array[Byte] @@ -12,7 +13,7 @@ abstract class Key[V](prefix: Short, val name: String, val suffix: Array[Byte]) override lazy val toString: String = s"$name($prefix,${BaseEncoding.base16().encode(suffix)})" override def equals(obj: Any): Boolean = obj match { - case that: Key[_] => java.util.Arrays.equals(this.keyBytes, that.keyBytes) + case that: Key[?] => java.util.Arrays.equals(this.keyBytes, that.keyBytes) case _ => false } @@ -20,19 +21,21 @@ abstract class Key[V](prefix: Short, val name: String, val suffix: Array[Byte]) } object Key { - private[this] val converter = CaseFormat.UPPER_CAMEL.converterTo(CaseFormat.LOWER_HYPHEN) + private[this] val converter = CaseFormat.UPPER_CAMEL.converterTo(CaseFormat.LOWER_HYPHEN) + private[this] val keyTagToStr = KeyTags.values.toArray.sortBy(_.id).map(v => converter.convert(v.toString)) - def apply[V](keyTag: KeyTags.KeyTag, keySuffix: Array[Byte], parser: Array[Byte] => V, encoder: V => Array[Byte]): Key[V] = - new Key[V](keyTag.id.toShort, converter.convert(keyTag.toString), keySuffix) { + def apply[V](keyTag: KeyTags.KeyTag, keySuffix: Array[Byte], parser: Array[Byte] => V, encoder: V => Array[Byte], cfh: Option[ColumnFamilyHandle] = None): Key[V] = + new Key[V](keyTag.id.toShort, keyTagToStr(keyTag.id), keySuffix, cfh) { override def parse(bytes: Array[Byte]): V = parser(bytes) override def encode(v: V): Array[Byte] = encoder(v) } - def opt[V](keyTag: KeyTags.KeyTag, keySuffix: Array[Byte], parser: Array[Byte] => V, encoder: V => Array[Byte]): Key[Option[V]] = + def opt[V](keyTag: KeyTags.KeyTag, keySuffix: Array[Byte], parser: Array[Byte] => V, encoder: V => Array[Byte], cfh: Option[ColumnFamilyHandle] = None): Key[Option[V]] = apply[Option[V]]( keyTag, keySuffix, Option(_).map(parser), - _.fold[Array[Byte]](Array.emptyByteArray)(encoder) + _.fold[Array[Byte]](Array.emptyByteArray)(encoder), + cfh ) } diff --git a/node/src/main/scala/com/wavesplatform/database/KeyHelpers.scala b/node/src/main/scala/com/wavesplatform/database/KeyHelpers.scala index 66c218367ce..a0fadfa64a5 100644 --- a/node/src/main/scala/com/wavesplatform/database/KeyHelpers.scala +++ b/node/src/main/scala/com/wavesplatform/database/KeyHelpers.scala @@ -18,13 +18,13 @@ object KeyHelpers { def historyKey(keyTag: KeyTags.KeyTag, suffix: Array[Byte]): Key[Seq[Int]] = Key(keyTag, suffix, readIntSeq, writeIntSeq) def intKey(keyTag: KeyTags.KeyTag, default: Int = 0): Key[Int] = - Key(keyTag, Array.emptyByteArray, Option(_).fold(default)(Ints.fromByteArray), Ints.toByteArray) + Key(keyTag, Array.emptyByteArray, v => if (v != null && v.length >= Ints.BYTES) 
Ints.fromByteArray(v) else default, Ints.toByteArray) def longKey(keyTag: KeyTags.KeyTag, default: Long = 0): Key[Long] = - Key(keyTag, Array.emptyByteArray, Option(_).fold(default)(Longs.fromByteArray), Longs.toByteArray) + Key(keyTag, Array.emptyByteArray, v => if (v != null && v.length >= Longs.BYTES) Longs.fromByteArray(v) else default, Longs.toByteArray) def bytesSeqNr(keyTag: KeyTags.KeyTag, suffix: Array[Byte], default: Int = 0): Key[Int] = - Key(keyTag, suffix, Option(_).fold(default)(Ints.fromByteArray), Ints.toByteArray) + Key(keyTag, suffix, v => if (v != null && v.length >= Ints.BYTES) Ints.fromByteArray(v) else default, Ints.toByteArray) def unsupported[A](message: String): A => Array[Byte] = _ => throw new UnsupportedOperationException(message) } diff --git a/node/src/main/scala/com/wavesplatform/database/KeyTags.scala b/node/src/main/scala/com/wavesplatform/database/KeyTags.scala index 2ffa57abdbf..47c1b733c58 100644 --- a/node/src/main/scala/com/wavesplatform/database/KeyTags.scala +++ b/node/src/main/scala/com/wavesplatform/database/KeyTags.scala @@ -8,8 +8,8 @@ object KeyTags extends Enumeration { Height, Score, HeightOf, - WavesBalanceHistory, WavesBalance, + WavesBalanceHistory, AssetBalanceHistory, AssetBalance, AssetDetailsHistory, diff --git a/node/src/main/scala/com/wavesplatform/database/Keys.scala b/node/src/main/scala/com/wavesplatform/database/Keys.scala index dce34e5aca1..7f7040e2d70 100644 --- a/node/src/main/scala/com/wavesplatform/database/Keys.scala +++ b/node/src/main/scala/com/wavesplatform/database/Keys.scala @@ -2,41 +2,84 @@ package com.wavesplatform.database import com.google.common.primitives.{Ints, Longs} import com.wavesplatform.account.{Address, Alias} -import com.wavesplatform.api.BlockMeta import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 -import com.wavesplatform.database.protobuf.{EthereumTransactionMeta, StaticAssetInfo, TransactionMeta} +import com.wavesplatform.database.protobuf.{EthereumTransactionMeta, StaticAssetInfo, TransactionMeta, BlockMeta as PBBlockMeta} import com.wavesplatform.protobuf.transaction.PBRecipients +import com.wavesplatform.state import com.wavesplatform.state.* import com.wavesplatform.state.reader.LeaseDetails import com.wavesplatform.transaction.Asset.IssuedAsset -import com.wavesplatform.transaction.Transaction +import com.wavesplatform.transaction.{ERC20Address, Transaction} import com.wavesplatform.utils.* +case class CurrentBalance(balance: Long, height: Height, prevHeight: Height) +object CurrentBalance { + val Unavailable: CurrentBalance = CurrentBalance(0L, Height(0), Height(0)) +} + +case class BalanceNode(balance: Long, prevHeight: Height) +object BalanceNode { + val Empty: BalanceNode = BalanceNode(0, Height(0)) +} + +case class CurrentVolumeAndFee(volume: Long, fee: Long, height: Height, prevHeight: Height) +object CurrentVolumeAndFee { + val Unavailable: CurrentVolumeAndFee = CurrentVolumeAndFee(0, 0, Height(0), Height(0)) +} + +case class VolumeAndFeeNode(volume: Long, fee: Long, prevHeight: Height) +object VolumeAndFeeNode { + val Empty: VolumeAndFeeNode = VolumeAndFeeNode(0, 0, Height(0)) +} + +case class CurrentLeaseBalance(in: Long, out: Long, height: Height, prevHeight: Height) +object CurrentLeaseBalance { + val Unavailable: CurrentLeaseBalance = CurrentLeaseBalance(0, 0, Height(0), Height(0)) +} + +case class LeaseBalanceNode(in: Long, out: Long, prevHeight: Height) +object LeaseBalanceNode { + val Empty: LeaseBalanceNode = LeaseBalanceNode(0, 0, 
Height(0)) +} + +case class CurrentData(entry: DataEntry[?], height: Height, prevHeight: Height) +object CurrentData { + def empty(key: String): CurrentData = CurrentData(EmptyDataEntry(key), Height(0), Height(0)) +} + +case class DataNode(entry: DataEntry[?], prevHeight: Height) +object DataNode { + def empty(key: String): DataNode = DataNode(EmptyDataEntry(key), Height(0)) +} + object Keys { import KeyHelpers.* - import KeyTags.{AddressId as AddressIdTag, EthereumTransactionMeta as EthereumTransactionMetaTag, InvokeScriptResult as InvokeScriptResultTag, LeaseDetails as LeaseDetailsTag, *} - - val version: Key[Int] = intKey(Version, default = 1) - val height: Key[Int] = intKey(Height) - def score(height: Int): Key[BigInt] = Key(Score, h(height), Option(_).fold(BigInt(0))(BigInt(_)), _.toByteArray) + import KeyTags.{ + AddressId as AddressIdTag, + EthereumTransactionMeta as EthereumTransactionMetaTag, + InvokeScriptResult as InvokeScriptResultTag, + LeaseDetails as LeaseDetailsTag, + * + } + + val version: Key[Int] = intKey(Version, default = 1) + val height: Key[Height] = + Key(Height, Array.emptyByteArray, v => state.Height @@ (if (v != null && v.length >= Ints.BYTES) Ints.fromByteArray(v) else 0), Ints.toByteArray) def heightOf(blockId: ByteStr): Key[Option[Int]] = Key.opt[Int](HeightOf, blockId.arr, Ints.fromByteArray, Ints.toByteArray) - def wavesBalanceHistory(addressId: AddressId): Key[Seq[Int]] = historyKey(WavesBalanceHistory, addressId.toByteArray) + def wavesBalance(addressId: AddressId): Key[CurrentBalance] = + Key(WavesBalance, addressId.toByteArray, readCurrentBalance, writeCurrentBalance) - def wavesBalance(addressId: AddressId)(height: Int): Key[Long] = - Key(WavesBalance, hAddr(height, addressId), Option(_).fold(0L)(Longs.fromByteArray), Longs.toByteArray) + def wavesBalanceAt(addressId: AddressId, height: Height): Key[BalanceNode] = + Key(WavesBalanceHistory, hBytes(addressId.toByteArray, height), readBalanceNode, writeBalanceNode) - def assetBalanceHistory(addressId: AddressId, asset: IssuedAsset): Key[Seq[Int]] = - historyKey(AssetBalanceHistory, addressId.toByteArray ++ asset.id.arr) - def assetBalance(addressId: AddressId, asset: IssuedAsset)(height: Int): Key[Long] = - Key( - AssetBalance, - hBytes(asset.id.arr ++ addressId.toByteArray, height), - Option(_).fold(0L)(Longs.fromByteArray), - Longs.toByteArray - ) + def assetBalance(addressId: AddressId, asset: IssuedAsset): Key[CurrentBalance] = + Key(AssetBalance, addressId.toByteArray ++ asset.id.arr, readCurrentBalance, writeCurrentBalance) + + def assetBalanceAt(addressId: AddressId, asset: IssuedAsset, height: Height): Key[BalanceNode] = + Key(AssetBalanceHistory, hBytes(asset.id.arr ++ addressId.toByteArray, height), readBalanceNode, writeBalanceNode) def assetDetailsHistory(asset: IssuedAsset): Key[Seq[Int]] = historyKey(AssetDetailsHistory, asset.id.arr) def assetDetails(asset: IssuedAsset)(height: Int): Key[(AssetInfo, AssetVolumeInfo)] = @@ -48,17 +91,21 @@ object Keys { Key(UpdatedAssets, h(height), d => readAssetIds(d).map(IssuedAsset(_)), ias => writeAssetIds(ias.map(_.id))) def sponsorshipAssets(height: Int): Key[Seq[IssuedAsset]] = Key(SponsoredAssets, h(height), d => readAssetIds(d).map(IssuedAsset(_)), ias => writeAssetIds(ias.map(_.id))) - def leaseBalanceHistory(addressId: AddressId): Key[Seq[Int]] = historyKey(LeaseBalanceHistory, addressId.toByteArray) - def leaseBalance(addressId: AddressId)(height: Int): Key[LeaseBalance] = - Key(LeaseBalance, hAddr(height, addressId), readLeaseBalance, 
writeLeaseBalance) + def leaseBalanceAt(addressId: AddressId, height: Height): Key[LeaseBalanceNode] = + Key(LeaseBalanceHistory, hBytes(addressId.toByteArray, height), readLeaseBalanceNode, writeLeaseBalanceNode) + + def leaseBalance(addressId: AddressId): Key[CurrentLeaseBalance] = + Key(LeaseBalance, addressId.toByteArray, readLeaseBalance, writeLeaseBalance) def leaseDetailsHistory(leaseId: ByteStr): Key[Seq[Int]] = historyKey(LeaseDetailsHistory, leaseId.arr) def leaseDetails(leaseId: ByteStr)(height: Int): Key[Option[Either[Boolean, LeaseDetails]]] = Key.opt(LeaseDetailsTag, Ints.toByteArray(height) ++ leaseId.arr, readLeaseDetails, writeLeaseDetails) - def filledVolumeAndFeeHistory(orderId: ByteStr): Key[Seq[Int]] = historyKey(FilledVolumeAndFeeHistory, orderId.arr) - def filledVolumeAndFee(orderId: ByteStr)(height: Int): Key[VolumeAndFee] = - Key(FilledVolumeAndFee, hBytes(orderId.arr, height), readVolumeAndFee, writeVolumeAndFee) + def filledVolumeAndFeeAt(orderId: ByteStr, height: Height): Key[VolumeAndFeeNode] = + Key(FilledVolumeAndFeeHistory, hBytes(orderId.arr, height), readVolumeAndFeeNode, writeVolumeAndFeeNode) + + def filledVolumeAndFee(orderId: ByteStr): Key[CurrentVolumeAndFee] = + Key(FilledVolumeAndFee, orderId.arr, readVolumeAndFee, writeVolumeAndFee) def changedAddresses(height: Int): Key[Seq[AddressId]] = Key(ChangedAddresses, h(height), readAddressIds, writeAddressIds) @@ -80,10 +127,11 @@ object Keys { val activatedFeatures: Key[Map[Short, Int]] = Key(ActivatedFeatures, Array.emptyByteArray, readFeatureMap, writeFeatureMap) // public key hash is used here so it's possible to populate bloom filter by just scanning all the history keys - def dataHistory(address: Address, key: String): Key[Seq[Int]] = - historyKey(DataHistory, PBRecipients.publicKeyHash(address) ++ key.utf8Bytes) - def data(addressId: AddressId, key: String)(height: Int): Key[Option[DataEntry[?]]] = - Key.opt(Data, hBytes(addressId.toByteArray ++ key.utf8Bytes, height), readDataEntry(key), writeDataEntry) + def data(address: Address, key: String): Key[CurrentData] = + Key(Data, PBRecipients.publicKeyHash(address) ++ key.utf8Bytes, readCurrentData(key), writeCurrentData) + + def dataAt(addressId: AddressId, key: String)(height: Int): Key[DataNode] = + Key(DataHistory, hBytes(addressId.toByteArray ++ key.utf8Bytes, height), readDataNode(key), writeDataNode) def sponsorshipHistory(asset: IssuedAsset): Key[Seq[Int]] = historyKey(SponsorshipHistory, asset.id.arr) def sponsorship(asset: IssuedAsset)(height: Int): Key[SponsorshipValue] = @@ -102,7 +150,7 @@ object Keys { def changedDataKeys(height: Int, addressId: AddressId): Key[Seq[String]] = Key(ChangedDataKeys, hBytes(addressId.toByteArray, height), readStrings, writeStrings) - def blockMetaAt(height: Height): Key[Option[BlockMeta]] = + def blockMetaAt(height: Height): Key[Option[PBBlockMeta]] = Key.opt(BlockInfoAtHeight, h(height), readBlockMeta, writeBlockMeta) def blockInfoBytesAt(height: Height): Key[Option[Array[Byte]]] = @@ -113,18 +161,19 @@ object Keys { unsupported("Can not explicitly write block bytes") ) - def transactionAt(height: Height, n: TxNum): Key[Option[(TxMeta, Transaction)]] = + def transactionAt(height: Height, n: TxNum, cfHandle: RDB.TxHandle): Key[Option[(TxMeta, Transaction)]] = Key.opt[(TxMeta, Transaction)]( NthTransactionInfoAtHeight, hNum(height, n), readTransaction(height), - writeTransaction + writeTransaction, + Some(cfHandle.handle) ) def addressTransactionSeqNr(addressId: AddressId): Key[Int] = 
bytesSeqNr(AddressTransactionSeqNr, addressId.toByteArray) - def addressTransactionHN(addressId: AddressId, seqNr: Int): Key[Option[(Height, Seq[(Byte, TxNum)])]] = + def addressTransactionHN(addressId: AddressId, seqNr: Int): Key[Option[(Height, Seq[(Byte, TxNum, Int)])]] = Key.opt( AddressTransactionHeightTypeAndNums, hBytes(addressId.toByteArray, seqNr), @@ -132,12 +181,13 @@ object Keys { writeTransactionHNSeqAndType ) - def transactionMetaById(txId: TransactionId): Key[Option[TransactionMeta]] = + def transactionMetaById(txId: TransactionId, cfh: RDB.TxMetaHandle): Key[Option[TransactionMeta]] = Key.opt( TransactionMetaById, txId.arr, TransactionMeta.parseFrom, - _.toByteArray + _.toByteArray, + Some(cfh.handle) ) def blockTransactionsFee(height: Int): Key[Long] = @@ -151,13 +201,6 @@ object Keys { def invokeScriptResult(height: Int, txNum: TxNum): Key[Option[InvokeScriptResult]] = Key.opt(InvokeScriptResultTag, hNum(height, txNum), InvokeScriptResult.fromBytes, InvokeScriptResult.toBytes) - def blockReward(height: Int): Key[Option[Long]] = - Key.opt(BlockReward, h(height), Longs.fromByteArray, Longs.toByteArray) - - def wavesAmount(height: Int): Key[BigInt] = Key(WavesAmount, h(height), Option(_).fold(BigInt(0))(BigInt(_)), _.toByteArray) - - def hitSource(height: Int): Key[Option[ByteStr]] = Key.opt(HitSource, h(height), ByteStr(_), _.arr) - val disabledAliases: Key[Set[Alias]] = Key( DisabledAliases, Array.emptyByteArray, @@ -166,7 +209,10 @@ object Keys { ) def assetStaticInfo(asset: IssuedAsset): Key[Option[StaticAssetInfo]] = - Key.opt(AssetStaticInfo, asset.id.arr, StaticAssetInfo.parseFrom, _.toByteArray) + Key.opt(AssetStaticInfo, asset.id.arr.take(20), StaticAssetInfo.parseFrom, _.toByteArray) + + def assetStaticInfo(addr: ERC20Address): Key[Option[StaticAssetInfo]] = + Key.opt(AssetStaticInfo, addr.arr, StaticAssetInfo.parseFrom, _.toByteArray) def nftCount(addressId: AddressId): Key[Int] = Key(NftCount, addressId.toByteArray, Option(_).fold(0)(Ints.fromByteArray), Ints.toByteArray) diff --git a/node/src/main/scala/com/wavesplatform/database/LevelDBFactory.scala b/node/src/main/scala/com/wavesplatform/database/LevelDBFactory.scala deleted file mode 100644 index c821f25ff34..00000000000 --- a/node/src/main/scala/com/wavesplatform/database/LevelDBFactory.scala +++ /dev/null @@ -1,28 +0,0 @@ -package com.wavesplatform.database - -import com.wavesplatform.utils.ScorexLogging -import org.iq80.leveldb.DBFactory - -import scala.util.Try - -object LevelDBFactory extends ScorexLogging { - private[this] val jnaFactory = "com.wavesplatform.database.jna.LevelDBJNADBFactory" - private[this] val javaFactory = "org.iq80.leveldb.impl.Iq80DBFactory" - - lazy val factory: DBFactory = { - val isTesting = sys.props.get("sbt-testing").isDefined - val nativeFactories = if (isTesting) List.empty else List(jnaFactory) - val allFactories = nativeFactories :+ javaFactory - - val pairs = for { - loader <- List(ClassLoader.getSystemClassLoader, getClass.getClassLoader).view - factoryName <- allFactories - factory <- Try(loader.loadClass(factoryName).getConstructor().newInstance().asInstanceOf[DBFactory]).toOption - } yield (factoryName, factory) - - val (fName, factory) = pairs.headOption.getOrElse(throw new Exception(s"Could not load any of the factory classes: $allFactories")) - if (fName == javaFactory) log.warn("Using the pure java LevelDB implementation which is still experimental") - else log.info(s"Loaded $fName with $factory") - factory - } -} diff --git 
a/node/src/main/scala/com/wavesplatform/database/RDB.scala b/node/src/main/scala/com/wavesplatform/database/RDB.scala new file mode 100644 index 00000000000..c403d78e55c --- /dev/null +++ b/node/src/main/scala/com/wavesplatform/database/RDB.scala @@ -0,0 +1,143 @@ +package com.wavesplatform.database + +import java.io.File +import java.util +import com.typesafe.scalalogging.StrictLogging +import com.wavesplatform.database.RDB.{TxHandle, TxMetaHandle} +import com.wavesplatform.settings.DBSettings +import com.wavesplatform.utils.* +import org.rocksdb.* + +import java.nio.file.{Files, Path} +import scala.jdk.CollectionConverters.* + +final class RDB( + val db: RocksDB, + val txMetaHandle: TxMetaHandle, + val txHandle: TxHandle, + acquiredResources: Seq[RocksObject] +) extends AutoCloseable { + override def close(): Unit = { + acquiredResources.foreach(_.close()) + db.close() + } +} + +object RDB extends StrictLogging { + final class TxMetaHandle private[RDB] (val handle: ColumnFamilyHandle) + final class TxHandle private[RDB] (val handle: ColumnFamilyHandle) + case class OptionsWithResources[A](options: A, resources: Seq[RocksObject]) + + def open(settings: DBSettings): RDB = { + val file = new File(settings.directory) + checkDbDir(file.toPath) + logger.debug(s"Open DB at ${settings.directory}") + + val dbOptions = createDbOptions(settings) + + val dbDir = file.getAbsoluteFile + dbDir.getParentFile.mkdirs() + + val handles = new util.ArrayList[ColumnFamilyHandle]() + val defaultCfOptions = newColumnFamilyOptions(12.0, 16 << 10, settings.rocksdb.mainCacheSize, 0.6, settings.rocksdb.writeBufferSize) + val txMetaCfOptions = newColumnFamilyOptions(10.0, 2 << 10, settings.rocksdb.txMetaCacheSize, 0.9, settings.rocksdb.writeBufferSize) + val txCfOptions = newColumnFamilyOptions(10.0, 2 << 10, settings.rocksdb.txCacheSize, 0.9, settings.rocksdb.writeBufferSize) + val db = RocksDB.open( + dbOptions.options, + settings.directory, + Seq( + new ColumnFamilyDescriptor( + RocksDB.DEFAULT_COLUMN_FAMILY, + defaultCfOptions.options + .setCfPaths(Seq(new DbPath(new File(dbDir, "tx-meta").toPath, 0L)).asJava) + ), + new ColumnFamilyDescriptor( + "tx-meta".utf8Bytes, + txMetaCfOptions.options + .optimizeForPointLookup(16 << 20) + .setDisableAutoCompactions(true) + .setCfPaths(Seq(new DbPath(new File(dbDir, "tx-meta").toPath, 0L)).asJava) + ), + new ColumnFamilyDescriptor( + "transactions".utf8Bytes, + txCfOptions.options + .setCfPaths(Seq(new DbPath(new File(dbDir, "transactions").toPath, 0L)).asJava) + ) + ).asJava, + handles + ) + + new RDB( + db, + new TxMetaHandle(handles.get(1)), + new TxHandle(handles.get(2)), + dbOptions.resources ++ defaultCfOptions.resources ++ txMetaCfOptions.resources ++ txCfOptions.resources + ) + } + + private def newColumnFamilyOptions( + bitsPerKey: Double, + blockSize: Long, + cacheCapacity: Long, + highPriPoolRatio: Double, + writeBufferSize: Long + ): OptionsWithResources[ColumnFamilyOptions] = { + val bloomFilter = new BloomFilter(bitsPerKey) + val blockCache = new LRUCache(cacheCapacity, -1, false, highPriPoolRatio) + val sstPartitionerFactory = new SstPartitionerFixedPrefixFactory(2) + + val options = new ColumnFamilyOptions() + .setTableFormatConfig( + new BlockBasedTableConfig() + .setFilterPolicy(bloomFilter) + .setOptimizeFiltersForMemory(true) + .setCacheIndexAndFilterBlocks(true) + .setPinL0FilterAndIndexBlocksInCache(true) + .setFormatVersion(5) + .setBlockSize(blockSize) + .setChecksumType(ChecksumType.kNoChecksum) + .setBlockCache(blockCache) + 
.setCacheIndexAndFilterBlocksWithHighPriority(true) + .setDataBlockIndexType(DataBlockIndexType.kDataBlockBinaryAndHash) + .setDataBlockHashTableUtilRatio(0.5) + ) + .setWriteBufferSize(writeBufferSize) + .setLevelCompactionDynamicLevelBytes(true) + .useCappedPrefixExtractor(10) + .setMemtablePrefixBloomSizeRatio(0.25) + .setCompressionType(CompressionType.LZ4_COMPRESSION) + .setSstPartitionerFactory(sstPartitionerFactory) + + OptionsWithResources(options, Seq(options, bloomFilter, blockCache, sstPartitionerFactory)) + } + + private def createDbOptions(settings: DBSettings): OptionsWithResources[DBOptions] = { + val dbOptions = new DBOptions() + .setCreateIfMissing(true) + .setParanoidChecks(true) + .setIncreaseParallelism(4) + .setBytesPerSync(2 << 20) + .setMaxBackgroundJobs(4) + .setCreateMissingColumnFamilies(true) + .setMaxOpenFiles(100) + + if (settings.rocksdb.enableStatistics) { + val statistics = new Statistics() + OptionsWithResources( + dbOptions.setStatistics(statistics), + Seq(dbOptions, statistics) + ) + } else OptionsWithResources(dbOptions, Seq(dbOptions)) + } + + private def checkDbDir(dbPath: Path): Unit = { + val containsLdbFiles = Files.exists(dbPath) && Files.list(dbPath).iterator().asScala.exists(_.getFileName.toString.endsWith(".ldb")) + if (containsLdbFiles) { + logger.error( + s"Database directory ${dbPath.toAbsolutePath.toString} contains LevelDB files (.ldb), which are not compatible with the current database format. Please delete these files and restart the node" + ) + logger.error("FOR THIS REASON THE NODE STOPPED AUTOMATICALLY") + forceStopApplication(FatalDBError) + } + } +} diff --git a/node/src/main/scala/com/wavesplatform/database/RW.scala b/node/src/main/scala/com/wavesplatform/database/RW.scala index c3243259230..aac07a5547d 100644 --- a/node/src/main/scala/com/wavesplatform/database/RW.scala +++ b/node/src/main/scala/com/wavesplatform/database/RW.scala @@ -1,32 +1,25 @@ package com.wavesplatform.database -import com.wavesplatform.metrics.LevelDBStats -import com.wavesplatform.metrics.LevelDBStats.DbHistogramExt -import org.iq80.leveldb.{DB, ReadOptions, WriteBatch} +import com.wavesplatform.metrics.RocksDBStats +import com.wavesplatform.metrics.RocksDBStats.DbHistogramExt +import org.rocksdb.{ReadOptions, RocksDB, WriteBatch} -class RW(db: DB, readOptions: ReadOptions, batch: WriteBatch) extends ReadOnlyDB(db, readOptions) { - def put[V](key: Key[V], value: V): Unit = { +class RW(db: RocksDB, readOptions: ReadOptions, batch: WriteBatch) extends ReadOnlyDB(db, readOptions) { + def put[V](key: Key[V], value: V): Int = { val bytes = key.encode(value) - LevelDBStats.write.recordTagged(key, bytes) - batch.put(key.keyBytes, bytes) + RocksDBStats.write.recordTagged(key, bytes) + batch.put(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), key.keyBytes, bytes) + bytes.length } def put(key: Array[Byte], value: Array[Byte]): Unit = batch.put(key, value) def update[V](key: Key[V])(f: V => V): Unit = put(key, f(get(key))) - /** Because of how leveldb batches work, you can increment a specific value only once!
*/ - def inc(key: Key[Int]): Int = { - val newValue = get(key) + 1 - put(key, newValue) - newValue - } - - def delete(key: Array[Byte], statsKey: String): Unit = batch.delete(key) - def delete(key: Array[Byte]): Unit = batch.delete(key) - def delete[V](key: Key[V]): Unit = batch.delete(key.keyBytes) + def delete[V](key: Key[V]): Unit = + batch.delete(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), key.keyBytes) def filterHistory(key: Key[Seq[Int]], heightToRemove: Int): Unit = { val newValue = get(key).filterNot(_ == heightToRemove) diff --git a/node/src/main/scala/com/wavesplatform/database/ReadOnlyDB.scala b/node/src/main/scala/com/wavesplatform/database/ReadOnlyDB.scala index 58fdd97c5c2..3c152b0bbbd 100644 --- a/node/src/main/scala/com/wavesplatform/database/ReadOnlyDB.scala +++ b/node/src/main/scala/com/wavesplatform/database/ReadOnlyDB.scala @@ -1,58 +1,73 @@ package com.wavesplatform.database -import com.wavesplatform.metrics.LevelDBStats -import com.wavesplatform.metrics.LevelDBStats.DbHistogramExt -import org.iq80.leveldb.{DB, DBIterator, ReadOptions} +import com.google.common.collect.Maps +import com.wavesplatform.metrics.RocksDBStats +import com.wavesplatform.metrics.RocksDBStats.DbHistogramExt +import org.rocksdb.{ColumnFamilyHandle, ReadOptions, RocksDB, RocksIterator} import scala.annotation.tailrec +import scala.util.Using -class ReadOnlyDB(db: DB, readOptions: ReadOptions) { +class ReadOnlyDB(db: RocksDB, readOptions: ReadOptions) { def get[V](key: Key[V]): V = { - val bytes = db.get(key.keyBytes, readOptions) - LevelDBStats.read.recordTagged(key, bytes) + val bytes = db.get(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), readOptions, key.keyBytes) + RocksDBStats.read.recordTagged(key, bytes) key.parse(bytes) } + def multiGetOpt[V](keys: Seq[Key[Option[V]]], valBufferSize: Int): Seq[Option[V]] = + db.multiGetOpt(readOptions, keys, valBufferSize) + + def multiGet[V](keys: Seq[Key[V]], valBufferSize: Int): Seq[Option[V]] = + db.multiGet(readOptions, keys, valBufferSize) + + def multiGetOpt[V](keys: Seq[Key[Option[V]]], valBufSizes: Seq[Int]): Seq[Option[V]] = + db.multiGetOpt(readOptions, keys, valBufSizes) + + def multiGetInts(keys: Seq[Key[Int]]): Seq[Option[Int]] = + db.multiGetInts(readOptions, keys) + def has[V](key: Key[V]): Boolean = { - val bytes = db.get(key.keyBytes, readOptions) - LevelDBStats.read.recordTagged(key, bytes) + val bytes = db.get(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), readOptions, key.keyBytes) + RocksDBStats.read.recordTagged(key, bytes) bytes != null } - def newIterator: DBIterator = db.iterator(readOptions) + def newIterator: RocksIterator = db.newIterator(readOptions.setTotalOrderSeek(true)) - def iterateOver(tag: KeyTags.KeyTag)(f: DBEntry => Unit): Unit = iterateOver(tag.prefixBytes)(f) + def newPrefixIterator: RocksIterator = db.newIterator(readOptions.setTotalOrderSeek(false).setPrefixSameAsStart(true)) - def iterateOver(prefix: Array[Byte])(f: DBEntry => Unit): Unit = { - val iterator = db.iterator(readOptions) - try { - iterator.seek(prefix) - while (iterator.hasNext && iterator.peekNext().getKey.startsWith(prefix)) f(iterator.next()) - } finally iterator.close() - } + def iterateOverPrefix(tag: KeyTags.KeyTag)(f: DBEntry => Unit): Unit = iterateOverPrefix(tag.prefixBytes)(f) + + def iterateOverPrefix(prefix: Array[Byte])(f: DBEntry => Unit): Unit = { + @tailrec + def loop(iter: RocksIterator): Unit = { + val key = iter.key() + if (iter.isValid) { + f(Maps.immutableEntry(key, iter.value())) + 
iter.next() + loop(iter) + } else () + } - def prefixExists(prefix: Array[Byte]): Boolean = { - val iterator = db.iterator(readOptions) - iterator.seek(prefix) - val res = iterator.hasNext && iterator.peekNext().getKey.startsWith(prefix) - iterator.close() - res + Using.resource(db.newIterator(readOptions.setTotalOrderSeek(false).setPrefixSameAsStart(true))) { iter => + iter.seek(prefix) + loop(iter) + } } - def read[T](keyName: String, prefix: Array[Byte], seek: Array[Byte], n: Int)(deserialize: DBEntry => T): Vector[T] = { - val iter = newIterator - @tailrec def loop(aux: Vector[T], restN: Int, totalBytesRead: Long): (Vector[T], Long) = - if (restN > 0 && iter.hasNext) { - val elem = iter.next() - if (elem.getKey.startsWith(prefix)) loop(aux :+ deserialize(elem), restN - 1, totalBytesRead + elem.getValue.length) - else (aux, totalBytesRead) - } else (aux, totalBytesRead) - - try { - iter.seek(seek) - val (r, totalBytesRead) = loop(Vector.empty, n, 0) - LevelDBStats.read.recordTagged(keyName, totalBytesRead) - r - } finally iter.close() + def iterateOver(prefix: Array[Byte], cfh: Option[ColumnFamilyHandle] = None)(f: DBEntry => Unit): Unit = + Using.resource(db.newIterator(cfh.getOrElse(db.getDefaultColumnFamily), readOptions.setTotalOrderSeek(true))) { iter => + iter.seek(prefix) + while (iter.isValid && iter.key().startsWith(prefix)) { + f(Maps.immutableEntry(iter.key(), iter.value())) + iter.next() + } + } + + def prefixExists(prefix: Array[Byte]): Boolean = Using.resource(db.newIterator(readOptions.setTotalOrderSeek(false).setPrefixSameAsStart(true))) { + iter => + iter.seek(prefix) + iter.isValid } } diff --git a/node/src/main/scala/com/wavesplatform/database/LevelDBWriter.scala b/node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala similarity index 58% rename from node/src/main/scala/com/wavesplatform/database/LevelDBWriter.scala rename to node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala index 440af49bcd8..7f1dbe3f160 100644 --- a/node/src/main/scala/com/wavesplatform/database/LevelDBWriter.scala +++ b/node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala @@ -1,27 +1,29 @@ package com.wavesplatform.database import cats.data.Ior -import cats.syntax.option.* import cats.syntax.semigroup.* import com.google.common.cache.CacheBuilder import com.google.common.collect.MultimapBuilder -import com.google.common.primitives.{Bytes, Ints} +import com.google.common.hash.{BloomFilter, Funnels} +import com.google.common.primitives.Ints import com.google.protobuf.ByteString import com.wavesplatform.account.{Address, Alias} -import com.wavesplatform.api.BlockMeta +import com.wavesplatform.api.common.WavesBalanceIterator +import com.wavesplatform.block.Block import com.wavesplatform.block.Block.BlockId -import com.wavesplatform.block.{Block, SignedBlockHeader} import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.common.utils.* +import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.database import com.wavesplatform.database.patch.DisableHijackedAliases -import com.wavesplatform.database.protobuf.{EthereumTransactionMeta, StaticAssetInfo, TransactionMeta} +import com.wavesplatform.database.protobuf.{EthereumTransactionMeta, StaticAssetInfo, TransactionMeta, BlockMeta as PBBlockMeta} import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.lang.ValidationError +import com.wavesplatform.protobuf.ByteStringExt +import com.wavesplatform.protobuf.block.PBBlocks import 
com.wavesplatform.protobuf.transaction.PBAmounts -import com.wavesplatform.settings.{BlockchainSettings, DBSettings, WavesSettings} +import com.wavesplatform.settings.{BlockchainSettings, DBSettings} +import com.wavesplatform.state.* import com.wavesplatform.state.reader.LeaseDetails -import com.wavesplatform.state.{TxNum, *} import com.wavesplatform.transaction.* import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves} import com.wavesplatform.transaction.EthereumTransaction.Transfer @@ -32,8 +34,7 @@ import com.wavesplatform.transaction.lease.{LeaseCancelTransaction, LeaseTransac import com.wavesplatform.transaction.smart.{InvokeExpressionTransaction, InvokeScriptTransaction, SetScriptTransaction} import com.wavesplatform.transaction.transfer.* import com.wavesplatform.utils.{LoggerFacade, ScorexLogging} -import monix.reactive.Observer -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB import org.slf4j.LoggerFactory import java.util @@ -43,7 +44,7 @@ import scala.collection.mutable.ArrayBuffer import scala.jdk.CollectionConverters.* import scala.util.control.NonFatal -object LevelDBWriter extends ScorexLogging { +object RocksDBWriter extends ScorexLogging { /** {{{ * ([10, 7, 4], 5, 11) => [10, 7, 4] @@ -55,10 +56,6 @@ object LevelDBWriter extends ScorexLogging { c1 :+ c2.headOption.getOrElse(1) } - private[database] def closest(v: Seq[Int], h: Int): Option[Int] = { - v.dropWhile(_ > h).headOption // Should we use binary search? - } - implicit class ReadOnlyDBExt(val db: ReadOnlyDB) extends AnyVal { def fromHistory[A](historyKey: Key[Seq[Int]], valueKey: Int => Key[A]): Option[A] = for { @@ -78,7 +75,7 @@ object LevelDBWriter extends ScorexLogging { } yield db.get(valueKey(lastChange)) } - private def loadHeight(db: DB): Int = db.get(Keys.height) + private def loadHeight(db: RocksDB): Height = db.get(Keys.height) private[database] def merge(wbh: Seq[Int], lbh: Seq[Int]): Seq[(Int, Int)] = { @@ -106,100 +103,45 @@ object LevelDBWriter extends ScorexLogging { recMergeFixed(wbh.head, wbh.tail, lbh.head, lbh.tail, ArrayBuffer.empty).toSeq } - - def apply(db: DB, spendableBalanceChanged: Observer[(Address, Asset)], settings: WavesSettings): LevelDBWriter & AutoCloseable = { - val expectedHeight = loadHeight(db) - def load(name: String, key: KeyTags.KeyTag): Option[BloomFilterImpl] = { - if (settings.dbSettings.useBloomFilter) - Some(BloomFilter.loadOrPopulate(db, settings.dbSettings.directory, name, expectedHeight, key, 100000000)) - else - None - } - - val _orderFilter = load("orders", KeyTags.FilledVolumeAndFeeHistory) - val _dataKeyFilter = load("account-data", KeyTags.DataHistory) - val _wavesBalanceFilter = load("waves-balances", KeyTags.WavesBalanceHistory) - val _assetBalanceFilter = load("asset-balances", KeyTags.AssetBalanceHistory) - new LevelDBWriter(db, spendableBalanceChanged, settings.blockchainSettings, settings.dbSettings) with AutoCloseable { - - override val orderFilter: BloomFilter = _orderFilter.getOrElse(BloomFilter.AlwaysEmpty) - override val dataKeyFilter: BloomFilter = _dataKeyFilter.getOrElse(BloomFilter.AlwaysEmpty) - override val wavesBalanceFilter: BloomFilter = _wavesBalanceFilter.getOrElse(BloomFilter.AlwaysEmpty) - override val assetBalanceFilter: BloomFilter = _assetBalanceFilter.getOrElse(BloomFilter.AlwaysEmpty) - - override def close(): Unit = { - log.debug("Shutting down LevelDBWriter") - val lastHeight = LevelDBWriter.loadHeight(db) - _orderFilter.foreach(_.save(lastHeight)) - _dataKeyFilter.foreach(_.save(lastHeight)) - 
_wavesBalanceFilter.foreach(_.save(lastHeight)) - _assetBalanceFilter.foreach(_.save(lastHeight)) - } - } - } - - def readOnly(db: DB, settings: WavesSettings): LevelDBWriter = { - val expectedHeight = loadHeight(db) - def loadFilter(filterName: String) = - if (settings.dbSettings.useBloomFilter) - BloomFilter - .tryLoad(db, filterName, settings.dbSettings.directory, expectedHeight) - .fold(_ => BloomFilter.AlwaysEmpty, gf => new Wrapper(gf)) - else - BloomFilter.AlwaysEmpty - - new LevelDBWriter(db, Observer.stopped, settings.blockchainSettings, settings.dbSettings) { - override val orderFilter: BloomFilter = loadFilter("orders") - override val dataKeyFilter: BloomFilter = loadFilter("account-data") - override val wavesBalanceFilter: BloomFilter = loadFilter("waves-balances") - override val assetBalanceFilter: BloomFilter = loadFilter("asset-balances") - } - } } //noinspection UnstableApiUsage -abstract class LevelDBWriter private[database] ( - writableDB: DB, - spendableBalanceChanged: Observer[(Address, Asset)], +class RocksDBWriter( + rdb: RDB, val settings: BlockchainSettings, - val dbSettings: DBSettings -) extends Caches(spendableBalanceChanged) { - - private[this] val log = LoggerFacade(LoggerFactory.getLogger(classOf[LevelDBWriter])) + val dbSettings: DBSettings, + bfBlockInsertions: Int = 10000 +) extends Caches { + import rdb.db as writableDB - def orderFilter: BloomFilter - def dataKeyFilter: BloomFilter - def wavesBalanceFilter: BloomFilter - def assetBalanceFilter: BloomFilter + private[this] val log = LoggerFacade(LoggerFactory.getLogger(classOf[RocksDBWriter])) private[this] var disabledAliases = writableDB.get(Keys.disabledAliases) - private[this] val balanceSnapshotMaxRollbackDepth: Int = dbSettings.maxRollbackDepth + 1000 - import LevelDBWriter.* + import RocksDBWriter.* private[database] def readOnly[A](f: ReadOnlyDB => A): A = writableDB.readOnly(f) private[this] def readWrite[A](f: RW => A): A = writableDB.readWrite(f) - private def loadWithFilter[A, R](filter: BloomFilter, key: Key[A])(f: (ReadOnlyDB, A) => Option[R]): Option[R] = - if (filter.mightContain(key.suffix)) readOnly { ro => - f(ro, ro.get(key)) - } - else None + override protected def loadMaxAddressId(): Long = writableDB.get(Keys.lastAddressId).getOrElse(0L) - override protected def loadMaxAddressId(): Long = readOnly(db => db.get(Keys.lastAddressId).getOrElse(0L)) + override protected def loadAddressId(address: Address): Option[AddressId] = + writableDB.get(Keys.addressId(address)) - override protected def loadAddressId(address: Address): Option[AddressId] = readOnly(db => db.get(Keys.addressId(address))) + override protected def loadAddressIds(addresses: Seq[Address]): Map[Address, Option[AddressId]] = readOnly { ro => + addresses.view.zip(ro.multiGetOpt(addresses.map(Keys.addressId), 8)).toMap + } - override protected def loadHeight(): Int = LevelDBWriter.loadHeight(writableDB) + override protected def loadHeight(): Height = RocksDBWriter.loadHeight(writableDB) - override def safeRollbackHeight: Int = readOnly(_.get(Keys.safeRollbackHeight)) + override def safeRollbackHeight: Int = writableDB.get(Keys.safeRollbackHeight) - override protected def loadScore(): BigInt = readOnly(db => db.get(Keys.score(db.get(Keys.height)))) + override protected def loadBlockMeta(height: Height): Option[PBBlockMeta] = + writableDB.get(Keys.blockMetaAt(height)) - override protected def loadLastBlock(): Option[Block] = readOnly { db => - loadBlock(Height(db.get(Keys.height)), db) - } + override protected def 
loadTxs(height: Height): Seq[Transaction] = + loadTransactions(height, rdb).map(_._2) override protected def loadScript(address: Address): Option[AccountScriptInfo] = readOnly { db => addressId(address).fold(Option.empty[AccountScriptInfo]) { addressId => @@ -221,16 +163,10 @@ abstract class LevelDBWriter private[database] ( db.fromHistory(Keys.assetScriptHistory(asset), Keys.assetScriptPresent(asset)).flatten.nonEmpty } - override def carryFee: Long = readOnly(_.get(Keys.carryFee(height))) + override def carryFee: Long = writableDB.get(Keys.carryFee(height)) - override protected def loadAccountData(address: Address, key: String): Option[DataEntry[?]] = - loadWithFilter(dataKeyFilter, Keys.dataHistory(address, key)) { (ro, history) => - for { - aid <- addressId(address) - h <- history.headOption - e <- ro.get(Keys.data(aid, key)(h)) - } yield e - } + override protected def loadAccountData(address: Address, key: String): CurrentData = + writableDB.get(Keys.data(address, key)) override def hasData(address: Address): Boolean = { writableDB.readOnly { ro => @@ -240,54 +176,117 @@ abstract class LevelDBWriter private[database] ( } } - protected override def loadBalance(req: (Address, Asset)): Long = - addressId(req._1).fold(0L) { addressId => + protected override def loadBalance(req: (Address, Asset)): CurrentBalance = + addressId(req._1).fold(CurrentBalance.Unavailable) { addressId => req._2 match { case asset @ IssuedAsset(_) => - val kabh = Keys.assetBalanceHistory(addressId, asset) - if (assetBalanceFilter.mightContain(kabh.suffix)) - writableDB.readOnly(_.fromHistory(kabh, Keys.assetBalance(addressId, asset))).getOrElse(0L) - else 0L + writableDB.get(Keys.assetBalance(addressId, asset)) case Waves => - val kwbh = Keys.wavesBalanceHistory(addressId) - if (wavesBalanceFilter.mightContain(kwbh.suffix)) - writableDB.readOnly(_.fromHistory(kwbh, Keys.wavesBalance(addressId))).getOrElse(0L) - else 0L + writableDB.get(Keys.wavesBalance(addressId)) + } + } + + override protected def loadBalances(req: Seq[(Address, Asset)]): Map[(Address, Asset), CurrentBalance] = readOnly { ro => + val addrToId = addressIds(req.map(_._1)).collect { case (address, Some(aid)) => + address -> aid + } + + val reqWithKeys = req.flatMap { case (address, asset) => + addrToId.get(address).map { aid => + (address, asset) -> (asset match { + case Waves => Keys.wavesBalance(aid) + case issuedAsset: IssuedAsset => Keys.assetBalance(aid, issuedAsset) + }) } } - private def loadLeaseBalance(db: ReadOnlyDB, addressId: AddressId): LeaseBalance = - db.fromHistory(Keys.leaseBalanceHistory(addressId), Keys.leaseBalance(addressId)).getOrElse(LeaseBalance.empty) + val addressAssetToBalance = reqWithKeys + .zip(ro.multiGet(reqWithKeys.map(_._2), 16)) + .collect { case (((address, asset), _), Some(balance)) => + (address, asset) -> balance + } + .toMap + + req.map { key => + key -> addressAssetToBalance.getOrElse(key, CurrentBalance.Unavailable) + }.toMap + } + + protected override def loadWavesBalances(req: Seq[(Address, Asset)]): Map[(Address, Asset), CurrentBalance] = readOnly { ro => + val addrToId = addressIds(req.map(_._1)) + val addrIds = addrToId.collect { case (_, Some(aid)) => aid }.toSeq + + val idToBalance = addrIds + .zip( + ro.multiGet( + addrIds.map { addrId => + Keys.wavesBalance(addrId) + }, + 16 + ) + ) + .toMap + + req.map { case (address, asset) => + (address, asset) -> addrToId.get(address).flatMap(_.flatMap(idToBalance.get)).flatten.getOrElse(CurrentBalance.Unavailable) + }.toMap + } + + private def 
loadLeaseBalance(db: ReadOnlyDB, addressId: AddressId): CurrentLeaseBalance = + db.get(Keys.leaseBalance(addressId)) + + override protected def loadLeaseBalance(address: Address): CurrentLeaseBalance = readOnly { db => + addressId(address).fold(CurrentLeaseBalance.Unavailable)(loadLeaseBalance(db, _)) + } + + override protected def loadLeaseBalances(addresses: Seq[Address]): Map[Address, CurrentLeaseBalance] = readOnly { ro => + val addrToId = addressIds(addresses) + val addrIds = addrToId.collect { case (_, Some(aid)) => aid }.toSeq + + val idToBalance = addrIds + .zip( + ro.multiGet( + addrIds.map { addrId => + Keys.leaseBalance(addrId) + }, + 24 + ) + ) + .toMap - override protected def loadLeaseBalance(address: Address): LeaseBalance = readOnly { db => - addressId(address).fold(LeaseBalance.empty)(loadLeaseBalance(db, _)) + addresses.map { address => + address -> addrToId.get(address).flatMap(_.flatMap(idToBalance.get)).flatten.getOrElse(CurrentLeaseBalance.Unavailable) + }.toMap } override protected def loadAssetDescription(asset: IssuedAsset): Option[AssetDescription] = writableDB.withResource(r => database.loadAssetDescription(r, asset)) - override protected def loadVolumeAndFee(orderId: ByteStr): VolumeAndFee = - loadWithFilter(orderFilter, Keys.filledVolumeAndFeeHistory(orderId)) { (ro, history) => - history.headOption.map(h => ro.get(Keys.filledVolumeAndFee(orderId)(h))) - }.orEmpty + override protected def loadVolumeAndFee(orderId: ByteStr): CurrentVolumeAndFee = writableDB.get(Keys.filledVolumeAndFee(orderId)) - override protected def loadApprovedFeatures(): Map[Short, Int] = { - readOnly(_.get(Keys.approvedFeatures)) + override protected def loadVolumesAndFees(orders: Seq[ByteStr]): Map[ByteStr, CurrentVolumeAndFee] = readOnly { ro => + orders.view + .zip(ro.multiGet(orders.map(Keys.filledVolumeAndFee), 24)) + .map { case (id, v) => id -> v.getOrElse(CurrentVolumeAndFee.Unavailable) } + .toMap } + override protected def loadApprovedFeatures(): Map[Short, Int] = + writableDB.get(Keys.approvedFeatures) + override protected def loadActivatedFeatures(): Map[Short, Int] = { - val stateFeatures = readOnly(_.get(Keys.activatedFeatures)) + val stateFeatures = writableDB.get(Keys.activatedFeatures) stateFeatures ++ settings.functionalitySettings.preActivatedFeatures } - override def wavesAmount(height: Int): BigInt = readOnly { db => - val factHeight = height.min(this.height) - if (db.has(Keys.wavesAmount(factHeight))) db.get(Keys.wavesAmount(factHeight)) + override def wavesAmount(height: Int): BigInt = + if (this.isFeatureActivated(BlockchainFeatures.BlockReward, height)) + loadBlockMeta(Height(height)).fold(settings.genesisSettings.initialBalance)(_.totalWavesAmount) else settings.genesisSettings.initialBalance - } override def blockReward(height: Int): Option[Long] = - readOnly(_.db.get(Keys.blockReward(height))) + if (this.isFeatureActivated(BlockchainFeatures.BlockReward, height)) loadBlockMeta(Height(height)).map(_.reward) + else None private def updateHistory(rw: RW, key: Key[Seq[Int]], threshold: Int, kf: Int => Key[?]): Seq[Array[Byte]] = updateHistory(rw, rw.get(key), key, threshold, kf) @@ -299,45 +298,29 @@ abstract class LevelDBWriter private[database] ( } private def appendBalances( - balances: Map[AddressId, Map[Asset, Long]], + balances: Map[(AddressId, Asset), (CurrentBalance, BalanceNode)], issuedAssets: Map[IssuedAsset, NewAssetInfo], - rw: RW, - threshold: Int, - balanceThreshold: Int + rw: RW ): Unit = { val changedAssetBalances = 
MultimapBuilder.hashKeys().hashSetValues().build[IssuedAsset, java.lang.Long]() val updatedNftLists = MultimapBuilder.hashKeys().linkedHashSetValues().build[java.lang.Long, IssuedAsset]() - for ((addressId, updatedBalances) <- balances) { - for ((asset, balance) <- updatedBalances) { - asset match { - case Waves => - rw.put(Keys.wavesBalance(addressId)(height), balance) - val kwbh = Keys.wavesBalanceHistory(addressId) - if (wavesBalanceFilter.mightContain(kwbh.suffix)) - updateHistory(rw, kwbh, balanceThreshold, Keys.wavesBalance(addressId)).foreach(rw.delete) - else { - rw.put(kwbh, Seq(height)) - wavesBalanceFilter.put(kwbh.suffix) - } - case a: IssuedAsset => - changedAssetBalances.put(a, addressId.toLong) - rw.put(Keys.assetBalance(addressId, a)(height), balance) - val kabh = Keys.assetBalanceHistory(addressId, a) - val isNFT = balance > 0 && issuedAssets - .get(a) - .map(_.static.nft) - .orElse(assetDescription(a).map(_.nft)) - .getOrElse(false) - if (assetBalanceFilter.mightContain(kabh.suffix)) { - if (rw.get(kabh).isEmpty && isNFT) updatedNftLists.put(addressId.toLong, a) - updateHistory(rw, kabh, threshold, Keys.assetBalance(addressId, a)).foreach(rw.delete) - } else { - rw.put(kabh, Seq(height)) - assetBalanceFilter.put(kabh.suffix) - if (isNFT) updatedNftLists.put(addressId.toLong, a) - } - } + for (((addressId, asset), (currentBalance, balanceNode)) <- balances) { + asset match { + case Waves => + rw.put(Keys.wavesBalance(addressId), currentBalance) + rw.put(Keys.wavesBalanceAt(addressId, currentBalance.height), balanceNode) + case a: IssuedAsset => + changedAssetBalances.put(a, addressId.toLong) + rw.put(Keys.assetBalance(addressId, a), currentBalance) + rw.put(Keys.assetBalanceAt(addressId, a, currentBalance.height), balanceNode) + + val isNFT = currentBalance.balance > 0 && issuedAssets + .get(a) + .map(_.static.nft) + .orElse(assetDescription(a).map(_.nft)) + .getOrElse(false) + if (currentBalance.prevHeight == Height(0) && isNFT) updatedNftLists.put(addressId.toLong, a) } } @@ -355,36 +338,79 @@ abstract class LevelDBWriter private[database] ( } } + private def appendData(newAddresses: Map[Address, AddressId], data: Map[(Address, String), (CurrentData, DataNode)], rw: RW): Unit = { + val changedKeys = MultimapBuilder.hashKeys().hashSetValues().build[AddressId, String]() + + for (((address, key), (currentData, dataNode)) <- data) { + val addressId = addressIdWithFallback(address, newAddresses) + changedKeys.put(addressId, key) + + val kdh = Keys.data(address, key) + rw.put(kdh, currentData) + rw.put(Keys.dataAt(addressId, key)(height), dataNode) + } + + changedKeys.asMap().forEach { (addressId, keys) => + rw.put(Keys.changedDataKeys(height, addressId), keys.asScala.toSeq) + } + } + + // todo: instead of fixed-size block batches, store fixed-time batches + private val BlockStep = 200 + private def mkFilter() = BloomFilter.create[Array[Byte]](Funnels.byteArrayFunnel(), BlockStep * bfBlockInsertions, 0.01f) + private def initFilters(): (BloomFilter[Array[Byte]], BloomFilter[Array[Byte]]) = { + def loadFilter(heights: Seq[Int]): BloomFilter[Array[Byte]] = { + val filter = mkFilter() + heights.filter(_ > 0).foreach { h => + loadTransactions(Height(h), rdb).foreach { case (_, tx) => filter.put(tx.id().arr) } + } + filter + } + + val lastFilterStart = (height / BlockStep) * BlockStep + 1 + val prevFilterStart = lastFilterStart - BlockStep + val (bf0Heights, bf1Heights) = if ((height / BlockStep) % 2 == 0) { + (lastFilterStart to height, prevFilterStart until lastFilterStart) + } 
else { + (prevFilterStart until lastFilterStart, lastFilterStart to height) + } + (loadFilter(bf0Heights), loadFilter(bf1Heights)) + } + + private var (bf0, bf1) = initFilters() + + override def containsTransaction(tx: Transaction): Boolean = + (bf0.mightContain(tx.id().arr) || bf1.mightContain(tx.id().arr)) && { + writableDB.get(Keys.transactionMetaById(TransactionId(tx.id()), rdb.txMetaHandle)).isDefined + } + // noinspection ScalaStyle override protected def doAppend( - block: Block, + blockMeta: PBBlockMeta, carry: Long, newAddresses: Map[Address, AddressId], - balances: Map[AddressId, Map[Asset, Long]], - leaseBalances: Map[AddressId, LeaseBalance], + balances: Map[(AddressId, Asset), (CurrentBalance, BalanceNode)], + leaseBalances: Map[AddressId, (CurrentLeaseBalance, LeaseBalanceNode)], addressTransactions: util.Map[AddressId, util.Collection[TransactionId]], leaseStates: Map[ByteStr, LeaseDetails], issuedAssets: VectorMap[IssuedAsset, NewAssetInfo], updatedAssets: Map[IssuedAsset, Ior[AssetInfo, AssetVolumeInfo]], - filledQuantity: Map[ByteStr, VolumeAndFee], + filledQuantity: Map[ByteStr, (CurrentVolumeAndFee, VolumeAndFeeNode)], scripts: Map[AddressId, Option[AccountScriptInfo]], assetScripts: Map[IssuedAsset, Option[AssetScriptInfo]], - data: Map[Address, AccountDataInfo], + data: Map[(Address, String), (CurrentData, DataNode)], aliases: Map[Alias, AddressId], sponsorship: Map[IssuedAsset, Sponsorship], - totalFee: Long, - reward: Option[Long], - hitSource: ByteStr, scriptResults: Map[ByteStr, InvokeScriptResult], transactionMeta: Seq[(TxMeta, Transaction)], stateHash: StateHashBuilder.Result, ethereumTransactionMeta: Map[ByteStr, EthereumTransactionMeta] ): Unit = { - log.trace(s"Persisting block ${block.id()} at height $height") + log.trace(s"Persisting block ${blockMeta.id} at height $height") readWrite { rw => val expiredKeys = new ArrayBuffer[Array[Byte]] - rw.put(Keys.height, height) + rw.put(Keys.height, Height(height)) val previousSafeRollbackHeight = rw.get(Keys.safeRollbackHeight) val newSafeRollbackHeight = height - dbSettings.maxRollbackDepth @@ -397,42 +423,37 @@ abstract class LevelDBWriter private[database] ( TransactionId(tx.id()) -> ((tm, tx, TxNum(idx.toShort))) }.toMap - rw.put( - Keys.blockMetaAt(Height(height)), - Some( - BlockMeta.fromBlock(block, height, totalFee, reward, if (block.header.version >= Block.ProtoBlockVersion) Some(hitSource) else None) - ) - ) - rw.put(Keys.heightOf(block.id()), Some(height)) + rw.put(Keys.blockMetaAt(Height(height)), Some(blockMeta)) + rw.put(Keys.heightOf(blockMeta.id), Some(height)) + blockHeightCache.put(blockMeta.id, Some(height)) val lastAddressId = loadMaxAddressId() + newAddresses.size rw.put(Keys.lastAddressId, Some(lastAddressId)) - rw.put(Keys.score(height), rw.get(Keys.score(height - 1)) + block.blockScore()) for ((address, id) <- newAddresses) { - rw.put(Keys.addressId(address), Some(id)) + val kaid = Keys.addressId(address) + rw.put(kaid, Some(id)) rw.put(Keys.idToAddress(id), address) } - val threshold = newSafeRollbackHeight - val balanceThreshold = height - balanceSnapshotMaxRollbackDepth + val threshold = newSafeRollbackHeight - appendBalances(balances, issuedAssets, rw, threshold, balanceThreshold) + appendBalances(balances, issuedAssets, rw) + appendData(newAddresses, data, rw) - val changedAddresses = (addressTransactions.asScala.keys ++ balances.keys).toSet + val changedAddresses = (addressTransactions.asScala.keys ++ balances.keys.map(_._1)).toSet rw.put(Keys.changedAddresses(height), 
changedAddresses.toSeq) // leases - for ((addressId, leaseBalance) <- leaseBalances) { - rw.put(Keys.leaseBalance(addressId)(height), leaseBalance) - expiredKeys ++= updateHistory(rw, Keys.leaseBalanceHistory(addressId), balanceThreshold, Keys.leaseBalance(addressId)) + for ((addressId, (currentLeaseBalance, leaseBalanceNode)) <- leaseBalances) { + rw.put(Keys.leaseBalance(addressId), currentLeaseBalance) + rw.put(Keys.leaseBalanceAt(addressId, currentLeaseBalance.height), leaseBalanceNode) } - for ((orderId, volumeAndFee) <- filledQuantity) { - orderFilter.put(orderId.arr) - rw.put(Keys.filledVolumeAndFee(orderId)(height), volumeAndFee) - expiredKeys ++= updateHistory(rw, Keys.filledVolumeAndFeeHistory(orderId), threshold, Keys.filledVolumeAndFee(orderId)) + for ((orderId, (currentVolumeAndFee, volumeAndFeeNode)) <- filledQuantity) { + rw.put(Keys.filledVolumeAndFee(orderId), currentVolumeAndFee) + rw.put(Keys.filledVolumeAndFeeAt(orderId, currentVolumeAndFee.height), volumeAndFeeNode) } for (((asset, NewAssetInfo(staticInfo, info, volumeInfo)), assetNum) <- issuedAssets.zipWithIndex) { @@ -481,36 +502,45 @@ abstract class LevelDBWriter private[database] ( if (script.isDefined) rw.put(Keys.assetScript(asset)(height), script) } - for ((address, addressData) <- data) { - val addressId = addressIdWithFallback(address, newAddresses) - rw.put(Keys.changedDataKeys(height, addressId), addressData.data.keys.toSeq) - - for ((key, value) <- addressData.data) { - val kdh = Keys.dataHistory(address, key) - rw.put(Keys.data(addressId, key)(height), Some(value)) - dataKeyFilter.put(kdh.suffix) - expiredKeys ++= updateHistory(rw, kdh, threshold, Keys.data(addressId, key)) + if (height % BlockStep == 1) { + if ((height / BlockStep) % 2 == 0) { + bf0 = mkFilter() + } else { + bf1 = mkFilter() } } - if (dbSettings.storeTransactionsByAddress) for ((addressId, txIds) <- addressTransactions.asScala) { - val kk = Keys.addressTransactionSeqNr(addressId) - val nextSeqNr = rw.get(kk) + 1 - val txTypeNumSeq = txIds.asScala.map { txId => - val (_, tx, num) = transactions(txId) - (tx.tpe.id.toByte, num) - }.toSeq - rw.put(Keys.addressTransactionHN(addressId, nextSeqNr), Some((Height(height), txTypeNumSeq.sortBy(-_._2)))) - rw.put(kk, nextSeqNr) + val targetBf = if ((height / BlockStep) % 2 == 0) bf0 else bf1 + + val txSizes = transactions.map { case (id, (txm, tx, num)) => + val size = rw.put(Keys.transactionAt(Height(height), num, rdb.txHandle), Some((txm, tx))) + + targetBf.put(tx.id().arr) + + rw.put(Keys.transactionMetaById(id, rdb.txMetaHandle), Some(TransactionMeta(height, num, tx.tpe.id, !txm.succeeded, 0, size))) + id -> size } - for ((alias, addressId) <- aliases) { - rw.put(Keys.addressIdOfAlias(alias), Some(addressId)) + if (dbSettings.storeTransactionsByAddress) { + val addressTxs = addressTransactions.asScala.toSeq.map { case (aid, txIds) => + (aid, txIds, Keys.addressTransactionSeqNr(aid)) + } + rw.multiGetInts(addressTxs.map(_._3)) + .zip(addressTxs) + .foreach { case (prevSeqNr, (addressId, txIds, txSeqNrKey)) => + val nextSeqNr = prevSeqNr.getOrElse(0) + 1 + val txTypeNumSeq = txIds.asScala.map { txId => + val (_, tx, num) = transactions(txId) + val size = txSizes(txId) + (tx.tpe.id.toByte, num, size) + }.toSeq + rw.put(Keys.addressTransactionHN(addressId, nextSeqNr), Some((Height(height), txTypeNumSeq.sortBy(-_._2)))) + rw.put(txSeqNrKey, nextSeqNr) + } } - for ((id, (txm, tx, num)) <- transactions) { - rw.put(Keys.transactionAt(Height(height), num), Some((txm, tx))) - 
rw.put(Keys.transactionMetaById(id), Some(TransactionMeta(height, num, tx.tpe.id, !txm.succeeded))) + for ((alias, addressId) <- aliases) { + rw.put(Keys.addressIdOfAlias(alias), Some(addressId)) } val activationWindowSize = settings.functionalitySettings.activationWindowSize(height) @@ -519,7 +549,7 @@ abstract class LevelDBWriter private[database] ( val newlyApprovedFeatures = featureVotes(height) .filterNot { case (featureId, _) => settings.functionalitySettings.preActivatedFeatures.contains(featureId) } .collect { - case (featureId, voteCount) if voteCount + (if (block.header.featureVotes.contains(featureId)) 1 else 0) >= minVotes => + case (featureId, voteCount) if voteCount + (if (blockMeta.getHeader.featureVotes.contains(featureId.toInt)) 1 else 0) >= minVotes => featureId -> height } @@ -534,11 +564,6 @@ abstract class LevelDBWriter private[database] ( } } - reward.foreach { lastReward => - rw.put(Keys.blockReward(height), Some(lastReward)) - rw.put(Keys.wavesAmount(height), wavesAmount(height - 1) + lastReward) - } - for (case sp <- sponsorship) sp match { case (asset, value: SponsorshipValue) => @@ -554,13 +579,11 @@ abstract class LevelDBWriter private[database] ( rw.put(Keys.carryFee(height), carry) expiredKeys += Keys.carryFee(threshold - 1).keyBytes - rw.put(Keys.blockTransactionsFee(height), totalFee) - if (dbSettings.storeInvokeScriptResults) scriptResults.foreach { case (txId, result) => val (txHeight, txNum) = transactions .get(TransactionId(txId)) .map { case (_, _, txNum) => (height, txNum) } - .orElse(rw.get(Keys.transactionMetaById(TransactionId(txId))).map { tm => + .orElse(rw.get(Keys.transactionMetaById(TransactionId(txId), rdb.txMetaHandle)).map { tm => (tm.height, TxNum(tm.num.toShort)) }) .getOrElse(throw new IllegalArgumentException(s"Couldn't find transaction height and num: $txId")) @@ -576,14 +599,12 @@ abstract class LevelDBWriter private[database] ( rw.put(Keys.ethereumTransactionMeta(Height(height), transactions(TransactionId(id))._3), Some(meta)) } - expiredKeys.foreach(rw.delete(_, "expired-keys")) + expiredKeys.foreach(rw.delete) if (DisableHijackedAliases.height == height) { disabledAliases = DisableHijackedAliases(rw) } - rw.put(Keys.hitSource(height), Some(hitSource)) - if (dbSettings.storeStateHashes) { val prevStateHash = if (height == 1) ByteStr.empty @@ -600,7 +621,7 @@ abstract class LevelDBWriter private[database] ( } } - log.trace(s"Finished persisting block ${block.id()} at height $height") + log.trace(s"Finished persisting block ${blockMeta.id} at height $height") } override protected def doRollback(targetHeight: Int): Seq[(Block, ByteStr)] = { @@ -612,15 +633,16 @@ abstract class LevelDBWriter private[database] ( val discardedBlocks: Seq[(Block, ByteStr)] = for (currentHeightInt <- height until targetHeight by -1; currentHeight = Height(currentHeightInt)) yield { - val balancesToInvalidate = Seq.newBuilder[(Address, Asset)] - val ordersToInvalidate = Seq.newBuilder[ByteStr] - val scriptsToDiscard = Seq.newBuilder[Address] - val assetScriptsToDiscard = Seq.newBuilder[IssuedAsset] - val accountDataToInvalidate = Seq.newBuilder[(Address, String)] - val aliasesToInvalidate = Seq.newBuilder[Alias] + val balancesToInvalidate = Seq.newBuilder[(Address, Asset)] + val ordersToInvalidate = Seq.newBuilder[ByteStr] + val scriptsToDiscard = Seq.newBuilder[Address] + val assetScriptsToDiscard = Seq.newBuilder[IssuedAsset] + val accountDataToInvalidate = Seq.newBuilder[(Address, String)] + val aliasesToInvalidate = Seq.newBuilder[Alias] + val 
blockHeightsToInvalidate = Seq.newBuilder[ByteStr] val discardedBlock = readWrite { rw => - rw.put(Keys.height, currentHeight - 1) + rw.put(Keys.height, Height(currentHeight - 1)) val discardedMeta = rw .get(Keys.blockMetaAt(currentHeight)) @@ -635,14 +657,8 @@ abstract class LevelDBWriter private[database] ( rw.iterateOver(KeyTags.ChangedAssetBalances.prefixBytes ++ Ints.toByteArray(currentHeight)) { e => val assetId = IssuedAsset(ByteStr(e.getKey.takeRight(32))) for ((addressId, address) <- changedAddresses) { - val kabh = Keys.assetBalanceHistory(addressId, assetId) - val history = rw.get(kabh) - if (history.nonEmpty && history.head == currentHeight) { - log.trace(s"Discarding ${assetId.id} balance for $address at $currentHeight") - balancesToInvalidate += address -> assetId - rw.delete(Keys.assetBalance(addressId, assetId)(history.head)) - rw.put(kabh.keyBytes, writeIntSeq(history.tail)) - } + balancesToInvalidate += address -> assetId + rollbackBalanceHistory(rw, Keys.assetBalance(addressId, assetId), Keys.assetBalanceAt(addressId, assetId, _), currentHeight) } } @@ -650,17 +666,16 @@ abstract class LevelDBWriter private[database] ( for (k <- rw.get(Keys.changedDataKeys(currentHeight, addressId))) { log.trace(s"Discarding $k for $address at $currentHeight") accountDataToInvalidate += (address -> k) - rw.delete(Keys.data(addressId, k)(currentHeight)) - rw.filterHistory(Keys.dataHistory(address, k), currentHeight) + + rw.delete(Keys.dataAt(addressId, k)(currentHeight)) + rollbackDataHistory(rw, Keys.data(address, k), Keys.dataAt(addressId, k)(_), currentHeight) } rw.delete(Keys.changedDataKeys(currentHeight, addressId)) balancesToInvalidate += (address -> Waves) - rw.delete(Keys.wavesBalance(addressId)(currentHeight)) - rw.filterHistory(Keys.wavesBalanceHistory(addressId), currentHeight) + rollbackBalanceHistory(rw, Keys.wavesBalance(addressId), Keys.wavesBalanceAt(addressId, _), currentHeight) - rw.delete(Keys.leaseBalance(addressId)(currentHeight)) - rw.filterHistory(Keys.leaseBalanceHistory(addressId), currentHeight) + rollbackLeaseBalance(rw, addressId, currentHeight) balanceAtHeightCache.invalidate((currentHeight, addressId)) leaseBalanceAtHeightCache.invalidate((currentHeight, addressId)) @@ -684,9 +699,9 @@ abstract class LevelDBWriter private[database] ( rollbackAssetsInfo(rw, currentHeight) - loadTransactions(currentHeight, rw).view.zipWithIndex.foreach { case ((_, tx), idx) => + val blockTxs = loadTransactions(currentHeight, rdb) + blockTxs.view.zipWithIndex.foreach { case ((_, tx), idx) => val num = TxNum(idx.toShort) - forgetTransaction(tx.id()) (tx: @unchecked) match { case _: GenesisTransaction => // genesis transaction can not be rolled back case _: PaymentTransaction | _: TransferTransaction | _: MassTransferTransaction => @@ -727,30 +742,32 @@ abstract class LevelDBWriter private[database] ( } if (tx.tpe != TransactionType.Genesis) { - rw.delete(Keys.transactionAt(currentHeight, num)) - rw.delete(Keys.transactionMetaById(TransactionId(tx.id()))) + rw.delete(Keys.transactionAt(currentHeight, num, rdb.txHandle)) + rw.delete(Keys.transactionMetaById(TransactionId(tx.id()), rdb.txMetaHandle)) } } rw.delete(Keys.blockMetaAt(currentHeight)) - rw.delete(Keys.score(currentHeight)) rw.delete(Keys.changedAddresses(currentHeight)) rw.delete(Keys.heightOf(discardedMeta.id)) + blockHeightsToInvalidate.addOne(discardedMeta.id) rw.delete(Keys.carryFee(currentHeight)) rw.delete(Keys.blockTransactionsFee(currentHeight)) - rw.delete(Keys.blockReward(currentHeight)) - 
rw.delete(Keys.wavesAmount(currentHeight)) rw.delete(Keys.stateHash(currentHeight)) - rw.delete(Keys.hitSource(currentHeight)) if (DisableHijackedAliases.height == currentHeight) { disabledAliases = DisableHijackedAliases.revert(rw) } - val hitSource = rw.get(Keys.hitSource(currentHeight)).get - val block = createBlock(discardedMeta.header, discardedMeta.signature, loadTransactions(currentHeight, rw).map(_._2)).explicitGet() + val block = createBlock( + PBBlocks.vanilla( + discardedMeta.header.getOrElse(throw new IllegalArgumentException(s"Block header is missing at height ${currentHeight.toInt}")) + ), + ByteStr(discardedMeta.signature.toByteArray), + blockTxs.map(_._2) + ).explicitGet() - (block, hitSource) + (block, Caches.toHitSource(discardedMeta)) } balancesToInvalidate.result().foreach(discardBalance) @@ -759,6 +776,7 @@ abstract class LevelDBWriter private[database] ( assetScriptsToDiscard.result().foreach(discardAssetScript) accountDataToInvalidate.result().foreach(discardAccountData) aliasesToInvalidate.result().foreach(discardAlias) + blockHeightsToInvalidate.result().foreach(discardBlockHeight) discardedBlock } @@ -766,6 +784,24 @@ abstract class LevelDBWriter private[database] ( discardedBlocks.reverse } + private def rollbackDataHistory(rw: RW, currentDataKey: Key[CurrentData], dataNodeKey: Height => Key[DataNode], currentHeight: Height): Unit = { + val currentData = rw.get(currentDataKey) + if (currentData.height == currentHeight) { + val prevDataNode = rw.get(dataNodeKey(currentData.prevHeight)) + rw.delete(dataNodeKey(currentHeight)) + rw.put(currentDataKey, CurrentData(prevDataNode.entry, currentData.prevHeight, prevDataNode.prevHeight)) + } + } + + private def rollbackBalanceHistory(rw: RW, curBalanceKey: Key[CurrentBalance], balanceNodeKey: Height => Key[BalanceNode], height: Height): Unit = { + val balance = rw.get(curBalanceKey) + if (balance.height == height) { + val prevBalanceNode = rw.get(balanceNodeKey(balance.prevHeight)) + rw.delete(balanceNodeKey(height)) + rw.put(curBalanceKey, CurrentBalance(prevBalanceNode.balance, balance.prevHeight, prevBalanceNode.prevHeight)) + } + } + private def rollbackAssetsInfo(rw: RW, currentHeight: Int): Unit = { val issuedKey = Keys.issuedAssets(currentHeight) val updatedKey = Keys.updatedAssets(currentHeight) @@ -796,12 +832,29 @@ abstract class LevelDBWriter private[database] ( } } - private def rollbackOrderFill(rw: RW, orderId: ByteStr, currentHeight: Int): ByteStr = { - rw.delete(Keys.filledVolumeAndFee(orderId)(currentHeight)) - rw.filterHistory(Keys.filledVolumeAndFeeHistory(orderId), currentHeight) + private def rollbackOrderFill(rw: RW, orderId: ByteStr, height: Height): ByteStr = { + val curVfKey = Keys.filledVolumeAndFee(orderId) + val vf = rw.get(curVfKey) + if (vf.height == height) { + val vfNodeKey = Keys.filledVolumeAndFeeAt(orderId, _) + val prevVfNode = rw.get(vfNodeKey(vf.prevHeight)) + rw.delete(vfNodeKey(height)) + rw.put(curVfKey, CurrentVolumeAndFee(prevVfNode.volume, prevVfNode.fee, vf.prevHeight, prevVfNode.prevHeight)) + } orderId } + private def rollbackLeaseBalance(rw: RW, addressId: AddressId, height: Height): Unit = { + val curLbKey = Keys.leaseBalance(addressId) + val lb = rw.get(curLbKey) + if (lb.height == height) { + val lbNodeKey = Keys.leaseBalanceAt(addressId, _) + val prevLbNode = rw.get(lbNodeKey(lb.prevHeight)) + rw.delete(lbNodeKey(height)) + rw.put(curLbKey, CurrentLeaseBalance(prevLbNode.in, prevLbNode.out, lb.prevHeight, prevLbNode.prevHeight)) + } + } + private def 
rollbackLeaseStatus(rw: RW, leaseId: ByteStr, currentHeight: Int): Unit = { rw.delete(Keys.leaseDetails(leaseId)(currentHeight)) rw.filterHistory(Keys.leaseDetailsHistory(leaseId), currentHeight) @@ -809,10 +862,10 @@ abstract class LevelDBWriter private[database] ( override def transferById(id: ByteStr): Option[(Int, TransferTransactionLike)] = readOnly { db => for { - tm <- db.get(Keys.transactionMetaById(TransactionId @@ id)) + tm <- db.get(Keys.transactionMetaById(TransactionId @@ id, rdb.txMetaHandle)) if tm.`type` == TransferTransaction.typeId || tm.`type` == TransactionType.Ethereum.id tx <- db - .get(Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort))) + .get(Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort), rdb.txHandle)) .collect { case (tm, t: TransferTransaction) if tm.succeeded => t case (m, e @ EthereumTransaction(_: Transfer, _, _, _)) if m.succeeded => @@ -827,14 +880,24 @@ abstract class LevelDBWriter private[database] ( override def transactionInfo(id: ByteStr): Option[(TxMeta, Transaction)] = readOnly(transactionInfo(id, _)) + override def transactionInfos(ids: Seq[ByteStr]): Seq[Option[(TxMeta, Transaction)]] = readOnly { db => + val tms = db.multiGetOpt(ids.map(id => Keys.transactionMetaById(TransactionId(id), rdb.txMetaHandle)), 36) + val (keys, sizes) = tms.map { + case Some(tm) => Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort), rdb.txHandle) -> tm.size + case None => Keys.transactionAt(Height(0), TxNum(0.toShort), rdb.txHandle) -> 0 + }.unzip + + db.multiGetOpt(keys, sizes) + } + protected def transactionInfo(id: ByteStr, db: ReadOnlyDB): Option[(TxMeta, Transaction)] = for { - tm <- db.get(Keys.transactionMetaById(TransactionId(id))) - (txm, tx) <- db.get(Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort))) + tm <- db.get(Keys.transactionMetaById(TransactionId(id), rdb.txMetaHandle)) + (txm, tx) <- db.get(Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort), rdb.txHandle)) } yield (txm, tx) - override def transactionMeta(id: ByteStr): Option[TxMeta] = readOnly { db => - db.get(Keys.transactionMetaById(TransactionId(id))).map { tm => + override def transactionMeta(id: ByteStr): Option[TxMeta] = { + writableDB.get(Keys.transactionMetaById(TransactionId(id), rdb.txMetaHandle)).map { tm => TxMeta(Height(tm.height), !tm.failed, tm.spentComplexity) } } @@ -848,6 +911,8 @@ abstract class LevelDBWriter private[database] ( .map(addressId => db.get(Keys.idToAddress(addressId))) } + override protected def loadBlockHeight(blockId: BlockId): Option[Int] = readOnly(_.get(Keys.heightOf(blockId))) + override def leaseDetails(leaseId: ByteStr): Option[LeaseDetails] = readOnly { db => for { h <- db.get(Keys.leaseDetailsHistory(leaseId)).headOption @@ -877,59 +942,78 @@ abstract class LevelDBWriter private[database] ( .newBuilder() .maximumSize(100000) .recordStats() - .build[(Int, AddressId), java.lang.Long]() + .build[(Int, AddressId), BalanceNode]() private val leaseBalanceAtHeightCache = CacheBuilder .newBuilder() .maximumSize(100000) .recordStats() - .build[(Int, AddressId), LeaseBalance]() + .build[(Int, AddressId), LeaseBalanceNode]() override def balanceAtHeight(address: Address, height: Int, assetId: Asset = Waves): Option[(Int, Long)] = readOnly { db => - db.get(Keys.addressId(address)).flatMap { addressId => - assetId match { - case Waves => - closest(db.get(Keys.wavesBalanceHistory(addressId)), height).map { wh => - val b: Long = db.get(Keys.wavesBalance(addressId)(wh)) - (wh, b) - } - case asset @ IssuedAsset(_) => - 
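Not part of the patch: with the dense per-height history sequences gone, the balanceAtHeight/balanceSnapshots changes just below answer point-in-time queries by walking prevHeight links backwards from the current record until they reach a node at or below the requested height. A worked example with a plain Map standing in for the per-height nodes; the heights and values are made up.

// Mirrors the @tailrec getBalanceAtHeight below: each entry maps height -> (value, prevHeight).
import scala.annotation.tailrec

@tailrec
def valueAt(nodes: Map[Int, (Long, Int)], startHeight: Int, height: Int): (Int, Long) = {
  val (value, prevHeight) = nodes(startHeight)
  if (startHeight <= height) (startHeight, value)
  else valueAt(nodes, prevHeight, height)
}

// valueAt(Map(12 -> (100L, 9), 9 -> (70L, 5), 5 -> (30L, 0), 0 -> (0L, 0)), startHeight = 12, height = 10)
// walks 12 -> 9 and returns (9, 70L): the balance as of height 10 was last set at height 9.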
closest(db.get(Keys.assetBalanceHistory(addressId, asset)), height).map { wh => - val b: Long = db.get(Keys.assetBalance(addressId, asset)(wh)) - (wh, b) - } + @tailrec + def getBalanceAtHeight(h: Height, key: Height => Key[BalanceNode]): (Int, Long) = { + val balance = db.get(key(h)) + if (h <= height) { + h -> balance.balance + } else { + getBalanceAtHeight(balance.prevHeight, key) } } - } - override def balanceSnapshots(address: Address, from: Int, to: Option[BlockId]): Seq[BalanceSnapshot] = readOnly { db => - db.get(Keys.addressId(address)).fold(Seq(BalanceSnapshot(1, 0, 0, 0))) { addressId => - val toHeigth = to.flatMap(this.heightOf).getOrElse(this.height) - val wbh = slice(db.get(Keys.wavesBalanceHistory(addressId)), from, toHeigth) - val lbh = slice(db.get(Keys.leaseBalanceHistory(addressId)), from, toHeigth) - for { - (wh, lh) <- merge(wbh, lbh) - wb = balanceAtHeightCache.get((wh, addressId), () => db.get(Keys.wavesBalance(addressId)(wh))) - lb = leaseBalanceAtHeightCache.get((lh, addressId), () => db.get(Keys.leaseBalance(addressId)(lh))) - } yield BalanceSnapshot(wh.max(lh), wb, lb.in, lb.out) + db.get(Keys.addressId(address)).map { aid => + val (balance, balanceNodeKey) = + assetId match { + case Waves => (db.get(Keys.wavesBalance(aid)), Keys.wavesBalanceAt(aid, _)) + case asset @ IssuedAsset(_) => (db.get(Keys.assetBalance(aid, asset)), Keys.assetBalanceAt(aid, asset, _)) + } + + if (balance.height > height) { + getBalanceAtHeight(balance.prevHeight, balanceNodeKey) + } else { + balance.height -> balance.balance + } } } - def loadScoreOf(blockId: ByteStr): Option[BigInt] = { - readOnly(db => db.get(Keys.heightOf(blockId)).map(h => db.get(Keys.score(h)))) - } + override def balanceSnapshots(address: Address, from: Int, to: Option[BlockId]): Seq[BalanceSnapshot] = readOnly { db => + addressId(address).fold(Seq(BalanceSnapshot(1, 0, 0, 0))) { addressId => + val toHeight = to.flatMap(this.heightOf).getOrElse(this.height) + + val lastBalance = balancesCache.get((address, Asset.Waves)) + val lastLeaseBalance = leaseBalanceCache.get(address) + + @tailrec + def collectBalanceHistory(acc: Vector[Int], hh: Int): Seq[Int] = + if (hh < from) + acc :+ hh + else { + val bn = balanceAtHeightCache.get((hh, addressId), () => db.get(Keys.wavesBalanceAt(addressId, Height(hh)))) + val newAcc = if (hh > toHeight) acc else acc :+ hh + collectBalanceHistory(newAcc, bn.prevHeight) + } - override def loadBlockInfo(height: Int): Option[SignedBlockHeader] = { - writableDB.get(Keys.blockMetaAt(Height(height))).map(_.toSignedHeader) - } + @tailrec + def collectLeaseBalanceHistory(acc: Vector[Int], hh: Int): Seq[Int] = + if (hh < from) + acc :+ hh + else { + val lbn = leaseBalanceAtHeightCache.get((hh, addressId), () => db.get(Keys.leaseBalanceAt(addressId, Height(hh)))) + val newAcc = if (hh > toHeight) acc else acc :+ hh + collectLeaseBalanceHistory(newAcc, lbn.prevHeight) + } - def loadBlockInfo(height: Int, db: ReadOnlyDB): Option[SignedBlockHeader] = { - db.get(Keys.blockMetaAt(Height(height))).map(_.toSignedHeader) + val wbh = slice(collectBalanceHistory(Vector.empty, lastBalance.height), from, toHeight) + val lbh = slice(collectLeaseBalanceHistory(Vector.empty, lastLeaseBalance.height), from, toHeight) + for { + (wh, lh) <- merge(wbh, lbh) + wb = balanceAtHeightCache.get((wh, addressId), () => db.get(Keys.wavesBalanceAt(addressId, Height(wh)))) + lb = leaseBalanceAtHeightCache.get((lh, addressId), () => db.get(Keys.leaseBalanceAt(addressId, Height(lh)))) + } yield BalanceSnapshot(wh.max(lh), 
wb.balance, lb.in, lb.out) + } } - override def loadHeightOf(blockId: ByteStr): Option[Int] = { - readOnly(_.get(Keys.heightOf(blockId))) - } + override def loadHeightOf(blockId: ByteStr): Option[Int] = blockHeightCache.get(blockId) override def featureVotes(height: Int): Map[Short, Int] = readOnly { db => settings.functionalitySettings @@ -937,8 +1021,8 @@ abstract class LevelDBWriter private[database] ( .flatMap { h => val height = Height(h) db.get(Keys.blockMetaAt(height)) - .map(_.header.featureVotes) - .getOrElse(Seq.empty) + .flatMap(_.header) + .fold(Seq.empty[Short])(_.featureVotes.map(_.toShort)) } .groupBy(identity) .view @@ -953,28 +1037,24 @@ abstract class LevelDBWriter private[database] ( .votingWindow(activatedAt, height) .flatMap { h => db.get(Keys.blockMetaAt(Height(h))) - .map(_.header.rewardVote) + .flatMap(_.header) + .map(_.rewardVote) } case _ => Seq() } } - override def hitSource(height: Int): Option[ByteStr] = readOnly { db => - db.get(Keys.hitSource(height)) - .filter(_.arr.length == Block.HitSourceLength) - } - def loadStateHash(height: Int): Option[StateHash] = readOnly { db => db.get(Keys.stateHash(height)) } - override def resolveERC20Address(address: ERC20Address): Option[IssuedAsset] = writableDB.withResource { r => - import scala.jdk.CollectionConverters.* - r.iterator.seek(Bytes.concat(KeyTags.AssetStaticInfo.prefixBytes, address.arr)) - r.iterator.asScala - .to(LazyList) - .headOption - .map(e => IssuedAsset(ByteStr(e.getKey.drop(2)))) - .filter(asset => asset.id.size == 32 && ERC20Address(asset) == address) + // TODO: maybe add length constraint + def loadBalanceHistory(address: Address): Seq[(Int, Long)] = writableDB.withResource { dbResource => + dbResource.get(Keys.addressId(address)).fold(Seq.empty[(Int, Long)]) { aid => + new WavesBalanceIterator(aid, dbResource).asScala.toSeq + } } + + override def resolveERC20Address(address: ERC20Address): Option[IssuedAsset] = + readOnly(_.get(Keys.assetStaticInfo(address)).map(assetInfo => IssuedAsset(assetInfo.id.toByteStr))) } diff --git a/node/src/main/scala/com/wavesplatform/database/SortedBatch.scala b/node/src/main/scala/com/wavesplatform/database/SortedBatch.scala deleted file mode 100644 index d06dafc01ce..00000000000 --- a/node/src/main/scala/com/wavesplatform/database/SortedBatch.scala +++ /dev/null @@ -1,27 +0,0 @@ -package com.wavesplatform.database - -import com.wavesplatform.common.state.ByteStr -import org.iq80.leveldb.WriteBatch - -import scala.collection.mutable - -class SortedBatch extends WriteBatch { - val addedEntries: mutable.Map[ByteStr, Array[Byte]] = mutable.TreeMap[ByteStr, Array[Byte]]() - val deletedEntries: mutable.Set[ByteStr] = mutable.TreeSet[ByteStr]() - - override def put(bytes: Array[Byte], bytes1: Array[Byte]): WriteBatch = { - val k = ByteStr(bytes) - addedEntries.put(k, bytes1) - deletedEntries.remove(k) - this - } - - override def delete(bytes: Array[Byte]): WriteBatch = { - val k = ByteStr(bytes) - addedEntries.remove(k) - deletedEntries.add(k) - this - } - - override def close(): Unit = {} -} diff --git a/node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADB.scala b/node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADB.scala deleted file mode 100644 index 8b42fb701e0..00000000000 --- a/node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADB.scala +++ /dev/null @@ -1,168 +0,0 @@ -package com.wavesplatform.database.jna - -import com.google.common.collect.AbstractIterator -import com.protonail.leveldb.jna._ -import 
com.wavesplatform.database.jna.LevelDBJNADB.{JNADBIterator, JNASnapshot, JNAWriteBatch} -import org.iq80.leveldb._ - -private[jna] class LevelDBJNADB(levelDB: LevelDB) extends DB { - override def get(key: Array[Byte]): Array[Byte] = - get(key, new ReadOptions) - - override def get(key: Array[Byte], options: ReadOptions): Array[Byte] = { - val o1 = LevelDBJNADB.toJNAReadOptions(options) - try levelDB.get(key, o1) - finally o1.close() - } - - override def iterator(): DBIterator = - new JNADBIterator(levelDB, new LevelDBReadOptions) - - override def iterator(options: ReadOptions): DBIterator = - new JNADBIterator(levelDB, LevelDBJNADB.toJNAReadOptions(options)) - - override def put(key: Array[Byte], value: Array[Byte]): Unit = - put(key, value, new WriteOptions) - - override def delete(key: Array[Byte]): Unit = - delete(key, new WriteOptions) - - override def write(updates: WriteBatch): Unit = - write(updates, new WriteOptions) - - override def createWriteBatch(): WriteBatch = - new JNAWriteBatch - - override def put(key: Array[Byte], value: Array[Byte], options: WriteOptions): Snapshot = { - val o1 = LevelDBJNADB.toJNAWriteOptions(options) - try levelDB.put(key, value, o1) - finally o1.close() - null - } - - override def delete(key: Array[Byte], options: WriteOptions): Snapshot = { - val o1 = LevelDBJNADB.toJNAWriteOptions(options) - try levelDB.delete(key, o1) - finally o1.close() - null - } - - override def write(updates: WriteBatch, options: WriteOptions): Snapshot = updates match { - case wb: JNAWriteBatch => - val o1 = LevelDBJNADB.toJNAWriteOptions(options) - try levelDB.write(wb.batch, o1) - finally o1.close() - null - - case _ => - throw new IllegalArgumentException(s"Write batch not supported: $updates") - } - - override def getSnapshot: Snapshot = - JNASnapshot(levelDB.createSnapshot()) - - override def getApproximateSizes(ranges: org.iq80.leveldb.Range*): Array[Long] = { - val jnaRanges = ranges.map(r => new com.protonail.leveldb.jna.Range(r.start(), r.limit())) - levelDB.approximateSizes(jnaRanges*) - } - - override def getProperty(name: String): String = - levelDB.property(name) - - override def suspendCompactions(): Unit = () - - override def resumeCompactions(): Unit = () - - override def compactRange(begin: Array[Byte], end: Array[Byte]): Unit = - levelDB.compactRange(begin, end) - - override def close(): Unit = - levelDB.close() -} - -private object LevelDBJNADB { - final class JNAWriteBatch extends WriteBatch { - val batch = new LevelDBWriteBatch - - override def put(key: Array[Byte], value: Array[Byte]): WriteBatch = { - batch.put(key, value) - this - } - - override def delete(key: Array[Byte]): WriteBatch = { - batch.delete(key) - this - } - - override def close(): Unit = - batch.close() - } - - private[this] type DBEntry = java.util.Map.Entry[Array[Byte], Array[Byte]] - - //noinspection ScalaStyle - final class JNADBIterator(levelDB: LevelDB, options: LevelDBReadOptions) extends AbstractIterator[DBEntry] with DBIterator { - private[this] val iterator = new LevelDBKeyValueIterator(levelDB, options) - - override def seek(key: Array[Byte]): Unit = - iterator.seekToKey(key) - - override def seekToFirst(): Unit = - iterator.seekToFirst() - - override def peekNext(): DBEntry = - this.peek() - - override def hasPrev: Boolean = - false - - override def prev(): DBEntry = - throw new NotImplementedError("prev() is not implemented") - - override def peekPrev(): DBEntry = - throw new NotImplementedError("peekPrev() is not implemented") - - override def seekToLast(): Unit = - 
iterator.seekToLast() - - override def close(): Unit = { - iterator.close() - options.close() - } - - override def computeNext(): DBEntry = { - if (iterator.hasNext) { - val pair = iterator.next() - new DBEntry { - override def getKey: Array[Byte] = pair.getKey - override def getValue: Array[Byte] = pair.getValue - override def setValue(value: Array[Byte]): Array[Byte] = throw new NotImplementedError("setValue(Array[Byte]) is not implemented") - } - } else { - this.endOfData() - null - } - } - } - - final case class JNASnapshot(snapshot: LevelDBSnapshot) extends Snapshot { - override def close(): Unit = snapshot.close() - } - - def toJNAReadOptions(ro: ReadOptions): LevelDBReadOptions = { - val jna = new LevelDBReadOptions - jna.setFillCache(ro.fillCache()) - jna.setVerifyChecksum(ro.verifyChecksums()) - ro.snapshot() match { - case JNASnapshot(snapshot) => jna.setSnapshot(snapshot) - case _ => // Ignore - } - jna - } - - def toJNAWriteOptions(wo: WriteOptions): LevelDBWriteOptions = { - val jna = new LevelDBWriteOptions - jna.setSync(wo.sync()) - jna - } -} diff --git a/node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADBFactory.scala b/node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADBFactory.scala deleted file mode 100644 index d5ab743eb61..00000000000 --- a/node/src/main/scala/com/wavesplatform/database/jna/LevelDBJNADBFactory.scala +++ /dev/null @@ -1,49 +0,0 @@ -package com.wavesplatform.database.jna - -import java.io.File - -import com.protonail.leveldb.jna.{LevelDB, LevelDBCompressionType, LevelDBOptions} -import org.iq80.leveldb.{CompressionType, DB, DBFactory, Options} - -class LevelDBJNADBFactory extends DBFactory { - private[this] def openJnaDatabase(path: File, options: Options): LevelDB = { - val opts = LevelDBJNADBFactory.toJNAOptions(options) - try new LevelDB(path.toString, opts) - finally opts.close() - } - - override def open(path: File, options: Options): DB = { - val db = openJnaDatabase(path, options) - new LevelDBJNADB(db) - } - - override def destroy(path: File, options: Options): Unit = { - val options1 = LevelDBJNADBFactory.toJNAOptions(options) - try LevelDB.destroy(path.toString, options1) - finally options1.close() - } - - override def repair(path: File, options: Options): Unit = { - val options1 = LevelDBJNADBFactory.toJNAOptions(options) - try LevelDB.repair(path.toString, options1) - finally options1.close() - } -} - -private object LevelDBJNADBFactory { - def toJNAOptions(o1: Options): LevelDBOptions = { - val opts = new LevelDBOptions - opts.setBlockRestartInterval(o1.blockRestartInterval()) - opts.setBlockSize(o1.blockSize()) - opts.setCompressionType(o1.compressionType() match { - case CompressionType.NONE => LevelDBCompressionType.NoCompression - case CompressionType.SNAPPY => LevelDBCompressionType.SnappyCompression - }) - opts.setCreateIfMissing(o1.createIfMissing()) - opts.setErrorIfExists(o1.errorIfExists()) - opts.setMaxOpenFiles(o1.maxOpenFiles()) - opts.setParanoidChecks(o1.paranoidChecks()) - opts.setWriteBufferSize(o1.writeBufferSize()) - opts - } -} diff --git a/node/src/main/scala/com/wavesplatform/database/package.scala b/node/src/main/scala/com/wavesplatform/database/package.scala index 83948f985fa..001b1a6d98b 100644 --- a/node/src/main/scala/com/wavesplatform/database/package.scala +++ b/node/src/main/scala/com/wavesplatform/database/package.scala @@ -1,13 +1,15 @@ package com.wavesplatform +import java.nio.ByteBuffer +import java.util +import java.util.Map as JMap import com.google.common.base.Charsets.UTF_8 
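Reviewer note, not part of the patch: the read/write helpers further down in this file use fixed-width big-endian layouts, which is where the length checks come from — CurrentBalance is 8+4+4 = 16 bytes, BalanceNode 8+4 = 12, CurrentLeaseBalance and CurrentVolumeAndFee 8+8+4+4 = 24, LeaseBalanceNode and VolumeAndFeeNode 8+8+4 = 20. A minimal round-trip sketch for the 16-byte case, using the same Guava primitives; CurrentBalanceSketch is a made-up stand-in for the real CurrentBalance.

import com.google.common.primitives.{Ints, Longs}

final case class CurrentBalanceSketch(balance: Long, height: Int, prevHeight: Int)

def encode(b: CurrentBalanceSketch): Array[Byte] =
  Longs.toByteArray(b.balance) ++ Ints.toByteArray(b.height) ++ Ints.toByteArray(b.prevHeight)

def decode(bs: Array[Byte]): CurrentBalanceSketch =
  CurrentBalanceSketch(
    Longs.fromByteArray(bs.take(8)),      // 8-byte balance
    Ints.fromByteArray(bs.slice(8, 12)),  // 4-byte height of the latest change
    Ints.fromByteArray(bs.takeRight(4))   // 4-byte height of the previous change
  )

// decode(encode(CurrentBalanceSketch(100L, 12, 9))) == CurrentBalanceSketch(100L, 12, 9)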
+import com.google.common.collect.{Interners, Maps} import com.google.common.io.ByteStreams.{newDataInput, newDataOutput} import com.google.common.io.{ByteArrayDataInput, ByteArrayDataOutput} -import com.google.common.primitives.{Bytes, Ints, Longs} +import com.google.common.primitives.{Ints, Longs} import com.google.protobuf.ByteString -import com.typesafe.scalalogging.Logger import com.wavesplatform.account.{AddressScheme, PublicKey} -import com.wavesplatform.api.BlockMeta import com.wavesplatform.block.validation.Validators import com.wavesplatform.block.{Block, BlockHeader} import com.wavesplatform.common.state.ByteStr @@ -16,7 +18,7 @@ import com.wavesplatform.crypto.* import com.wavesplatform.database.protobuf as pb import com.wavesplatform.database.protobuf.DataEntry.Value import com.wavesplatform.database.protobuf.TransactionData.Transaction as TD -import com.wavesplatform.lang.script.{Script, ScriptReader} +import com.wavesplatform.lang.script.ScriptReader import com.wavesplatform.protobuf.ByteStringExt import com.wavesplatform.protobuf.block.PBBlocks import com.wavesplatform.protobuf.transaction.{PBRecipients, PBTransactions} @@ -37,61 +39,27 @@ import com.wavesplatform.transaction.{ import com.wavesplatform.utils.* import monix.eval.Task import monix.reactive.Observable -import org.iq80.leveldb.* -import org.slf4j.LoggerFactory +import org.rocksdb.* +import sun.nio.ch.Util import supertagged.TaggedType -import java.io.File -import java.nio.ByteBuffer -import java.util.Map as JMap -import scala.collection.mutable +import scala.annotation.tailrec +import scala.collection.mutable.ArrayBuffer +import scala.collection.{View, mutable} +import scala.jdk.CollectionConverters.* +import scala.util.Using //noinspection UnstableApiUsage package object database { - private lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName)) - - def openDB(path: String, recreate: Boolean = false): DB = { - logger.debug(s"Open DB at $path") - val file = new File(path) - val options = new Options() - .createIfMissing(true) - .paranoidChecks(true) - - if (recreate) { - LevelDBFactory.factory.destroy(file, options) - } - - file.getAbsoluteFile.getParentFile.mkdirs() - LevelDBFactory.factory.open(file, options) - } - final type DBEntry = JMap.Entry[Array[Byte], Array[Byte]] implicit class ByteArrayDataOutputExt(val output: ByteArrayDataOutput) extends AnyVal { def writeByteStr(s: ByteStr): Unit = { output.write(s.arr) } - - def writeScriptOption(v: Option[Script]): Unit = { - output.writeBoolean(v.isDefined) - v.foreach { s => - val b = s.bytes().arr - output.writeShort(b.length) - output.write(b) - } - } } implicit class ByteArrayDataInputExt(val input: ByteArrayDataInput) extends AnyVal { - def readScriptOption(): Option[Script] = { - if (input.readBoolean()) { - val len = input.readShort() - val b = new Array[Byte](len) - input.readFully(b) - Some(ScriptReader.fromBytes(b).explicitGet()) - } else None - } - def readBytes(len: Int): Array[Byte] = { val arr = new Array[Byte](len) input.readFully(arr) @@ -101,9 +69,6 @@ package object database { def readByteStr(len: Int): ByteStr = { ByteStr(readBytes(len)) } - - def readSignature: ByteStr = readByteStr(SignatureLength) - def readPublicKey: PublicKey = PublicKey(readBytes(KeyLength)) } def writeIntSeq(values: Seq[Int]): Array[Byte] = { @@ -137,32 +102,6 @@ package object database { def writeAssetIds(values: Seq[ByteStr]): Array[Byte] = values.foldLeft(ByteBuffer.allocate(values.length * transaction.AssetIdLength)) { case (buf, ai) => 
buf.put(ai.arr) }.array() - def readTxIds(data: Array[Byte]): List[ByteStr] = Option(data).fold(List.empty[ByteStr]) { d => - val b = ByteBuffer.wrap(d) - val ids = List.newBuilder[ByteStr] - - while (b.remaining() > 0) { - val buffer = (b.get(): @unchecked) match { - case crypto.DigestLength => new Array[Byte](crypto.DigestLength) - case crypto.SignatureLength => new Array[Byte](crypto.SignatureLength) - } - b.get(buffer) - ids += ByteStr(buffer) - } - - ids.result() - } - - def writeTxIds(ids: Seq[ByteStr]): Array[Byte] = - ids - .foldLeft(ByteBuffer.allocate(ids.map(_.arr.length + 1).sum)) { case (b, id) => - b.put((id.arr.length: @unchecked) match { - case crypto.DigestLength => crypto.DigestLength.toByte - case crypto.SignatureLength => crypto.SignatureLength.toByte - }).put(id.arr) - } - .array() - def readStrings(data: Array[Byte]): Seq[String] = Option(data).fold(Seq.empty[String]) { _ => var i = 0 val s = Seq.newBuilder[String] @@ -184,17 +123,24 @@ package object database { .array() } - def writeLeaseBalance(lb: LeaseBalance): Array[Byte] = { - val ndo = newDataOutput() - ndo.writeLong(lb.in) - ndo.writeLong(lb.out) - ndo.toByteArray - } + def readLeaseBalanceNode(data: Array[Byte]): LeaseBalanceNode = if (data != null && data.length == 20) + LeaseBalanceNode(Longs.fromByteArray(data.take(8)), Longs.fromByteArray(data.slice(8, 16)), Height(Ints.fromByteArray(data.takeRight(4)))) + else LeaseBalanceNode.Empty - def readLeaseBalance(data: Array[Byte]): LeaseBalance = Option(data).fold(LeaseBalance.empty) { d => - val ndi = newDataInput(d) - LeaseBalance(ndi.readLong(), ndi.readLong()) - } + def writeLeaseBalanceNode(leaseBalanceNode: LeaseBalanceNode): Array[Byte] = + Longs.toByteArray(leaseBalanceNode.in) ++ Longs.toByteArray(leaseBalanceNode.out) ++ Ints.toByteArray(leaseBalanceNode.prevHeight) + + def readLeaseBalance(data: Array[Byte]): CurrentLeaseBalance = if (data != null && data.length == 24) + CurrentLeaseBalance( + Longs.fromByteArray(data.take(8)), + Longs.fromByteArray(data.slice(8, 16)), + Height(Ints.fromByteArray(data.slice(16, 20))), + Height(Ints.fromByteArray(data.takeRight(4))) + ) + else CurrentLeaseBalance.Unavailable + + def writeLeaseBalance(lb: CurrentLeaseBalance): Array[Byte] = + Longs.toByteArray(lb.in) ++ Longs.toByteArray(lb.out) ++ Ints.toByteArray(lb.height) ++ Ints.toByteArray(lb.prevHeight) def writeLeaseDetails(lde: Either[Boolean, LeaseDetails]): Array[Byte] = lde.fold( @@ -238,44 +184,24 @@ package object database { ) } - def readVolumeAndFee(data: Array[Byte]): VolumeAndFee = Option(data).fold(VolumeAndFee.empty) { d => - val ndi = newDataInput(d) - VolumeAndFee(ndi.readLong(), ndi.readLong()) - } - - def writeVolumeAndFee(vf: VolumeAndFee): Array[Byte] = { - val ndo = newDataOutput() - ndo.writeLong(vf.volume) - ndo.writeLong(vf.fee) - ndo.toByteArray - } - - def readTransactionInfo(data: Array[Byte]): (Int, Transaction) = - (Ints.fromByteArray(data), TransactionParsers.parseBytes(data.drop(4)).get) + def readVolumeAndFeeNode(data: Array[Byte]): VolumeAndFeeNode = if (data != null && data.length == 20) + VolumeAndFeeNode(Longs.fromByteArray(data.take(8)), Longs.fromByteArray(data.slice(8, 16)), Height(Ints.fromByteArray(data.takeRight(4)))) + else VolumeAndFeeNode.Empty - def readTransactionHeight(data: Array[Byte]): Int = Ints.fromByteArray(data) + def writeVolumeAndFeeNode(volumeAndFeeNode: VolumeAndFeeNode): Array[Byte] = + Longs.toByteArray(volumeAndFeeNode.volume) ++ Longs.toByteArray(volumeAndFeeNode.fee) ++ 
Ints.toByteArray(volumeAndFeeNode.prevHeight) - def readTransactionIds(data: Array[Byte]): Seq[(Int, ByteStr)] = Option(data).fold(Seq.empty[(Int, ByteStr)]) { d => - val b = ByteBuffer.wrap(d) - val ids = Seq.newBuilder[(Int, ByteStr)] - while (b.hasRemaining) { - ids += b.get.toInt -> { - val buf = new Array[Byte](b.get) - b.get(buf) - ByteStr(buf) - } - } - ids.result() - } + def readVolumeAndFee(data: Array[Byte]): CurrentVolumeAndFee = if (data != null && data.length == 24) + CurrentVolumeAndFee( + Longs.fromByteArray(data.take(8)), + Longs.fromByteArray(data.slice(8, 16)), + Height(Ints.fromByteArray(data.slice(16, 20))), + Height(Ints.fromByteArray(data.takeRight(4))) + ) + else CurrentVolumeAndFee.Unavailable - def writeTransactionIds(ids: Seq[(Int, ByteStr)]): Array[Byte] = { - val size = ids.foldLeft(0) { case (prev, (_, id)) => prev + 2 + id.arr.length } - val buffer = ByteBuffer.allocate(size) - for ((typeId, id) <- ids) { - buffer.put(typeId.toByte).put(id.arr.length.toByte).put(id.arr) - } - buffer.array() - } + def writeVolumeAndFee(vf: CurrentVolumeAndFee): Array[Byte] = + Longs.toByteArray(vf.volume) ++ Longs.toByteArray(vf.fee) ++ Ints.toByteArray(vf.height) ++ Ints.toByteArray(vf.prevHeight) def readFeatureMap(data: Array[Byte]): Map[Short, Int] = Option(data).fold(Map.empty[Short, Int]) { _ => val b = ByteBuffer.wrap(data) @@ -328,35 +254,22 @@ package object database { ).toByteArray } - def writeBlockMeta(data: BlockMeta): Array[Byte] = - pb.BlockMeta( - Some(PBBlocks.protobuf(data.header)), - ByteString.copyFrom(data.signature.arr), - data.headerHash.fold(ByteString.EMPTY)(hh => ByteString.copyFrom(hh.arr)), - data.height, - data.size, - data.transactionCount, - data.totalFeeInWaves, - data.reward.getOrElse(-1L), - data.vrf.fold(ByteString.EMPTY)(vrf => ByteString.copyFrom(vrf.arr)) - ).toByteArray - - def readBlockMeta(bs: Array[Byte]): BlockMeta = { - val pbbm = pb.BlockMeta.parseFrom(bs) - BlockMeta( - PBBlocks.vanilla(pbbm.header.get), - pbbm.signature.toByteStr, - Option(pbbm.headerHash).collect { case bs if !bs.isEmpty => bs.toByteStr }, - pbbm.height, - pbbm.size, - pbbm.transactionCount, - pbbm.totalFeeInWaves, - Option(pbbm.reward).filter(_ >= 0), - Option(pbbm.vrf).collect { case bs if !bs.isEmpty => bs.toByteStr } + def readAssetStaticInfo(bb: Array[Byte]): AssetStaticInfo = { + val sai = pb.StaticAssetInfo.parseFrom(bb) + AssetStaticInfo( + sai.id.toByteStr, + TransactionId(sai.sourceId.toByteStr), + PublicKey(sai.issuerPublicKey.toByteArray), + sai.decimals, + sai.isNft ) } - def readTransactionHNSeqAndType(bs: Array[Byte]): (Height, Seq[(Byte, TxNum)]) = { + def writeBlockMeta(data: pb.BlockMeta): Array[Byte] = data.toByteArray + + def readBlockMeta(bs: Array[Byte]): pb.BlockMeta = pb.BlockMeta.parseFrom(bs) + + def readTransactionHNSeqAndType(bs: Array[Byte]): (Height, Seq[(Byte, TxNum, Int)]) = { val ndi = newDataInput(bs) val height = Height(ndi.readInt()) val numSeqLength = ndi.readInt() @@ -364,25 +277,27 @@ package object database { ( height, List.fill(numSeqLength) { - val tp = ndi.readByte() - val num = TxNum(ndi.readShort()) - (tp, num) + val tp = ndi.readByte() + val num = TxNum(ndi.readShort()) + val size = ndi.readInt() + (tp, num, size) } ) } - def writeTransactionHNSeqAndType(v: (Height, Seq[(Byte, TxNum)])): Array[Byte] = { + def writeTransactionHNSeqAndType(v: (Height, Seq[(Byte, TxNum, Int)])): Array[Byte] = { val (height, numSeq) = v val numSeqLength = numSeq.length - val outputLength = 4 + 4 + numSeqLength * (4 + 1) + val outputLength 
= 4 + 4 + numSeqLength * (1 + 2 + 4) val ndo = newDataOutput(outputLength) ndo.writeInt(height) ndo.writeInt(numSeqLength) - numSeq.foreach { case (tp, num) => + numSeq.foreach { case (tp, num, size) => ndo.writeByte(tp) ndo.writeShort(num) + ndo.writeInt(size) } ndo.toByteArray @@ -412,7 +327,7 @@ package object database { ndo.toByteArray } - def readDataEntry(key: String)(bs: Array[Byte]): DataEntry[?] = + private def readDataEntry(key: String)(bs: Array[Byte]): DataEntry[?] = pb.DataEntry.parseFrom(bs).value match { case Value.Empty => EmptyDataEntry(key) case Value.IntValue(value) => IntegerDataEntry(key, value) @@ -421,7 +336,7 @@ package object database { case Value.StringValue(value) => StringDataEntry(key, value) } - def writeDataEntry(e: DataEntry[?]): Array[Byte] = + private def writeDataEntry(e: DataEntry[?]): Array[Byte] = pb.DataEntry(e match { case IntegerDataEntry(_, value) => pb.DataEntry.Value.IntValue(value) case BooleanDataEntry(_, value) => pb.DataEntry.Value.BoolValue(value) @@ -430,54 +345,167 @@ package object database { case _: EmptyDataEntry => pb.DataEntry.Value.Empty }).toByteArray - implicit class EntryExt(val e: JMap.Entry[Array[Byte], Array[Byte]]) extends AnyVal { - import com.wavesplatform.crypto.DigestLength - def extractId(offset: Int = 2, length: Int = DigestLength): ByteStr = { - val id = ByteStr(new Array[Byte](length)) - Array.copy(e.getKey, offset, id.arr, 0, length) - id - } - } + def readCurrentData(key: String)(bs: Array[Byte]): CurrentData = if (bs == null) CurrentData.empty(key) + else + CurrentData( + readDataEntry(key)(bs.drop(8)), + Height(Ints.fromByteArray(bs.take(4))), + Height(Ints.fromByteArray(bs.slice(4, 8))) + ) + + def writeCurrentData(cdn: CurrentData): Array[Byte] = + Ints.toByteArray(cdn.height) ++ Ints.toByteArray(cdn.prevHeight) ++ writeDataEntry(cdn.entry) + + def readDataNode(key: String)(bs: Array[Byte]): DataNode = if (bs == null) DataNode.empty(key) + else + DataNode(readDataEntry(key)(bs.drop(4)), Height(Ints.fromByteArray(bs.take(4)))) + + def writeDataNode(dn: DataNode): Array[Byte] = + Ints.toByteArray(dn.prevHeight) ++ writeDataEntry(dn.entry) + + def readCurrentBalance(bs: Array[Byte]): CurrentBalance = if (bs != null && bs.length == 16) + CurrentBalance(Longs.fromByteArray(bs.take(8)), Height(Ints.fromByteArray(bs.slice(8, 12))), Height(Ints.fromByteArray(bs.takeRight(4)))) + else CurrentBalance.Unavailable + + def writeCurrentBalance(balance: CurrentBalance): Array[Byte] = + Longs.toByteArray(balance.balance) ++ Ints.toByteArray(balance.height) ++ Ints.toByteArray(balance.prevHeight) + + def readBalanceNode(bs: Array[Byte]): BalanceNode = if (bs != null && bs.length == 12) + BalanceNode(Longs.fromByteArray(bs.take(8)), Height(Ints.fromByteArray(bs.takeRight(4)))) + else BalanceNode.Empty + + def writeBalanceNode(balance: BalanceNode): Array[Byte] = + Longs.toByteArray(balance.balance) ++ Ints.toByteArray(balance.prevHeight) + + implicit class DBExt(val db: RocksDB) extends AnyVal { - implicit class DBExt(val db: DB) extends AnyVal { def readOnly[A](f: ReadOnlyDB => A): A = { - val snapshot = db.getSnapshot - try f(new ReadOnlyDB(db, new ReadOptions().snapshot(snapshot))) - finally snapshot.close() + Using.resource(db.getSnapshot) { s => + Using.resource(new ReadOptions().setSnapshot(s).setVerifyChecksums(false)) { ro => + f(new ReadOnlyDB(db, ro)) + } + }(db.releaseSnapshot(_)) } /** @note * Runs operations in batch, so keep in mind, that previous changes don't appear lately in f */ def readWrite[A](f: RW => A): A = 
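Not part of the patch: the readOnly helper just above pins a RocksDB snapshot into ReadOptions so every read in the callback sees one consistent state, and the readWrite body that follows stages all mutations in a WriteBatch committed by a single db.write with the WAL disabled (WriteOptions().setSync(false).setDisableWAL(true)); the reasoning for skipping the WAL is not spelled out in this hunk. A stripped-down version of that lifecycle against the raw RocksDB Java API; the "counter" key and its encoding are made up for illustration.

import org.rocksdb.{ReadOptions, RocksDB, WriteBatch, WriteOptions}
import scala.util.Using

def consistentReadModifyWrite(db: RocksDB): Unit = {
  val snapshot = db.getSnapshot
  try
    Using.resources(
      new ReadOptions().setSnapshot(snapshot).setVerifyChecksums(false),
      new WriteBatch(),
      new WriteOptions().setSync(false).setDisableWAL(true)
    ) { (readOptions, batch, writeOptions) =>
      val prev = Option(db.get(readOptions, "counter".getBytes)) // read as of the snapshot
      val next = prev.fold(1L)(bytes => new String(bytes).toLong + 1L)
      batch.put("counter".getBytes, next.toString.getBytes)      // staged, not yet visible
      db.write(writeOptions, batch)                              // one atomic commit
    }
  finally db.releaseSnapshot(snapshot)
}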
{ - val snapshot = db.getSnapshot - val readOptions = new ReadOptions().snapshot(snapshot) - val batch = new SortedBatch - val rw = new RW(db, readOptions, batch) - val nativeBatch = db.createWriteBatch() + val snapshot = db.getSnapshot + val readOptions = new ReadOptions().setSnapshot(snapshot).setVerifyChecksums(false) + val batch = new WriteBatch() + val rw = new RW(db, readOptions, batch) + val writeOptions = new WriteOptions().setSync(false).setDisableWAL(true) try { val r = f(rw) - batch.addedEntries.foreach { case (k, v) => nativeBatch.put(k.arr, v) } - batch.deletedEntries.foreach(k => nativeBatch.delete(k.arr)) - db.write(nativeBatch, new WriteOptions().sync(false).snapshot(false)) + db.write(writeOptions, batch) r } finally { - nativeBatch.close() - snapshot.close() + readOptions.close() + writeOptions.close() + batch.close() + db.releaseSnapshot(snapshot) } } - def get[A](key: Key[A]): A = key.parse(db.get(key.keyBytes)) - def get[A](key: Key[A], readOptions: ReadOptions): A = key.parse(db.get(key.keyBytes, readOptions)) - def has(key: Key[?]): Boolean = db.get(key.keyBytes) != null + def multiGetOpt[A](readOptions: ReadOptions, keys: Seq[Key[Option[A]]], valBufSize: Int): Seq[Option[A]] = + multiGetOpt(readOptions, keys, getKeyBuffersFromKeys(keys), getValueBuffers(keys.size, valBufSize)) + + def multiGetOpt[A](readOptions: ReadOptions, keys: Seq[Key[Option[A]]], valBufSizes: Seq[Int]): Seq[Option[A]] = + multiGetOpt(readOptions, keys, getKeyBuffersFromKeys(keys), getValueBuffers(valBufSizes)) + + def multiGet[A](readOptions: ReadOptions, keys: ArrayBuffer[Key[A]], valBufSizes: ArrayBuffer[Int]): View[A] = + multiGet(readOptions, keys, getKeyBuffersFromKeys(keys), getValueBuffers(valBufSizes)) + + def multiGet[A](readOptions: ReadOptions, keys: ArrayBuffer[Key[A]], valBufSize: Int): View[A] = + multiGet(readOptions, keys, getKeyBuffersFromKeys(keys), getValueBuffers(keys.size, valBufSize)) + + def multiGet[A](readOptions: ReadOptions, keys: Seq[Key[A]], valBufSize: Int): Seq[Option[A]] = { + val keyBufs = getKeyBuffersFromKeys(keys) + val valBufs = getValueBuffers(keys.size, valBufSize) + + val cfhs = keys.map(_.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily)).asJava + val result = keys.view + .zip(db.multiGetByteBuffers(readOptions, cfhs, keyBufs, valBufs).asScala) + .map { case (parser, value) => + if (value.status.getCode == Status.Code.Ok) { + val arr = new Array[Byte](value.requiredSize) + value.value.get(arr) + Util.releaseTemporaryDirectBuffer(value.value) + Some(parser.parse(arr)) + } else None + } + .toSeq + + keyBufs.forEach(Util.releaseTemporaryDirectBuffer(_)) + result + } + + def multiGetInts(readOptions: ReadOptions, keys: Seq[Key[Int]]): Seq[Option[Int]] = { + val keyBytes = keys.map(_.keyBytes) + val keyBufs = getKeyBuffers(keyBytes) + val valBufs = getValueBuffers(keyBytes.size, 4) + + val cfhs = keys.map(_.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily)).asJava + val result = db + .multiGetByteBuffers(readOptions, cfhs, keyBufs, valBufs) + .asScala + .map { value => + if (value.status.getCode == Status.Code.Ok) { + val h = Some(value.value.getInt) + Util.releaseTemporaryDirectBuffer(value.value) + h + } else None + } + .toSeq + + keyBufs.forEach(Util.releaseTemporaryDirectBuffer(_)) + result + } + + def multiGetFlat[A](readOptions: ReadOptions, keys: ArrayBuffer[Key[Option[A]]], valBufSizes: ArrayBuffer[Int]): Seq[A] = { + val keyBufs = getKeyBuffersFromKeys(keys) + val valBufs = getValueBuffers(valBufSizes) + + val cfhs = 
keys.map(_.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily)).asJava + val result = keys.view + .zip(db.multiGetByteBuffers(readOptions, cfhs, keyBufs, valBufs).asScala) + .flatMap { case (parser, value) => + if (value.status.getCode == Status.Code.Ok) { + val arr = new Array[Byte](value.requiredSize) + value.value.get(arr) + Util.releaseTemporaryDirectBuffer(value.value) + parser.parse(arr) + } else None + } + .toSeq + + keyBufs.forEach(Util.releaseTemporaryDirectBuffer(_)) + result + } - def iterateOver(tag: KeyTags.KeyTag)(f: DBEntry => Unit): Unit = iterateOver(tag.prefixBytes)(f) + def get[A](key: Key[A]): A = key.parse(db.get(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), key.keyBytes)) + def get[A](key: Key[A], readOptions: ReadOptions): A = + key.parse(db.get(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), readOptions, key.keyBytes)) + def has(key: Key[?]): Boolean = db.get(key.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily), key.keyBytes) != null + + def iterateOver(tag: KeyTags.KeyTag, cfh: Option[ColumnFamilyHandle] = None)(f: DBEntry => Unit): Unit = + iterateOver(tag.prefixBytes, cfh)(f) + + def iterateOver(prefix: Array[Byte], cfh: Option[ColumnFamilyHandle])(f: DBEntry => Unit): Unit = { + @tailrec + def loop(iter: RocksIterator): Unit = { + if (iter.isValid && iter.key().startsWith(prefix)) { + f(Maps.immutableEntry(iter.key(), iter.value())) + iter.next() + loop(iter) + } else () + } - def iterateOver(prefix: Array[Byte], seekPrefix: Array[Byte] = Array.emptyByteArray)(f: DBEntry => Unit): Unit = { - val iterator = db.iterator() + val iterator = db.newIterator(cfh.getOrElse(db.getDefaultColumnFamily), new ReadOptions().setTotalOrderSeek(true)) try { - iterator.seek(Bytes.concat(prefix, seekPrefix)) - while (iterator.hasNext && iterator.peekNext().getKey.startsWith(prefix)) f(iterator.next()) + iterator.seek(prefix) + loop(iterator) } finally iterator.close() } @@ -488,6 +516,82 @@ package object database { try f(resource) finally resource.close() } + + private def getKeyBuffersFromKeys(keys: collection.Seq[Key[?]]): util.List[ByteBuffer] = + keys.map { k => + val arr = k.keyBytes + val b = Util.getTemporaryDirectBuffer(arr.length) + b.put(k.keyBytes).flip() + b + }.asJava + + private def getKeyBuffers(keys: collection.Seq[Array[Byte]]): util.List[ByteBuffer] = + keys.map { k => + val b = Util.getTemporaryDirectBuffer(k.length) + b.put(k).flip() + b + }.asJava + + private def getValueBuffers(amount: Int, bufferSize: Int): util.List[ByteBuffer] = + List + .fill(amount) { + val buf = Util.getTemporaryDirectBuffer(bufferSize) + buf.limit(buf.capacity()) + buf + } + .asJava + + private def getValueBuffers(bufferSizes: collection.Seq[Int]): util.List[ByteBuffer] = + bufferSizes.map { size => + val buf = Util.getTemporaryDirectBuffer(size) + buf.limit(buf.capacity()) + buf + }.asJava + + private def multiGetOpt[A]( + readOptions: ReadOptions, + keys: Seq[Key[Option[A]]], + keyBufs: util.List[ByteBuffer], + valBufs: util.List[ByteBuffer] + ): Seq[Option[A]] = { + val cfhs = keys.map(_.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily)).asJava + val result = keys.view + .zip(db.multiGetByteBuffers(readOptions, cfhs, keyBufs, valBufs).asScala) + .map { case (parser, value) => + if (value.status.getCode == Status.Code.Ok) { + val arr = new Array[Byte](value.requiredSize) + value.value.get(arr) + Util.releaseTemporaryDirectBuffer(value.value) + parser.parse(arr) + } else None + } + .toSeq + + 
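Side note for review, not part of the patch: the multiGet*/multiGetOpt helpers in this DBExt wrapper all funnel into RocksDB's multiGetByteBuffers, which fills caller-provided direct ByteBuffers, so every call site must know a value size up front — either one fixed size for all keys (the 4-byte values in multiGetInts) or a per-key size carried in the data itself (the transaction sizes stored in TransactionMeta). A hypothetical call site, assuming it lives in this same package object so the DBExt wrapper and Key are in scope; fooKeys and the 64-byte bound are assumptions, not code from this patch.

import org.rocksdb.{ReadOptions, RocksDB}
import scala.util.Using

def readMany(db: RocksDB, fooKeys: Seq[Key[Option[Array[Byte]]]]): Seq[Option[Array[Byte]]] =
  Using.resource(new ReadOptions()) { ro =>
    db.multiGetOpt(ro, fooKeys, 64) // one multiGetByteBuffers round trip instead of N point reads
  }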
keyBufs.forEach(Util.releaseTemporaryDirectBuffer(_)) + result + } + + private def multiGet[A]( + readOptions: ReadOptions, + keys: ArrayBuffer[Key[A]], + keyBufs: util.List[ByteBuffer], + valBufs: util.List[ByteBuffer] + ): View[A] = { + val cfhs = keys.map(_.columnFamilyHandle.getOrElse(db.getDefaultColumnFamily)).asJava + val result = keys.view + .zip(db.multiGetByteBuffers(readOptions, cfhs, keyBufs, valBufs).asScala) + .flatMap { case (parser, value) => + if (value.status.getCode == Status.Code.Ok) { + val arr = new Array[Byte](value.requiredSize) + value.value.get(arr) + Util.releaseTemporaryDirectBuffer(value.value) + Some(parser.parse(arr)) + } else None + } + + keyBufs.forEach(Util.releaseTemporaryDirectBuffer(_)) + result + } } def createBlock(header: BlockHeader, signature: ByteStr, txs: Seq[Transaction]): Either[TxValidationError.GenericError, Block] = @@ -511,15 +615,19 @@ package object database { ) ) + private val scriptInterner = Interners.newWeakInterner[AccountScriptInfo]() + def readAccountScriptInfo(b: Array[Byte]): AccountScriptInfo = { val asi = pb.AccountScriptInfo.parseFrom(b) - AccountScriptInfo( - PublicKey(asi.publicKey.toByteArray), - ScriptReader.fromBytes(asi.scriptBytes.toByteArray).explicitGet(), - asi.maxComplexity, - asi.callableComplexity.map { c => - c.version -> c.callableComplexity - }.toMap + scriptInterner.intern( + AccountScriptInfo( + PublicKey(asi.publicKey.toByteArray), + ScriptReader.fromBytes(asi.scriptBytes.toByteArray).explicitGet(), + asi.maxComplexity, + asi.callableComplexity.map { c => + c.version -> c.callableComplexity + }.toMap + ) ) } @@ -545,18 +653,18 @@ package object database { pb.TransactionData(ptx, !m.succeeded, m.spentComplexity).toByteArray } - def loadTransactions(height: Height, db: ReadOnlyDB): Seq[(TxMeta, Transaction)] = { + def loadTransactions(height: Height, rdb: RDB): Seq[(TxMeta, Transaction)] = { val transactions = Seq.newBuilder[(TxMeta, Transaction)] - db.iterateOver(KeyTags.NthTransactionInfoAtHeight.prefixBytes ++ Ints.toByteArray(height)) { e => + rdb.db.iterateOver(KeyTags.NthTransactionInfoAtHeight.prefixBytes ++ Ints.toByteArray(height), Some(rdb.txHandle.handle)) { e => transactions += readTransaction(height)(e.getValue) } transactions.result() } - def loadBlock(height: Height, db: ReadOnlyDB): Option[Block] = + def loadBlock(height: Height, rdb: RDB): Option[Block] = for { - meta <- db.get(Keys.blockMetaAt(height)) - block <- createBlock(meta.header, meta.signature, loadTransactions(height, db).map(_._2)).toOption + meta <- rdb.db.get(Keys.blockMetaAt(height)) + block <- createBlock(PBBlocks.vanilla(meta.getHeader), meta.signature.toByteStr, loadTransactions(height, rdb).map(_._2)).toOption } yield block def fromHistory[A](resource: DBResource, historyKey: Key[Seq[Int]], valueKey: Int => Key[A]): Option[A] = @@ -586,13 +694,13 @@ package object database { Height(pbStaticInfo.height) ) - def loadActiveLeases(db: DB, fromHeight: Int, toHeight: Int): Seq[LeaseTransaction] = db.withResource { r => + def loadActiveLeases(rdb: RDB, fromHeight: Int, toHeight: Int): Seq[LeaseTransaction] = rdb.db.withResource { r => (for { id <- loadLeaseIds(r, fromHeight, toHeight, includeCancelled = false) details <- fromHistory(r, Keys.leaseDetailsHistory(id), Keys.leaseDetails(id)) if details.exists(_.fold(identity, _.isActive)) - tm <- r.get(Keys.transactionMetaById(TransactionId(id))) - tx <- r.get(Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort))) + tm <- r.get(Keys.transactionMetaById(TransactionId(id), 
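Not part of the patch: the scriptInterner / readAccountScriptInfo change earlier in this file wraps each parsed script in a Guava weak interner, so the many rows that deserialize to an identical AccountScriptInfo collapse to one shared instance, and that instance is garbage-collected once nothing references it. A tiny self-contained demonstration of the mechanism, using strings instead of scripts; the values are made up.

import com.google.common.collect.Interners

val interner = Interners.newWeakInterner[String]()
val a = interner.intern(new String("base64-script"))
val b = interner.intern(new String("base64-script")) // a distinct object with an equal value
assert(a eq b) // the interner hands back one canonical instance for equal values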
rdb.txMetaHandle)) + tx <- r.get(Keys.transactionAt(Height(tm.height), TxNum(tm.num.toShort), rdb.txHandle)) } yield tx).collect { case (ltm, lt: LeaseTransaction) if ltm.succeeded => lt }.toSeq @@ -600,22 +708,24 @@ package object database { def loadLeaseIds(resource: DBResource, fromHeight: Int, toHeight: Int, includeCancelled: Boolean): Set[ByteStr] = { val leaseIds = mutable.Set.empty[ByteStr] - val iterator = resource.iterator + + val iterator = resource.fullIterator @inline def keyInRange(): Boolean = { - val actualKey = iterator.peekNext().getKey + val actualKey = iterator.key() actualKey.startsWith(KeyTags.LeaseDetails.prefixBytes) && Ints.fromByteArray(actualKey.slice(2, 6)) <= toHeight } iterator.seek(KeyTags.LeaseDetails.prefixBytes ++ Ints.toByteArray(fromHeight)) - while (iterator.hasNext && keyInRange()) { - val e = iterator.next() - val leaseId = ByteStr(e.getKey.drop(6)) - if (includeCancelled || readLeaseDetails(e.getValue).fold(identity, _.isActive)) + while (iterator.isValid && keyInRange()) { + val leaseId = ByteStr(iterator.key().drop(6)) + if (includeCancelled || readLeaseDetails(iterator.value()).fold(identity, _.isActive)) leaseIds += leaseId else leaseIds -= leaseId + + iterator.next() } leaseIds.toSet diff --git a/node/src/main/scala/com/wavesplatform/database/patch/DisableHijackedAliases.scala b/node/src/main/scala/com/wavesplatform/database/patch/DisableHijackedAliases.scala index 514b3193d63..8df1e7b9c06 100644 --- a/node/src/main/scala/com/wavesplatform/database/patch/DisableHijackedAliases.scala +++ b/node/src/main/scala/com/wavesplatform/database/patch/DisableHijackedAliases.scala @@ -1,7 +1,7 @@ package com.wavesplatform.database.patch import com.wavesplatform.account.{AddressScheme, Alias} -import com.wavesplatform.common.utils._ +import com.wavesplatform.common.utils.* import com.wavesplatform.database.{Keys, RW} import com.wavesplatform.state.patch.PatchDataLoader diff --git a/node/src/main/scala/com/wavesplatform/database/protobuf/package.scala b/node/src/main/scala/com/wavesplatform/database/protobuf/package.scala new file mode 100644 index 00000000000..fc4b8cbf6c0 --- /dev/null +++ b/node/src/main/scala/com/wavesplatform/database/protobuf/package.scala @@ -0,0 +1,12 @@ +package com.wavesplatform.database + +import com.wavesplatform.common.state.ByteStr +import com.wavesplatform.crypto.DigestLength +import com.wavesplatform.protobuf.* + +package object protobuf { + implicit class BlockMetaExt(final val blockMeta: BlockMeta) extends AnyVal { + def id: ByteStr = + (if (blockMeta.headerHash.size() == DigestLength) blockMeta.headerHash else blockMeta.signature).toByteStr + } +} diff --git a/node/src/main/scala/com/wavesplatform/extensions/Context.scala b/node/src/main/scala/com/wavesplatform/extensions/Context.scala index e6a13e549e0..c0c2e9e900d 100644 --- a/node/src/main/scala/com/wavesplatform/extensions/Context.scala +++ b/node/src/main/scala/com/wavesplatform/extensions/Context.scala @@ -1,7 +1,6 @@ package com.wavesplatform.extensions import akka.actor.ActorSystem -import com.wavesplatform.account.Address import com.wavesplatform.api.common.* import com.wavesplatform.common.state.ByteStr import com.wavesplatform.events.UtxEvent @@ -9,7 +8,7 @@ import com.wavesplatform.lang.ValidationError import com.wavesplatform.settings.WavesSettings import com.wavesplatform.state.Blockchain import com.wavesplatform.transaction.smart.script.trace.TracedResult -import com.wavesplatform.transaction.{Asset, DiscardedBlocks, Transaction} +import 
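Looking back at the database package object above, and not part of the patch: both DBExt.iterateOver and loadLeaseIds use the same RocksDB idiom — seek the iterator to a key prefix (optionally suffixed with a starting height) and keep stepping while the current key still starts with that prefix, with total-order seek enabled so prefix bloom filters do not cut the scan short. A stripped-down version against the raw Java API; scanPrefix and its callback are made-up names for illustration.

import org.rocksdb.{ReadOptions, RocksDB}
import scala.annotation.tailrec
import scala.util.Using

def scanPrefix(db: RocksDB, prefix: Array[Byte])(f: (Array[Byte], Array[Byte]) => Unit): Unit =
  Using.resource(new ReadOptions().setTotalOrderSeek(true)) { ro =>
    Using.resource(db.newIterator(ro)) { iter =>
      @tailrec
      def loop(): Unit =
        if (iter.isValid && iter.key().startsWith(prefix)) {
          f(iter.key(), iter.value())
          iter.next()
          loop()
        }
      iter.seek(prefix) // jump straight to the first key with this prefix
      loop()
    }
  }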
com.wavesplatform.transaction.{DiscardedBlocks, Transaction} import com.wavesplatform.utils.Time import com.wavesplatform.utx.UtxPool import com.wavesplatform.wallet.Wallet @@ -30,7 +29,6 @@ trait Context { def assetsApi: CommonAssetsApi def broadcastTransaction(tx: Transaction): TracedResult[ValidationError, Boolean] - def spendableBalanceChanged: Observable[(Address, Asset)] def utxEvents: Observable[UtxEvent] def actorSystem: ActorSystem } diff --git a/node/src/main/scala/com/wavesplatform/history/History.scala b/node/src/main/scala/com/wavesplatform/history/History.scala index 2d686b8aa31..afd7fb5e3dc 100644 --- a/node/src/main/scala/com/wavesplatform/history/History.scala +++ b/node/src/main/scala/com/wavesplatform/history/History.scala @@ -3,9 +3,8 @@ package com.wavesplatform.history import com.wavesplatform.block.{Block, MicroBlock} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.database -import com.wavesplatform.database.DBExt +import com.wavesplatform.database.RDB import com.wavesplatform.state.{Blockchain, Height} -import org.iq80.leveldb.DB trait History { def loadBlockBytes(id: ByteStr): Option[(Byte, Array[Byte])] @@ -16,21 +15,20 @@ trait History { object History { private def versionedBytes(block: Block): (Byte, Array[Byte]) = block.header.version -> block.bytes() - def apply(blockchain: Blockchain, liquidBlock: ByteStr => Option[Block], microBlock: ByteStr => Option[MicroBlock], db: DB): History = new History { - override def loadBlockBytes(id: ByteStr): Option[(Byte, Array[Byte])] = - liquidBlock(id) - .orElse(blockchain.heightOf(id).flatMap { h => - db.readOnly { ro => - database.loadBlock(Height(h), ro) - } - }) - .map(versionedBytes) + def apply(blockchain: Blockchain, liquidBlock: ByteStr => Option[Block], microBlock: ByteStr => Option[MicroBlock], rdb: RDB): History = + new History { + override def loadBlockBytes(id: ByteStr): Option[(Byte, Array[Byte])] = + liquidBlock(id) + .orElse(blockchain.heightOf(id).flatMap { h => + database.loadBlock(Height(h), rdb) + }) + .map(versionedBytes) - override def loadMicroBlock(id: ByteStr): Option[MicroBlock] = microBlock(id) + override def loadMicroBlock(id: ByteStr): Option[MicroBlock] = microBlock(id) - override def blockIdsAfter(candidates: Seq[ByteStr], count: Int): Seq[ByteStr] = - candidates.view.flatMap(blockchain.heightOf).headOption.fold[Seq[ByteStr]](Seq.empty) { firstCommonHeight => - (firstCommonHeight to firstCommonHeight + count).flatMap(blockchain.blockId) - } - } + override def blockIdsAfter(candidates: Seq[ByteStr], count: Int): Seq[ByteStr] = + candidates.view.flatMap(blockchain.heightOf).headOption.fold[Seq[ByteStr]](Seq.empty) { firstCommonHeight => + (firstCommonHeight to firstCommonHeight + count).flatMap(blockchain.blockId) + } + } } diff --git a/node/src/main/scala/com/wavesplatform/history/StorageFactory.scala b/node/src/main/scala/com/wavesplatform/history/StorageFactory.scala index 72f03205074..d963baf5359 100644 --- a/node/src/main/scala/com/wavesplatform/history/StorageFactory.scala +++ b/node/src/main/scala/com/wavesplatform/history/StorageFactory.scala @@ -1,42 +1,37 @@ package com.wavesplatform.history -import com.wavesplatform.account.Address -import com.wavesplatform.database.{DBExt, Keys, LevelDBWriter, loadActiveLeases} +import com.wavesplatform.database.{DBExt, Keys, RDB, RocksDBWriter, loadActiveLeases} import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.mining.Miner import com.wavesplatform.settings.WavesSettings import 
com.wavesplatform.state.BlockchainUpdaterImpl -import com.wavesplatform.transaction.Asset import com.wavesplatform.utils.{ScorexLogging, Time, UnsupportedFeature, forceStopApplication} -import monix.reactive.Observer -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB object StorageFactory extends ScorexLogging { - private val StorageVersion = 5 + private val StorageVersion = 1 def apply( settings: WavesSettings, - db: DB, + rdb: RDB, time: Time, - spendableBalanceChanged: Observer[(Address, Asset)], blockchainUpdateTriggers: BlockchainUpdateTriggers, miner: Miner = _ => () - ): (BlockchainUpdaterImpl, LevelDBWriter & AutoCloseable) = { - checkVersion(db) - val levelDBWriter = LevelDBWriter(db, spendableBalanceChanged, settings) + ): (BlockchainUpdaterImpl, RocksDBWriter) = { + checkVersion(rdb.db) + val rocksDBWriter = new RocksDBWriter(rdb, settings.blockchainSettings, settings.dbSettings) val bui = new BlockchainUpdaterImpl( - levelDBWriter, - spendableBalanceChanged, + rocksDBWriter, settings, time, blockchainUpdateTriggers, - (minHeight, maxHeight) => loadActiveLeases(db, minHeight, maxHeight), + (minHeight, maxHeight) => loadActiveLeases(rdb, minHeight, maxHeight), miner ) - (bui, levelDBWriter) + (bui, rocksDBWriter) } - private def checkVersion(db: DB): Unit = db.readWrite { rw => + private def checkVersion(db: RocksDB): Unit = db.readWrite { rw => val version = rw.get(Keys.version) val height = rw.get(Keys.height) if (version != StorageVersion) { diff --git a/node/src/main/scala/com/wavesplatform/metrics/LevelDBStats.scala b/node/src/main/scala/com/wavesplatform/metrics/RocksDBStats.scala similarity index 89% rename from node/src/main/scala/com/wavesplatform/metrics/LevelDBStats.scala rename to node/src/main/scala/com/wavesplatform/metrics/RocksDBStats.scala index c9b31cd2dc4..8971f6f3c44 100644 --- a/node/src/main/scala/com/wavesplatform/metrics/LevelDBStats.scala +++ b/node/src/main/scala/com/wavesplatform/metrics/RocksDBStats.scala @@ -5,9 +5,9 @@ import kamon.Kamon import kamon.metric.{MeasurementUnit, Metric} //noinspection TypeAnnotation -object LevelDBStats { +object RocksDBStats { implicit class DbHistogramExt(private val h: Metric.Histogram) extends AnyVal { - def recordTagged(key: Key[_], value: Array[Byte]): Unit = recordTagged(key.name, value) + def recordTagged(key: Key[?], value: Array[Byte]): Unit = recordTagged(key.name, value) def recordTagged(tag: String, value: Array[Byte]): Unit = h.withTag("key", tag).record(Option(value).fold(0L)(_.length)) diff --git a/node/src/main/scala/com/wavesplatform/network/InvalidBlockStorage.scala b/node/src/main/scala/com/wavesplatform/network/InvalidBlockStorage.scala index adf5b3b29e9..2bae04cd2d9 100644 --- a/node/src/main/scala/com/wavesplatform/network/InvalidBlockStorage.scala +++ b/node/src/main/scala/com/wavesplatform/network/InvalidBlockStorage.scala @@ -3,7 +3,7 @@ package com.wavesplatform.network import com.google.common.cache.CacheBuilder import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.ValidationError -import com.wavesplatform.network.InvalidBlockStorageImpl._ +import com.wavesplatform.network.InvalidBlockStorageImpl.* import scala.concurrent.duration.FiniteDuration diff --git a/node/src/main/scala/com/wavesplatform/network/LegacyFrameCodec.scala b/node/src/main/scala/com/wavesplatform/network/LegacyFrameCodec.scala index 6f790d348df..9d1bccb6afe 100644 --- a/node/src/main/scala/com/wavesplatform/network/LegacyFrameCodec.scala +++ 
b/node/src/main/scala/com/wavesplatform/network/LegacyFrameCodec.scala @@ -1,28 +1,26 @@ package com.wavesplatform.network -import java.util - import com.google.common.cache.CacheBuilder + +import java.util import com.wavesplatform.block.Block import com.wavesplatform.common.utils.Base64 import com.wavesplatform.crypto -import com.wavesplatform.network.message.Message._ +import com.wavesplatform.network.message.Message.* import com.wavesplatform.transaction.Transaction import com.wavesplatform.utils.ScorexLogging import io.netty.buffer.ByteBuf -import io.netty.buffer.Unpooled._ +import io.netty.buffer.Unpooled.* import io.netty.channel.ChannelHandlerContext import io.netty.handler.codec.{ByteToMessageCodec, DecoderException} import scala.concurrent.duration.FiniteDuration import scala.util.control.NonFatal -class LegacyFrameCodec(peerDatabase: PeerDatabase, receivedTxsCacheTimeout: FiniteDuration) - extends ByteToMessageCodec[Any] - with ScorexLogging { +class LegacyFrameCodec(peerDatabase: PeerDatabase, receivedTxsCacheTimeout: FiniteDuration) extends ByteToMessageCodec[Any] with ScorexLogging { import BasicMessagesRepo.specsByCodes - import LegacyFrameCodec._ + import LegacyFrameCodec.* private val receivedTxsCache = CacheBuilder .newBuilder() diff --git a/node/src/main/scala/com/wavesplatform/network/MicroBlockSynchronizer.scala b/node/src/main/scala/com/wavesplatform/network/MicroBlockSynchronizer.scala index 434397db055..260ee374a96 100644 --- a/node/src/main/scala/com/wavesplatform/network/MicroBlockSynchronizer.scala +++ b/node/src/main/scala/com/wavesplatform/network/MicroBlockSynchronizer.scala @@ -1,8 +1,8 @@ package com.wavesplatform.network -import java.util.concurrent.TimeUnit - import com.google.common.cache.{Cache, CacheBuilder} + +import java.util.concurrent.TimeUnit import com.wavesplatform.block.Block.BlockId import com.wavesplatform.block.MicroBlock import com.wavesplatform.common.state.ByteStr @@ -26,7 +26,7 @@ object MicroBlockSynchronizer extends ScorexLogging { lastBlockIdEvents: Observable[ByteStr], microblockInvs: ChannelObservable[MicroBlockInv], microblockResponses: ChannelObservable[MicroBlockResponse], - scheduler: SchedulerService, + scheduler: SchedulerService ): (Observable[(Channel, MicroblockData)], Coeval[CacheSizes]) = { implicit val schdlr: SchedulerService = scheduler @@ -90,48 +90,50 @@ object MicroBlockSynchronizer extends ScorexLogging { .subscribe() microblockInvs - .mapEval { - case (ch, mbInv @ MicroBlockInv(_, totalBlockId, reference, _)) => - Task.evalAsync { - val sig = try mbInv.signaturesValid() + .mapEval { case (ch, mbInv @ MicroBlockInv(_, totalBlockId, reference, _)) => + Task.evalAsync { + val sig = + try mbInv.signaturesValid() catch { case t: Throwable => log.error(s"Error validating signature", t) throw t } - sig match { - case Left(err) => - peerDatabase.blacklistAndClose(ch, err.toString) - case Right(_) => - microBlockOwners.get(totalBlockId, () => MSet.empty) += ch - nextInvs.get(reference, { () => + sig match { + case Left(err) => + peerDatabase.blacklistAndClose(ch, err.toString) + case Right(_) => + microBlockOwners.get(totalBlockId, () => MSet.empty) += ch + nextInvs.get( + reference, + { () => BlockStats.inv(mbInv, ch) mbInv - }) - lastBlockId() match { - case Some(`reference`) if !alreadyRequested(totalBlockId) => tryDownloadNext(reference) - case _ => // either the microblock has already been requested or it does no reference the last block } - } - }.logErr + ) + lastBlockId() match { + case Some(`reference`) if 
!alreadyRequested(totalBlockId) => tryDownloadNext(reference) + case _ => // either the microblock has already been requested or it does no reference the last block + } + } + }.logErr } .executeOn(scheduler) .logErr .subscribe() - val observable = microblockResponses.observeOn(scheduler).flatMap { - case (ch, MicroBlockResponse(mb, totalRef)) => - successfullyReceived.put(totalRef, dummy) - BlockStats.received(mb, ch, totalRef) - Option(awaiting.getIfPresent(totalRef)) match { - case None => - log.trace(s"${id(ch)} Got unexpected ${mb.stringRepr(totalRef)}") - Observable.empty - case Some(mi) => - log.trace(s"${id(ch)} Got ${mb.stringRepr(totalRef)}, as expected") - awaiting.invalidate(totalRef) - Observable((ch, MicroblockData(Option(mi), mb, Coeval.evalOnce(owners(totalRef))))) - } + val observable = microblockResponses.observeOn(scheduler).flatMap { case (ch, MicroBlockResponse(mb, totalRef)) => + successfullyReceived.put(totalRef, dummy) + BlockStats.received(mb, ch, totalRef) + Option(awaiting.getIfPresent(totalRef)) match { + case None => + log.trace(s"${id(ch)} Got unexpected ${mb.stringRepr(totalRef)}") + Observable.empty + case Some(mi) => + log.trace(s"${id(ch)} Got ${mb.stringRepr(totalRef)}, as expected") + awaiting.invalidate(totalRef) + Observable((ch, MicroblockData(Option(mi), mb, Coeval.evalOnce(owners(totalRef))))) + } } (observable, cacheSizesReporter) } diff --git a/node/src/main/scala/com/wavesplatform/network/PeerDatabaseImpl.scala b/node/src/main/scala/com/wavesplatform/network/PeerDatabaseImpl.scala index 747966d068f..b735fad6302 100644 --- a/node/src/main/scala/com/wavesplatform/network/PeerDatabaseImpl.scala +++ b/node/src/main/scala/com/wavesplatform/network/PeerDatabaseImpl.scala @@ -10,8 +10,8 @@ import com.wavesplatform.utils.{JsonFileStorage, ScorexLogging} import io.netty.channel.Channel import io.netty.channel.socket.nio.NioSocketChannel -import scala.jdk.CollectionConverters._ -import scala.collection._ +import scala.jdk.CollectionConverters.* +import scala.collection.* import scala.concurrent.duration.FiniteDuration import scala.util.Random import scala.util.control.NonFatal diff --git a/node/src/main/scala/com/wavesplatform/network/RxExtensionLoader.scala b/node/src/main/scala/com/wavesplatform/network/RxExtensionLoader.scala index 1169be39394..35117d455ae 100644 --- a/node/src/main/scala/com/wavesplatform/network/RxExtensionLoader.scala +++ b/node/src/main/scala/com/wavesplatform/network/RxExtensionLoader.scala @@ -8,16 +8,17 @@ import com.wavesplatform.metrics.BlockStats import com.wavesplatform.network.RxExtensionLoader.ApplierState.Buffer import com.wavesplatform.network.RxExtensionLoader.LoaderState.WithPeer import com.wavesplatform.network.RxScoreObserver.{ChannelClosedAndSyncWith, SyncWith} +import com.wavesplatform.state.ParSignatureChecker import com.wavesplatform.transaction.TxValidationError.GenericError import com.wavesplatform.utils.ScorexLogging -import io.netty.channel._ +import io.netty.channel.* import monix.eval.{Coeval, Task} import monix.execution.CancelableFuture import monix.execution.schedulers.SchedulerService import monix.reactive.subjects.{ConcurrentSubject, Subject} import monix.reactive.{Observable, Observer} -import scala.concurrent.duration._ +import scala.concurrent.duration.* case class ExtensionBlocks(remoteScore: BigInt, blocks: Seq[Block]) { override def toString: String = s"ExtensionBlocks($remoteScore, ${formatSignatures(blocks.map(_.id()))}" @@ -148,6 +149,7 @@ object RxExtensionLoader extends ScorexLogging { 
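// The RxExtensionLoader hunk that follows wires in the new ParSignatureChecker: as soon as an
// extension block arrives, checkBlockSignature(block) kicks the signature check onto a dedicated
// pool so the later, sequential appender path finds the result already computed. A minimal sketch
// of that fire-and-forget pattern, assuming the checked value memoizes its result; the object and
// pool names below are illustrative and not part of this patch:
import com.wavesplatform.utils.Schedulers
import monix.eval.Task
import monix.execution.schedulers.SchedulerService

object EagerSignatureCheck {
  // hypothetical dedicated pool, mirroring the "sigverify" scheduler introduced by this patch
  private val pool: SchedulerService = Schedulers.fixedPool(2, "eager-check")

  // Start the check and forget about it; the later synchronous call hits the memoized result.
  def prevalidate(signatureValid: () => Boolean): Unit =
    Task(signatureValid()).executeOn(pool).runAsyncAndForget(pool)
}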
state.loaderState match { case LoaderState.ExpectingBlocks(c, requested, expected, recieved, _) if c.channel == ch && expected.contains(block.id()) => BlockStats.received(block, BlockStats.Source.Ext, ch) + ParSignatureChecker.checkBlockSignature(block) if (expected == Set(block.id())) { val blockById = (recieved + block).map(b => b.id() -> b).toMap val ext = ExtensionBlocks(c.score, requested.map(blockById)) @@ -225,7 +227,7 @@ object RxExtensionLoader extends ScorexLogging { Observable( signatures.observeOn(scheduler).map { case (ch, sigs) => stateValue = onNewSignatures(stateValue, ch, sigs) }, - blocks.observeOn(scheduler).map { case (ch, block) => stateValue = onBlock(stateValue, ch, block) }, + blocks.observeOn(scheduler).map { case (ch, block) => stateValue = onBlock(stateValue, ch, block) }, syncWithChannelClosed.observeOn(scheduler).map { ch => stateValue = onNewSyncWithChannelClosed(stateValue, ch) }, diff --git a/node/src/main/scala/com/wavesplatform/network/RxScoreObserver.scala b/node/src/main/scala/com/wavesplatform/network/RxScoreObserver.scala index 38d664fea50..5d563350d53 100644 --- a/node/src/main/scala/com/wavesplatform/network/RxScoreObserver.scala +++ b/node/src/main/scala/com/wavesplatform/network/RxScoreObserver.scala @@ -1,19 +1,18 @@ package com.wavesplatform.network import java.util.concurrent.TimeUnit - -import cats._ -import cats.instances.bigInt._ -import cats.instances.tuple._ +import cats.* +import cats.instances.bigInt.* +import cats.instances.tuple.* import com.google.common.cache.CacheBuilder import com.wavesplatform.utils.ScorexLogging -import io.netty.channel._ +import io.netty.channel.* import monix.eval.Coeval import monix.execution.Scheduler import monix.reactive.Observable import scala.concurrent.duration.FiniteDuration -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* case class BestChannel(channel: Channel, score: BigInt) { override def toString: String = s"BestChannel(${id(channel)},$score)" @@ -42,7 +41,8 @@ object RxScoreObserver extends ScorexLogging { val head = bestScoreChannels.head log.trace(s"${id(head)} Publishing new best channel with score=$bestScore > localScore $localScore") Some(BestChannel(head, bestScore)) - } else None + } + else None } def apply( diff --git a/node/src/main/scala/com/wavesplatform/network/TransactionSynchronizer.scala b/node/src/main/scala/com/wavesplatform/network/TransactionSynchronizer.scala index 81a07747cc6..ac0040ab860 100644 --- a/node/src/main/scala/com/wavesplatform/network/TransactionSynchronizer.scala +++ b/node/src/main/scala/com/wavesplatform/network/TransactionSynchronizer.scala @@ -29,19 +29,22 @@ object TransactionSynchronizer extends LazyLogging { def transactionIsNew(txId: ByteStr): Boolean = { var isNew = false - knownTransactions.get(txId, { () => - isNew = true; dummy - }) + knownTransactions.get( + txId, + { () => + isNew = true; dummy + } + ) isNew } transactions - .filter { - case (_, tx) => transactionIsNew(tx.id()) + .filter { case (_, tx) => + transactionIsNew(tx.id()) } .whileBusyBuffer(OverflowStrategy.DropNew(settings.maxQueueSize)) - .mapParallelUnorderedF(settings.maxThreads) { - case (channel, tx) => transactionValidator.validateAndBroadcast(tx, Some(channel)) + .mapParallelUnorderedF(settings.maxThreads) { case (channel, tx) => + transactionValidator.validateAndBroadcast(tx, Some(channel)) } .subscribe() } diff --git a/node/src/main/scala/com/wavesplatform/serialization/package.scala 
b/node/src/main/scala/com/wavesplatform/serialization/package.scala index 6b8089e30d0..1d165a2eef3 100644 --- a/node/src/main/scala/com/wavesplatform/serialization/package.scala +++ b/node/src/main/scala/com/wavesplatform/serialization/package.scala @@ -55,6 +55,7 @@ package object serialization { } def getByteArray(size: Int): Array[Byte] = { + require(size < (10 << 20), s"requested array size $size exceeds 10MB limit") val result = new Array[Byte](size) buf.get(result) result diff --git a/node/src/main/scala/com/wavesplatform/settings/DBSettings.scala b/node/src/main/scala/com/wavesplatform/settings/DBSettings.scala index fd718a87d6e..5818d68a55d 100644 --- a/node/src/main/scala/com/wavesplatform/settings/DBSettings.scala +++ b/node/src/main/scala/com/wavesplatform/settings/DBSettings.scala @@ -9,5 +9,6 @@ case class DBSettings( maxCacheSize: Int, maxRollbackDepth: Int, rememberBlocks: FiniteDuration, - useBloomFilter: Boolean + useBloomFilter: Boolean, + rocksdb: RocksDBSettings ) diff --git a/node/src/main/scala/com/wavesplatform/settings/RocksDBSettings.scala b/node/src/main/scala/com/wavesplatform/settings/RocksDBSettings.scala new file mode 100644 index 00000000000..b6969adabdc --- /dev/null +++ b/node/src/main/scala/com/wavesplatform/settings/RocksDBSettings.scala @@ -0,0 +1,9 @@ +package com.wavesplatform.settings + +case class RocksDBSettings( + mainCacheSize: SizeInBytes, + txCacheSize: SizeInBytes, + txMetaCacheSize: SizeInBytes, + writeBufferSize: SizeInBytes, + enableStatistics: Boolean +) diff --git a/node/src/main/scala/com/wavesplatform/settings/package.scala b/node/src/main/scala/com/wavesplatform/settings/package.scala index ccb29624f01..ab883522bb1 100644 --- a/node/src/main/scala/com/wavesplatform/settings/package.scala +++ b/node/src/main/scala/com/wavesplatform/settings/package.scala @@ -10,6 +10,7 @@ import net.ceedubs.ficus.Ficus.traversableReader import net.ceedubs.ficus.readers.namemappers.HyphenNameMapper import net.ceedubs.ficus.readers.{NameMapper, ValueReader} import org.apache.commons.lang3.SystemUtils +import supertagged.TaggedType import scala.jdk.CollectionConverters.* import scala.util.Try @@ -54,6 +55,13 @@ package object settings { case x :: xs => NonEmptyList(x, xs) } + object SizeInBytes extends TaggedType[Long] + type SizeInBytes = SizeInBytes.Type + + implicit val sizeInBytesReader: ValueReader[SizeInBytes] = {(cfg: Config, path: String) => + SizeInBytes(cfg.getBytes(path).toLong) + } + def loadConfig(userConfig: Config): Config = { loadConfig(Some(userConfig)) } diff --git a/node/src/main/scala/com/wavesplatform/state/Blockchain.scala b/node/src/main/scala/com/wavesplatform/state/Blockchain.scala index 0566ac6ad2a..82a0336a6ed 100644 --- a/node/src/main/scala/com/wavesplatform/state/Blockchain.scala +++ b/node/src/main/scala/com/wavesplatform/state/Blockchain.scala @@ -44,6 +44,7 @@ trait Blockchain { def transferById(id: ByteStr): Option[(Int, TransferTransactionLike)] def transactionInfo(id: ByteStr): Option[(TxMeta, Transaction)] + def transactionInfos(ids: Seq[ByteStr]): Seq[Option[(TxMeta, Transaction)]] def transactionMeta(id: ByteStr): Option[TxMeta] def containsTransaction(tx: Transaction): Boolean @@ -71,8 +72,14 @@ trait Blockchain { def leaseBalance(address: Address): LeaseBalance + def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] + def balance(address: Address, mayBeAssetId: Asset = Waves): Long + def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), Long] + + def wavesBalances(addresses: Seq[Address]): 
Map[Address, Long] + def resolveERC20Address(address: ERC20Address): Option[IssuedAsset] } @@ -81,7 +88,7 @@ object Blockchain { def isEmpty: Boolean = blockchain.height == 0 def isSponsorshipActive: Boolean = blockchain.height >= Sponsorship.sponsoredFeesSwitchHeight(blockchain) - def isNGActive: Boolean = blockchain.isFeatureActivated(BlockchainFeatures.NG, blockchain.height - 1) + def isNGActive: Boolean = blockchain.isFeatureActivated(BlockchainFeatures.NG, blockchain.height - 1) def parentHeader(block: BlockHeader, back: Int = 1): Option[BlockHeader] = blockchain diff --git a/node/src/main/scala/com/wavesplatform/state/BlockchainUpdaterImpl.scala b/node/src/main/scala/com/wavesplatform/state/BlockchainUpdaterImpl.scala index 16050903436..19e3d6830fa 100644 --- a/node/src/main/scala/com/wavesplatform/state/BlockchainUpdaterImpl.scala +++ b/node/src/main/scala/com/wavesplatform/state/BlockchainUpdaterImpl.scala @@ -7,7 +7,7 @@ import com.wavesplatform.api.BlockMeta import com.wavesplatform.block.Block.BlockId import com.wavesplatform.block.{Block, MicroBlock, SignedBlockHeader} import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.Storage +import com.wavesplatform.database.RocksDBWriter import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.features.BlockchainFeatures.ConsensusImprovements @@ -25,13 +25,12 @@ import com.wavesplatform.transaction.transfer.TransferTransactionLike import com.wavesplatform.utils.{ScorexLogging, Time, UnsupportedFeature, forceStopApplication} import kamon.Kamon import monix.reactive.subjects.ReplaySubject -import monix.reactive.{Observable, Observer} +import monix.reactive.Observable import java.util.concurrent.locks.{Lock, ReentrantReadWriteLock} class BlockchainUpdaterImpl( - leveldb: Blockchain & Storage, - spendableBalanceChanged: Observer[(Address, Asset)], + rocksdb: RocksDBWriter, wavesSettings: WavesSettings, time: Time, blockchainUpdateTriggers: BlockchainUpdateTriggers, @@ -61,14 +60,14 @@ class BlockchainUpdaterImpl( private[this] var ngState: Option[NgState] = Option.empty @volatile - private[this] var restTotalConstraint: MiningConstraint = MiningConstraints(leveldb, leveldb.height).total + private[this] var restTotalConstraint: MiningConstraint = MiningConstraints(rocksdb, rocksdb.height).total private val internalLastBlockInfo = ReplaySubject.createLimited[LastBlockInfo](1) private def lastBlockReward: Option[Long] = this.blockReward(this.height) private def publishLastBlockInfo(): Unit = - for (id <- this.lastBlockId; ts <- ngState.map(_.base.header.timestamp).orElse(leveldb.lastBlockTimestamp)) { + for (id <- this.lastBlockId; ts <- ngState.map(_.base.header.timestamp).orElse(rocksdb.lastBlockTimestamp)) { val blockchainReady = ts + maxBlockReadinessAge > time.correctedTime() internalLastBlockInfo.onNext(LastBlockInfo(id, height, score, blockchainReady)) } @@ -102,19 +101,19 @@ class BlockchainUpdaterImpl( override val settings: BlockchainSettings = wavesSettings.blockchainSettings override def isLastBlockId(id: ByteStr): Boolean = readLock { - ngState.fold(leveldb.lastBlockId.contains(id))(_.contains(id)) + ngState.fold(rocksdb.lastBlockId.contains(id))(_.contains(id)) } override val lastBlockInfo: Observable[LastBlockInfo] = internalLastBlockInfo private def featuresApprovedWithBlock(block: Block): Set[Short] = { - val height = leveldb.height + 1 + val height = rocksdb.height + 1 val featuresCheckPeriod = 
functionalitySettings.activationWindowSize(height) val blocksForFeatureActivation = functionalitySettings.blocksForFeatureActivation(height) if (height % featuresCheckPeriod == 0) { - val approvedFeatures = leveldb + val approvedFeatures = rocksdb .featureVotes(height) .map { case (feature, votes) => feature -> (if (block.header.featureVotes.contains(feature)) votes + 1 else votes) } .filter { case (_, votes) => votes >= blocksForFeatureActivation } @@ -130,7 +129,7 @@ class BlockchainUpdaterImpl( |OTHERWISE THE NODE WILL BE STOPPED OR FORKED UPON FEATURE ACTIVATION""".stripMargin) } - val activatedFeatures: Set[Short] = leveldb.activatedFeaturesAt(height) + val activatedFeatures: Set[Short] = rocksdb.activatedFeaturesAt(height) val unimplementedActivated = activatedFeatures.diff(BlockchainFeatures.implemented) if (unimplementedActivated.nonEmpty) { @@ -153,10 +152,10 @@ class BlockchainUpdaterImpl( val settings = this.settings.rewardsSettings val nextHeight = this.height + 1 - if (height == 0 && leveldb.featureActivationHeight(ConsensusImprovements.id).exists(_ <= 1)) + if (height == 0 && rocksdb.featureActivationHeight(ConsensusImprovements.id).exists(_ <= 1)) None else - leveldb + rocksdb .featureActivationHeight(BlockchainFeatures.BlockReward.id) .filter(_ <= nextHeight) .flatMap { activatedAt => @@ -186,10 +185,15 @@ class BlockchainUpdaterImpl( .orElse(lastBlockReward) } - override def processBlock(block: Block, hitSource: ByteStr, verify: Boolean = true): Either[ValidationError, Seq[Diff]] = + override def processBlock( + block: Block, + hitSource: ByteStr, + verify: Boolean = true, + txSignParCheck: Boolean = true + ): Either[ValidationError, Seq[Diff]] = writeLock { - val height = leveldb.height - val notImplementedFeatures: Set[Short] = leveldb.activatedFeaturesAt(height).diff(BlockchainFeatures.implemented) + val height = rocksdb.height + val notImplementedFeatures: Set[Short] = rocksdb.activatedFeaturesAt(height).diff(BlockchainFeatures.implemented) Either .cond( @@ -200,28 +204,30 @@ class BlockchainUpdaterImpl( .flatMap[ValidationError, Seq[Diff]](_ => (ngState match { case None => - leveldb.lastBlockId match { + rocksdb.lastBlockId match { case Some(uniqueId) if uniqueId != block.header.reference => val logDetails = s"The referenced block(${block.header.reference})" + - s" ${if (leveldb.contains(block.header.reference)) "exits, it's not last persisted" else "doesn't exist"}" + s" ${if (rocksdb.contains(block.header.reference)) "exits, it's not last persisted" else "doesn't exist"}" Left(BlockAppendError(s"References incorrect or non-existing block: " + logDetails, block)) case lastBlockId => - val height = lastBlockId.fold(0)(leveldb.unsafeHeightOf) - val miningConstraints = MiningConstraints(leveldb, height) + val height = lastBlockId.fold(0)(rocksdb.unsafeHeightOf) + val miningConstraints = MiningConstraints(rocksdb, height) val reward = nextReward() - val referencedBlockchain = CompositeBlockchain(leveldb, reward) + val referencedBlockchain = CompositeBlockchain(rocksdb, reward) BlockDiffer .fromBlock( referencedBlockchain, - leveldb.lastBlock, + rocksdb.lastBlock, block, miningConstraints.total, hitSource, - verify + rocksdb.loadCacheData, + verify, + txSignParCheck = txSignParCheck ) .map { r => - val updatedBlockchain = CompositeBlockchain(leveldb, r.diff, block, hitSource, r.carry, reward) + val updatedBlockchain = CompositeBlockchain(rocksdb, r.diff, block, hitSource, r.carry, reward) miner.scheduleMining(Some(updatedBlockchain)) 
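// In this processBlock hunk every BlockDiffer.fromBlock call gains two extra arguments:
// rocksdb.loadCacheData, a callback that lets the differ pre-warm the writer's caches with the
// addresses and exchange order ids a block is about to touch, and txSignParCheck, which toggles the
// parallel signature pre-check. A minimal sketch of the callback's shape; the no-op mirrors the
// `(_, _) => ()` default BlockDiffer declares further down in this patch, while the names and the
// logging stand-in are purely illustrative:
import com.wavesplatform.account.Address
import com.wavesplatform.common.state.ByteStr

object CacheWarming {
  // Shape of the callback threaded through BlockDiffer in this patch.
  type LoadCacheData = (Set[Address], Set[ByteStr]) => Unit

  // Default: do nothing (used when no warm-up is wanted).
  val noWarmUp: LoadCacheData = (_, _) => ()

  // Hypothetical stand-in for RocksDBWriter.loadCacheData, shown only to illustrate the contract.
  def logOnly(addresses: Set[Address], orderIds: Set[ByteStr]): Unit =
    println(s"would prefetch ${addresses.size} address entries and ${orderIds.size} order fills")
}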
blockchainUpdateTriggers.onProcessBlock(block, r.detailedDiff, reward, hitSource, referencedBlockchain) Option((r, Nil, reward, hitSource)) @@ -230,20 +236,22 @@ class BlockchainUpdaterImpl( case Some(ng) => if (ng.base.header.reference == block.header.reference) { if (block.blockScore() > ng.base.blockScore()) { - val height = leveldb.unsafeHeightOf(ng.base.header.reference) - val miningConstraints = MiningConstraints(leveldb, height) + val height = rocksdb.unsafeHeightOf(ng.base.header.reference) + val miningConstraints = MiningConstraints(rocksdb, height) - blockchainUpdateTriggers.onRollback(this, ng.base.header.reference, leveldb.height) + blockchainUpdateTriggers.onRollback(this, ng.base.header.reference, rocksdb.height) - val referencedBlockchain = CompositeBlockchain(leveldb, ng.reward) + val referencedBlockchain = CompositeBlockchain(rocksdb, ng.reward) BlockDiffer .fromBlock( referencedBlockchain, - leveldb.lastBlock, + rocksdb.lastBlock, block, miningConstraints.total, hitSource, - verify + rocksdb.loadCacheData, + verify, + txSignParCheck = txSignParCheck ) .map { r => log.trace( @@ -260,20 +268,22 @@ class BlockchainUpdaterImpl( Right(None) } else { log.trace(s"New liquid block is better version of existing, swapping") - val height = leveldb.unsafeHeightOf(ng.base.header.reference) - val miningConstraints = MiningConstraints(leveldb, height) + val height = rocksdb.unsafeHeightOf(ng.base.header.reference) + val miningConstraints = MiningConstraints(rocksdb, height) - blockchainUpdateTriggers.onRollback(this, ng.base.header.reference, leveldb.height) + blockchainUpdateTriggers.onRollback(this, ng.base.header.reference, rocksdb.height) - val referencedBlockchain = CompositeBlockchain(leveldb, ng.reward) + val referencedBlockchain = CompositeBlockchain(rocksdb, ng.reward) BlockDiffer .fromBlock( referencedBlockchain, - leveldb.lastBlock, + rocksdb.lastBlock, block, miningConstraints.total, hitSource, - verify + rocksdb.loadCacheData, + verify, + txSignParCheck = txSignParCheck ) .map { r => blockchainUpdateTriggers.onProcessBlock(block, r.detailedDiff, ng.reward, hitSource, referencedBlockchain) @@ -293,7 +303,7 @@ class BlockchainUpdaterImpl( case None => Left(BlockAppendError(s"References incorrect or non-existing block", block)) case Some((referencedForgedBlock, referencedLiquidDiff, carry, totalFee, discarded)) => if (!verify || referencedForgedBlock.signatureValid()) { - val height = leveldb.heightOf(referencedForgedBlock.header.reference).getOrElse(0) + val height = rocksdb.heightOf(referencedForgedBlock.header.reference).getOrElse(0) if (discarded.nonEmpty) { blockchainUpdateTriggers.onMicroBlockRollback(this, block.header.reference) @@ -302,7 +312,7 @@ class BlockchainUpdaterImpl( } val constraint: MiningConstraint = { - val miningConstraints = MiningConstraints(leveldb, height) + val miningConstraints = MiningConstraints(rocksdb, height) miningConstraints.total } @@ -314,7 +324,7 @@ class BlockchainUpdaterImpl( for { liquidDiffWithCancelledLeases <- ng.cancelExpiredLeases(referencedLiquidDiff).leftMap(GenericError(_)) referencedBlockchain = CompositeBlockchain( - leveldb, + rocksdb, liquidDiffWithCancelledLeases, referencedForgedBlock, ng.hitSource, @@ -328,7 +338,9 @@ class BlockchainUpdaterImpl( block, constraint, hitSource, - verify + rocksdb.loadCacheData, + verify, + txSignParCheck = txSignParCheck ) } yield { val tempBlockchain = CompositeBlockchain( @@ -343,7 +355,7 @@ class BlockchainUpdaterImpl( blockchainUpdateTriggers.onProcessBlock(block, 
differResult.detailedDiff, reward, hitSource, this) - leveldb.append(liquidDiffWithCancelledLeases, carry, totalFee, prevReward, prevHitSource, referencedForgedBlock) + rocksdb.append(liquidDiffWithCancelledLeases, carry, totalFee, prevReward, prevHitSource, referencedForgedBlock) BlockStats.appended(referencedForgedBlock, referencedLiquidDiff.scriptsComplexity) TxsInBlockchainStats.record(ng.transactions.size) val (discardedMbs, discardedDiffs) = discarded.unzip @@ -361,8 +373,7 @@ class BlockchainUpdaterImpl( } }).map { _ map { case (BlockDiffer.Result(newBlockDiff, carry, totalFee, updatedTotalConstraint, _), discDiffs, reward, hitSource) => - val newHeight = leveldb.height + 1 - val prevNgState = ngState + val newHeight = rocksdb.height + 1 restTotalConstraint = updatedTotalConstraint ngState = Some( @@ -377,7 +388,6 @@ class BlockchainUpdaterImpl( cancelLeases(collectLeasesToCancel(newHeight), newHeight) ) ) - notifyChangedSpendable(prevNgState, ngState) publishLastBlockInfo() if ( @@ -394,9 +404,9 @@ class BlockchainUpdaterImpl( } private def collectLeasesToCancel(newHeight: Int): Seq[LeaseTransaction] = - if (leveldb.isFeatureActivated(BlockchainFeatures.LeaseExpiration, newHeight)) { - val toHeight = newHeight - leveldb.settings.functionalitySettings.leaseExpiration - val fromHeight = leveldb.featureActivationHeight(BlockchainFeatures.LeaseExpiration.id) match { + if (rocksdb.isFeatureActivated(BlockchainFeatures.LeaseExpiration, newHeight)) { + val toHeight = newHeight - rocksdb.settings.functionalitySettings.leaseExpiration + val fromHeight = rocksdb.featureActivationHeight(BlockchainFeatures.LeaseExpiration.id) match { case Some(`newHeight`) => log.trace(s"Collecting leases created up till height $toHeight") 1 @@ -411,7 +421,7 @@ class BlockchainUpdaterImpl( (for { lt <- leaseTransactions ltMeta <- transactionMeta(lt.id()).toSeq - recipient <- leveldb.resolveAlias(lt.recipient).toSeq + recipient <- rocksdb.resolveAlias(lt.recipient).toSeq } yield lt.id() -> Diff( portfolios = Map( lt.sender.toAddress -> Portfolio(0, LeaseBalance(0, -lt.amount.value)), @@ -436,14 +446,14 @@ class BlockchainUpdaterImpl( Right(Seq((ng.bestLiquidBlock, ng.hitSource))) case maybeNg => for { - height <- leveldb.heightOf(blockId).toRight(GenericError(s"No such block $blockId")) + height <- rocksdb.heightOf(blockId).toRight(GenericError(s"No such block $blockId")) _ <- Either.cond( - height >= leveldb.safeRollbackHeight, + height >= rocksdb.safeRollbackHeight, (), - GenericError(s"Rollback is possible only to the block at the height ${leveldb.safeRollbackHeight}") + GenericError(s"Rollback is possible only to the block at the height ${rocksdb.safeRollbackHeight}") ) _ = blockchainUpdateTriggers.onRollback(this, blockId, height) - blocks <- leveldb.rollbackTo(height).leftMap(GenericError(_)) + blocks <- rocksdb.rollbackTo(height).leftMap(GenericError(_)) } yield { ngState = None blocks ++ maybeNg.map(ng => (ng.bestLiquidBlock, ng.hitSource)).toSeq @@ -453,7 +463,6 @@ class BlockchainUpdaterImpl( result match { case Right(_) => log.info(s"Blockchain rollback to $blockId succeeded") - notifyChangedSpendable(prevNgState, ngState) publishLastBlockInfo() miner.scheduleMining() @@ -463,22 +472,6 @@ class BlockchainUpdaterImpl( result } - private def notifyChangedSpendable(prevNgState: Option[NgState], newNgState: Option[NgState]): Unit = { - val changedPortfolios = (prevNgState, newNgState) match { - case (Some(p), Some(n)) => - Diff.combine(p.bestLiquidDiff.portfolios, 
n.bestLiquidDiff.portfolios.view.mapValues(_.negate).toMap).getOrElse(Map.empty) - case (Some(x), _) => x.bestLiquidDiff.portfolios - case (_, Some(x)) => x.bestLiquidDiff.portfolios - case _ => Map.empty - } - - changedPortfolios.foreach { case (addr, p) => - p.assetIds.view - .filter(x => p.spendableBalanceOf(x) != 0) - .foreach(assetId => spendableBalanceChanged.onNext(addr -> assetId)) - } - } - override def processMicroBlock(microBlock: MicroBlock, verify: Boolean = true): Either[ValidationError, BlockId] = writeLock { ngState match { case None => @@ -511,7 +504,7 @@ class BlockchainUpdaterImpl( MicroBlockAppendError("Invalid total block signature", microBlock) ) blockDifferResult <- { - BlockDiffer.fromMicroBlock(this, leveldb.lastBlockTimestamp, microBlock, restTotalConstraint, verify) + BlockDiffer.fromMicroBlock(this, rocksdb.lastBlockTimestamp, microBlock, restTotalConstraint, rocksdb.loadCacheData, verify) } } yield { val BlockDiffer.Result(diff, carry, totalFee, updatedMdConstraint, detailedDiff) = blockDifferResult @@ -526,10 +519,6 @@ class BlockchainUpdaterImpl( log.info(s"${microBlock.stringRepr(blockId)} appended, diff=${diff.hashString}") internalLastBlockInfo.onNext(LastBlockInfo(blockId, height, score, ready = true)) - for { - (addr, p) <- diff.portfolios - assetId <- p.assetIds - } spendableBalanceChanged.onNext(addr -> assetId) blockId } } @@ -543,15 +532,15 @@ class BlockchainUpdaterImpl( private def newlyApprovedFeatures = ngState.fold(Map.empty[Short, Int])(_.approvedFeatures.map(_ -> height).toMap) override def approvedFeatures: Map[Short, Int] = readLock { - newlyApprovedFeatures ++ leveldb.approvedFeatures + newlyApprovedFeatures ++ rocksdb.approvedFeatures } override def activatedFeatures: Map[Short, Int] = readLock { - (newlyApprovedFeatures.view.mapValues(_ + functionalitySettings.activationWindowSize(height)) ++ leveldb.activatedFeatures).toMap + (newlyApprovedFeatures.view.mapValues(_ + functionalitySettings.activationWindowSize(height)) ++ rocksdb.activatedFeatures).toMap } override def featureVotes(height: Int): Map[Short, Int] = readLock { - val innerVotes = leveldb.featureVotes(height) + val innerVotes = rocksdb.featureVotes(height) ngState match { case Some(ng) if this.height <= height => val ngVotes = ng.base.header.featureVotes.map { featureId => @@ -564,9 +553,9 @@ class BlockchainUpdaterImpl( } override def blockReward(height: Int): Option[Long] = readLock { - leveldb.blockReward(height) match { + rocksdb.blockReward(height) match { case r @ Some(_) => r - case None => ngState.collect { case ng if leveldb.height + 1 == height => ng.reward }.flatten + case None => ngState.collect { case ng if rocksdb.height + 1 == height => ng.reward }.flatten } } @@ -574,9 +563,9 @@ class BlockchainUpdaterImpl( activatedFeatures.get(BlockchainFeatures.BlockReward.id) match { case Some(activatedAt) if activatedAt <= height => ngState match { - case None => leveldb.blockRewardVotes(height) + case None => rocksdb.blockRewardVotes(height) case Some(ng) => - val innerVotes = leveldb.blockRewardVotes(height) + val innerVotes = rocksdb.blockRewardVotes(height) if (height == this.height && settings.rewardsSettings.votingWindow(activatedAt, height).contains(height)) innerVotes :+ ng.base.header.rewardVote else innerVotes @@ -588,14 +577,14 @@ class BlockchainUpdaterImpl( override def wavesAmount(height: Int): BigInt = readLock { ngState match { case Some(ng) if this.height == height => - leveldb.wavesAmount(height - 1) + BigInt(ng.reward.getOrElse(0L)) + 
rocksdb.wavesAmount(height - 1) + BigInt(ng.reward.getOrElse(0L)) case _ => - leveldb.wavesAmount(height) + rocksdb.wavesAmount(height) } } override def height: Int = readLock { - leveldb.height + ngState.fold(0)(_ => 1) + rocksdb.height + ngState.fold(0)(_ => 1) } override def heightOf(blockId: BlockId): Option[Int] = readLock { @@ -603,7 +592,7 @@ class BlockchainUpdaterImpl( .collect { case ng if ng.contains(blockId) => this.height } - .orElse(leveldb.heightOf(blockId)) + .orElse(rocksdb.heightOf(blockId)) } override def microBlock(id: BlockId): Option[MicroBlock] = readLock { @@ -621,25 +610,25 @@ class BlockchainUpdaterImpl( ngState .map(_.bestLastBlockInfo(maxTimestamp)) .orElse( - leveldb.lastBlockHeader.map { sh => + rocksdb.lastBlockHeader.map { sh => BlockMinerInfo(sh.header.baseTarget, sh.header.generationSignature, sh.header.timestamp, sh.id()) } ) } override def score: BigInt = readLock { - leveldb.score + ngState.fold(BigInt(0))(_.bestLiquidBlock.blockScore()) + rocksdb.score + ngState.fold(BigInt(0))(_.bestLiquidBlock.blockScore()) } override def carryFee: Long = readLock { - ngState.fold(leveldb.carryFee)(_.carryFee) + ngState.fold(rocksdb.carryFee)(_.carryFee) } override def blockHeader(height: Int): Option[SignedBlockHeader] = readLock { - if (height == leveldb.height + 1) ngState.map { x => + if (height == rocksdb.height + 1) ngState.map { x => SignedBlockHeader(x.bestLiquidBlock.header, x.bestLiquidBlock.signature) } - else leveldb.blockHeader(height) + else rocksdb.blockHeader(height) } override def transferById(id: BlockId): Option[(Int, TransferTransactionLike)] = readLock { @@ -650,6 +639,10 @@ class BlockchainUpdaterImpl( compositeBlockchain.transactionInfo(id) } + override def transactionInfos(ids: Seq[BlockId]): Seq[Option[(TxMeta, Transaction)]] = readLock { + compositeBlockchain.transactionInfos(ids) + } + override def containsTransaction(tx: Transaction): Boolean = readLock { compositeBlockchain.containsTransaction(tx) } @@ -677,7 +670,7 @@ class BlockchainUpdaterImpl( override def balanceSnapshots(address: Address, from: Int, to: Option[BlockId]): Seq[BalanceSnapshot] = readLock { to.fold(ngState.map(_.bestLiquidDiff))(id => ngState.map(_.diffFor(id)._1)) - .fold[Blockchain](leveldb)(CompositeBlockchain(leveldb, _)) + .fold[Blockchain](rocksdb)(CompositeBlockchain(rocksdb, _)) .balanceSnapshots(address, from, to) } @@ -709,14 +702,26 @@ class BlockchainUpdaterImpl( compositeBlockchain.balance(address, mayBeAssetId) } + override def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), TxTimestamp] = readLock { + compositeBlockchain.balances(req) + } + + override def wavesBalances(addresses: Seq[Address]): Map[Address, Long] = readLock { + compositeBlockchain.wavesBalances(addresses) + } + override def leaseBalance(address: Address): LeaseBalance = readLock { compositeBlockchain.leaseBalance(address) } + override def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] = readLock { + compositeBlockchain.leaseBalances(addresses) + } + override def hitSource(height: Int): Option[ByteStr] = readLock { ngState match { case Some(ng) if this.height == height => ng.hitSource.some - case _ => leveldb.hitSource(height) + case _ => rocksdb.hitSource(height) } } @@ -724,8 +729,11 @@ class BlockchainUpdaterImpl( compositeBlockchain.resolveERC20Address(address) } - private[this] def compositeBlockchain = - ngState.fold(leveldb: Blockchain)(CompositeBlockchain(leveldb, _)) + def getCompositeBlockchain: CompositeBlockchain = + 
ngState.fold(CompositeBlockchain(rocksdb, Diff.empty))(CompositeBlockchain(rocksdb, _)) + + private[this] def compositeBlockchain: Blockchain = + ngState.fold(rocksdb: Blockchain)(CompositeBlockchain(rocksdb, _)) // noinspection ScalaStyle,TypeAnnotation private[this] object metrics { diff --git a/node/src/main/scala/com/wavesplatform/state/DataEntry.scala b/node/src/main/scala/com/wavesplatform/state/DataEntry.scala index 575d37c7573..d4c4558fd53 100644 --- a/node/src/main/scala/com/wavesplatform/state/DataEntry.scala +++ b/node/src/main/scala/com/wavesplatform/state/DataEntry.scala @@ -1,6 +1,9 @@ package com.wavesplatform.state +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider} import com.google.common.primitives.{Bytes, Longs, Shorts} +import com.wavesplatform.api.http.StreamSerializerUtils.* import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.v1.traits.domain.{DataItem, DataOp} import com.wavesplatform.serialization.Deser @@ -41,8 +44,8 @@ object DataEntry { val String = Value(3) } - implicit object Format extends Format[DataEntry[_]] { - def reads(jsv: JsValue): JsResult[DataEntry[_]] = { + implicit object Format extends Format[DataEntry[?]] { + def reads(jsv: JsValue): JsResult[DataEntry[?]] = { jsv \ "key" match { case JsDefined(JsString(key)) => jsv \ "type" match { @@ -78,14 +81,41 @@ object DataEntry { } } - def writes(item: DataEntry[_]): JsValue = item.toJson + def writes(item: DataEntry[?]): JsValue = item.toJson } - implicit class DataEntryExt(private val de: DataEntry[_]) extends AnyVal { + def dataEntrySerializer(numberAsString: Boolean): JsonSerializer[DataEntry[?]] = + (value: DataEntry[?], gen: JsonGenerator, _: SerializerProvider) => { + gen.writeStartObject() + value match { + case BinaryDataEntry(key, value) => + gen.writeStringField("type", "binary") + gen.writeStringField("key", key) + gen.writeStringField("value", value.base64) + case IntegerDataEntry(key, value) => + gen.writeStringField("type", "integer") + gen.writeStringField("key", key) + gen.writeNumberField("value", value, numberAsString) + case BooleanDataEntry(key, value) => + gen.writeStringField("type", "boolean") + gen.writeStringField("key", key) + gen.writeBooleanField("value", value) + case StringDataEntry(key, value) => + gen.writeStringField("type", "string") + gen.writeStringField("key", key) + gen.writeStringField("value", value) + case EmptyDataEntry(key) => + gen.writeStringField("key", key) + gen.writeNullField("value") + } + gen.writeEndObject() + } + + implicit class DataEntryExt(private val de: DataEntry[?]) extends AnyVal { def isEmpty: Boolean = de.isInstanceOf[EmptyDataEntry] } - def fromLangDataOp(di: DataOp): DataEntry[_] = di match { + def fromLangDataOp(di: DataOp): DataEntry[?] 
= di match { case DataItem.Lng(k, v) => IntegerDataEntry(k, v) case DataItem.Bool(k, v) => BooleanDataEntry(k, v) case DataItem.Bin(k, v) => BinaryDataEntry(k, v) diff --git a/node/src/main/scala/com/wavesplatform/state/Diff.scala b/node/src/main/scala/com/wavesplatform/state/Diff.scala index 663b84a8bb0..4f2583cf870 100755 --- a/node/src/main/scala/com/wavesplatform/state/Diff.scala +++ b/node/src/main/scala/com/wavesplatform/state/Diff.scala @@ -7,7 +7,7 @@ import cats.kernel.{Monoid, Semigroup} import cats.syntax.either.* import com.google.common.hash.{BloomFilter, Funnels} import com.google.protobuf.ByteString -import com.wavesplatform.account.{Address, Alias, PublicKey} +import com.wavesplatform.account.{Address, AddressOrAlias, Alias, PublicKey} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.database.protobuf.EthereumTransactionMeta import com.wavesplatform.features.BlockchainFeatures @@ -57,7 +57,7 @@ object AssetInfo { AssetInfo(ByteString.copyFromUtf8(name), ByteString.copyFromUtf8(description), lastUpdatedAt) } -case class AssetStaticInfo(source: TransactionId, issuer: PublicKey, decimals: Int, nft: Boolean) +case class AssetStaticInfo(id: ByteStr, source: TransactionId, issuer: PublicKey, decimals: Int, nft: Boolean) case class AssetVolumeInfo(isReissuable: Boolean, volume: BigInt) object AssetVolumeInfo { @@ -86,16 +86,6 @@ case class AssetDescription( issueHeight: Height ) -case class AccountDataInfo(data: Map[String, DataEntry[?]]) - -object AccountDataInfo { - implicit val accountDataInfoMonoid: Monoid[AccountDataInfo] = new Monoid[AccountDataInfo] { - override def empty: AccountDataInfo = AccountDataInfo(Map.empty) - - override def combine(x: AccountDataInfo, y: AccountDataInfo): AccountDataInfo = AccountDataInfo(x.data ++ y.data) - } -} - sealed abstract class Sponsorship case class SponsorshipValue(minFee: Long) extends Sponsorship case object SponsorshipNoInfo extends Sponsorship @@ -144,6 +134,8 @@ case class NewTransactionInfo(transaction: Transaction, affected: Set[Address], case class NewAssetInfo(static: AssetStaticInfo, dynamic: AssetInfo, volume: AssetVolumeInfo) +case class LeaseActionInfo(invokeId: ByteStr, dAppPublicKey: PublicKey, recipient: AddressOrAlias, amount: Long) + case class Diff private ( transactions: Vector[NewTransactionInfo], portfolios: Map[Address, Portfolio], @@ -154,7 +146,7 @@ case class Diff private ( leaseState: Map[ByteStr, LeaseDetails], scripts: Map[Address, Option[AccountScriptInfo]], assetScripts: Map[IssuedAsset, Option[AssetScriptInfo]], - accountData: Map[Address, AccountDataInfo], + accountData: Map[Address, Map[String, DataEntry[?]]], sponsorship: Map[IssuedAsset, Sponsorship], scriptsRun: Int, scriptsComplexity: Long, @@ -165,6 +157,9 @@ case class Diff private ( @inline final def combineE(newer: Diff): Either[ValidationError, Diff] = combineF(newer).leftMap(GenericError(_)) + def containsTransaction(txId: ByteStr): Boolean = + transactions.nonEmpty && transactionFilter.exists(_.mightContain(txId.arr)) && transactions.exists(_.transaction.id() == txId) + def transaction(txId: ByteStr): Option[NewTransactionInfo] = if (transactions.nonEmpty && transactionFilter.exists(_.mightContain(txId.arr))) transactions.find(_.transaction.id() == txId) @@ -200,7 +195,7 @@ case class Diff private ( leaseState = leaseState ++ newer.leaseState, scripts = scripts ++ newer.scripts, assetScripts = assetScripts ++ newer.assetScripts, - accountData = accountData.combine(newer.accountData), + accountData = 
Diff.combine(accountData, newer.accountData), sponsorship = sponsorship.combine(newer.sponsorship), scriptsRun = scriptsRun + newer.scriptsRun, scriptResults = scriptResults.combine(newer.scriptResults), @@ -221,7 +216,7 @@ object Diff { leaseState: Map[ByteStr, LeaseDetails] = Map.empty, scripts: Map[Address, Option[AccountScriptInfo]] = Map.empty, assetScripts: Map[IssuedAsset, Option[AssetScriptInfo]] = Map.empty, - accountData: Map[Address, AccountDataInfo] = Map.empty, + accountData: Map[Address, Map[String, DataEntry[?]]] = Map.empty, sponsorship: Map[IssuedAsset, Sponsorship] = Map.empty, scriptsRun: Int = 0, scriptsComplexity: Long = 0, @@ -257,7 +252,7 @@ object Diff { leaseState: Map[ByteStr, LeaseDetails] = Map.empty, scripts: Map[Address, Option[AccountScriptInfo]] = Map.empty, assetScripts: Map[IssuedAsset, Option[AssetScriptInfo]] = Map.empty, - accountData: Map[Address, AccountDataInfo] = Map.empty, + accountData: Map[Address, Map[String, DataEntry[?]]] = Map.empty, sponsorship: Map[IssuedAsset, Sponsorship] = Map.empty, scriptsRun: Int = 0, scriptsComplexity: Long = 0, @@ -302,6 +297,16 @@ object Diff { case (r, _) => r } + def combine[K, IK, IV](first: Map[K, Map[IK, IV]], second: Map[K, Map[IK, IV]]): Map[K, Map[IK, IV]] = { + if (first.isEmpty) { + second + } else { + first ++ second.map { case (k, innerMap) => + k -> first.get(k).fold(innerMap)(_ ++ innerMap) + } + } + } + private def mkFilter() = BloomFilter.create[Array[Byte]](Funnels.byteArrayFunnel(), 10000, 0.01f) private def mkFilterForTransactions(tx: Transaction*) = diff --git a/node/src/main/scala/com/wavesplatform/state/DiffToStateApplier.scala b/node/src/main/scala/com/wavesplatform/state/DiffToStateApplier.scala index 494fb5ccd0d..295daad5cdb 100644 --- a/node/src/main/scala/com/wavesplatform/state/DiffToStateApplier.scala +++ b/node/src/main/scala/com/wavesplatform/state/DiffToStateApplier.scala @@ -7,10 +7,7 @@ import com.wavesplatform.transaction.Asset.Waves import scala.collection.immutable.VectorMap -/** - * A set of functions that apply diff - * to the blockchain and return new - * state values (only changed ones) +/** A set of functions that apply diff to the blockchain and return new state values (only changed ones) */ object DiffToStateApplier { case class PortfolioUpdates( @@ -19,7 +16,7 @@ object DiffToStateApplier { ) def portfolios(blockchain: Blockchain, diff: Diff): PortfolioUpdates = { - val balances = Map.newBuilder[Address, Map[Asset, Long]] + val balances = VectorMap.newBuilder[Address, Map[Asset, Long]] val leases = Map.newBuilder[Address, LeaseBalance] for ((address, portfolioDiff) <- diff.portfolios) { diff --git a/node/src/main/scala/com/wavesplatform/state/InvokeScriptResult.scala b/node/src/main/scala/com/wavesplatform/state/InvokeScriptResult.scala index 7aaa5d62c82..bba89f815a5 100644 --- a/node/src/main/scala/com/wavesplatform/state/InvokeScriptResult.scala +++ b/node/src/main/scala/com/wavesplatform/state/InvokeScriptResult.scala @@ -42,7 +42,7 @@ object InvokeScriptResult { type Issue = com.wavesplatform.lang.v1.traits.domain.Issue type Reissue = com.wavesplatform.lang.v1.traits.domain.Reissue type Burn = com.wavesplatform.lang.v1.traits.domain.Burn - type DataEntry = com.wavesplatform.state.DataEntry[_] + type DataEntry = com.wavesplatform.state.DataEntry[?] 
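// The Diff.scala hunk above drops AccountDataInfo in favour of a plain
// Map[Address, Map[String, DataEntry[?]]] and merges it with the new Diff.combine helper instead of
// the Monoid-based .combine. A small, self-contained illustration of the merge semantics it
// implements (inner maps are merged key-wise, with the newer side winning on conflicts); plain Int
// values are used here purely for readability:
object NestedMapCombineExample {
  val older = Map("addr1" -> Map("k1" -> 1, "k2" -> 2))
  val newer = Map("addr1" -> Map("k2" -> 20), "addr2" -> Map("k3" -> 3))

  // Same logic as Diff.combine: keep everything from `older`, merge inner maps key-wise,
  // and let `newer` win on conflicting inner keys.
  val merged = older ++ newer.map { case (k, inner) => k -> older.get(k).fold(inner)(_ ++ inner) }
  // merged == Map("addr1" -> Map("k1" -> 1, "k2" -> 20), "addr2" -> Map("k3" -> 3))
}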
val empty = InvokeScriptResult() diff --git a/node/src/main/scala/com/wavesplatform/state/NgState.scala b/node/src/main/scala/com/wavesplatform/state/NgState.scala index 0e8baa84c4a..038ade906d9 100644 --- a/node/src/main/scala/com/wavesplatform/state/NgState.scala +++ b/node/src/main/scala/com/wavesplatform/state/NgState.scala @@ -1,8 +1,8 @@ package com.wavesplatform.state -import java.util.concurrent.TimeUnit - import com.google.common.cache.CacheBuilder + +import java.util.concurrent.TimeUnit import com.wavesplatform.block import com.wavesplatform.block.Block.BlockId import com.wavesplatform.block.{Block, MicroBlock} @@ -63,7 +63,7 @@ case class NgState( .toList .foldLeft[Either[String, Diff]](Right(diff)) { case (Right(d1), d2) => d1.combineF(d2) - case (r, _) => r + case (r, _) => r } def microBlockIds: Seq[BlockId] = microBlocks.map(_.totalBlockId) @@ -74,7 +74,8 @@ case class NgState( (baseBlockDiff, baseBlockCarry, baseBlockTotalFee) else internalCaches.blockDiffCache.get( - totalResBlockRef, { () => + totalResBlockRef, + { () => microBlocks.find(_.idEquals(totalResBlockRef)) match { case Some(MicroBlockInfo(blockId, current)) => val (prevDiff, prevCarry, prevTotalFee) = this.diffFor(current.reference) @@ -113,10 +114,9 @@ case class NgState( } def totalDiffOf(id: BlockId): Option[(Block, Diff, Long, Long, DiscardedMicroBlocks)] = - forgeBlock(id).map { - case (block, discarded) => - val (diff, carry, totalFee) = this.diffFor(id) - (block, diff, carry, totalFee, discarded) + forgeBlock(id).map { case (block, discarded) => + val (diff, carry, totalFee) = this.diffFor(id) + (block, diff, carry, totalFee, discarded) } def bestLiquidDiffAndFees: (Diff, Long, Long) = diffFor(microBlocks.headOption.fold(base.id())(_.totalBlockId)) @@ -177,14 +177,20 @@ case class NgState( private[this] def forgeBlock(blockId: BlockId): Option[(Block, DiscardedMicroBlocks)] = internalCaches.forgedBlockCache.get( - blockId, { () => + blockId, + { () => val microBlocksAsc = microBlocks.reverse if (base.id() == blockId) { - Some((base, microBlocksAsc.toVector.map { mb => - val diff = microDiffs(mb.totalBlockId).diff - (mb.microBlock, diff) - })) + Some( + ( + base, + microBlocksAsc.toVector.map { mb => + val diff = microDiffs(mb.totalBlockId).diff + (mb.microBlock, diff) + } + ) + ) } else if (!microBlocksAsc.exists(_.idEquals(blockId))) None else { val (accumulatedTxs, maybeFound) = microBlocksAsc.foldLeft((Vector.empty[Transaction], Option.empty[(ByteStr, DiscardedMicroBlocks)])) { @@ -200,9 +206,8 @@ case class NgState( (accumulated ++ mb.transactionData, None) } - maybeFound.map { - case (sig, discarded) => - (Block.create(base, base.transactionData ++ accumulatedTxs, sig), discarded) + maybeFound.map { case (sig, discarded) => + (Block.create(base, base.transactionData ++ accumulatedTxs, sig), discarded) } } } diff --git a/node/src/main/scala/com/wavesplatform/state/ParSignatureChecker.scala b/node/src/main/scala/com/wavesplatform/state/ParSignatureChecker.scala new file mode 100644 index 00000000000..95ce3a6345f --- /dev/null +++ b/node/src/main/scala/com/wavesplatform/state/ParSignatureChecker.scala @@ -0,0 +1,50 @@ +package com.wavesplatform.state + +import cats.syntax.parallel.* +import com.wavesplatform.block.Block +import com.wavesplatform.transaction.{ProvenTransaction, Transaction} +import com.wavesplatform.utils.Schedulers +import monix.eval.Task +import monix.execution.schedulers.SchedulerService + +object ParSignatureChecker { + implicit val sigverify: SchedulerService = 
Schedulers.fixedPool(4, "sigverify") + + def checkTxSignatures(txs: Seq[Transaction], rideV6Activated: Boolean): Unit = + txs + .parUnorderedTraverse { + case tx: ProvenTransaction => + Task { + if (rideV6Activated) { + tx.firstProofIsValidSignatureAfterV6 + } else { + tx.firstProofIsValidSignatureBeforeV6 + } + }.void + case _ => Task.unit + } + .executeOn(sigverify) + .runAsyncAndForget + + def checkBlockAndTxSignatures(block: Block, rideV6Activated: Boolean): Unit = { + val verifiedObjects: Seq[Any] = (block +: block.transactionData) + verifiedObjects + .parTraverse { + case tx: ProvenTransaction => + Task { + if (rideV6Activated) { + tx.firstProofIsValidSignatureAfterV6 + } else { + tx.firstProofIsValidSignatureBeforeV6 + } + }.void + case b: Block => Task(b.signatureValid()).void + case _ => Task.unit + } + .executeOn(sigverify) + .runAsyncAndForget + } + + def checkBlockSignature(block: Block): Unit = + Task(block.signatureValid()).executeOn(sigverify).runAsyncAndForget +} diff --git a/node/src/main/scala/com/wavesplatform/state/appender/BlockAppender.scala b/node/src/main/scala/com/wavesplatform/state/appender/BlockAppender.scala index e16a411d438..cefe485684c 100644 --- a/node/src/main/scala/com/wavesplatform/state/appender/BlockAppender.scala +++ b/node/src/main/scala/com/wavesplatform/state/appender/BlockAppender.scala @@ -12,7 +12,7 @@ import com.wavesplatform.state.Blockchain import com.wavesplatform.transaction.BlockchainUpdater import com.wavesplatform.transaction.TxValidationError.{BlockAppendError, GenericError, InvalidSignature} import com.wavesplatform.utils.{ScorexLogging, Time} -import com.wavesplatform.utx.UtxPoolImpl +import com.wavesplatform.utx.UtxForAppender import io.netty.channel.Channel import io.netty.channel.group.ChannelGroup import kamon.Kamon @@ -24,15 +24,16 @@ object BlockAppender extends ScorexLogging { def apply( blockchainUpdater: BlockchainUpdater & Blockchain, time: Time, - utxStorage: UtxPoolImpl, + utxStorage: UtxForAppender, pos: PoSSelector, scheduler: Scheduler, - verify: Boolean = true + verify: Boolean = true, + txSignParCheck: Boolean = true )(newBlock: Block): Task[Either[ValidationError, Option[BigInt]]] = Task { - if (blockchainUpdater.isLastBlockId(newBlock.header.reference)) - appendKeyBlock(blockchainUpdater, utxStorage, pos, time, verify)(newBlock).map(_ => Some(blockchainUpdater.score)) - else if (blockchainUpdater.contains(newBlock.id()) || blockchainUpdater.isLastBlockId(newBlock.id())) + if (blockchainUpdater.isLastBlockId(newBlock.header.reference)) { + appendKeyBlock(blockchainUpdater, utxStorage, pos, time, verify, txSignParCheck)(newBlock).map(_ => Some(blockchainUpdater.score)) + } else if (blockchainUpdater.contains(newBlock.id()) || blockchainUpdater.isLastBlockId(newBlock.id())) Right(None) else Left(BlockAppendError("Block is not a child of the last block", newBlock)) @@ -41,7 +42,7 @@ object BlockAppender extends ScorexLogging { def apply( blockchainUpdater: BlockchainUpdater & Blockchain, time: Time, - utxStorage: UtxPoolImpl, + utxStorage: UtxForAppender, pos: PoSSelector, allChannels: ChannelGroup, peerDatabase: PeerDatabase, diff --git a/node/src/main/scala/com/wavesplatform/state/appender/ExtensionAppender.scala b/node/src/main/scala/com/wavesplatform/state/appender/ExtensionAppender.scala index fa1d2c2abc5..ac1ba404696 100644 --- a/node/src/main/scala/com/wavesplatform/state/appender/ExtensionAppender.scala +++ b/node/src/main/scala/com/wavesplatform/state/appender/ExtensionAppender.scala @@ -2,6 +2,7 @@ package 
com.wavesplatform.state.appender import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.consensus.PoSSelector +import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.lang.ValidationError import com.wavesplatform.metrics.{BlockStats, Metrics} import com.wavesplatform.network.{ExtensionBlocks, InvalidBlockStorage, PeerDatabase, formatBlocks, id} @@ -32,7 +33,7 @@ object ExtensionAppender extends ScorexLogging { if (extension.remoteScore <= blockchainUpdater.score) { log.trace(s"Ignoring extension $extension because declared remote was not greater than local score ${blockchainUpdater.score}") Right(None) - } else + } else { extension.blocks .collectFirst { case b if !b.signatureValid() => GenericError(s"Block $b has invalid signature") } .toLeft(extension) @@ -53,10 +54,15 @@ object ExtensionAppender extends ScorexLogging { } yield (commonBlockHeight, droppedBlocks) droppedBlocksEi.flatMap { case (commonBlockHeight, droppedBlocks) => + newBlocks.zipWithIndex.foreach { case (block, idx) => + val rideV6Activated = blockchainUpdater.isFeatureActivated(BlockchainFeatures.RideV6, commonBlockHeight + idx + 1) + ParSignatureChecker.checkTxSignatures(block.transactionData, rideV6Activated) + } + val forkApplicationResultEi = { newBlocks.view .map { b => - b -> appendExtensionBlock(blockchainUpdater, pos, time, verify = true)(b) + b -> appendExtensionBlock(blockchainUpdater, pos, time, verify = true, txSignParCheck = false)(b) .map { _.foreach(bh => BlockStats.applied(b, BlockStats.Source.Ext, bh)) } @@ -112,6 +118,7 @@ object ExtensionAppender extends ScorexLogging { Right(None) } } + } log.debug(s"${id(ch)} Attempting to append extension ${formatBlocks(extensionBlocks.blocks)}") Task(appendExtension(extensionBlocks)).executeOn(scheduler).map { diff --git a/node/src/main/scala/com/wavesplatform/state/appender/package.scala b/node/src/main/scala/com/wavesplatform/state/appender/package.scala index bfdffe201ce..aa7604b576c 100644 --- a/node/src/main/scala/com/wavesplatform/state/appender/package.scala +++ b/node/src/main/scala/com/wavesplatform/state/appender/package.scala @@ -11,7 +11,7 @@ import com.wavesplatform.mining.Miner import com.wavesplatform.transaction.* import com.wavesplatform.transaction.TxValidationError.{BlockAppendError, BlockFromFuture, GenericError} import com.wavesplatform.utils.Time -import com.wavesplatform.utx.UtxPoolImpl +import com.wavesplatform.utx.UtxForAppender import kamon.Kamon package object appender { @@ -26,34 +26,34 @@ package object appender { private[appender] def appendKeyBlock( blockchainUpdater: BlockchainUpdater & Blockchain, - utx: UtxPoolImpl, + utx: UtxForAppender, pos: PoSSelector, time: Time, - verify: Boolean + verify: Boolean, + txSignParCheck: Boolean )(block: Block): Either[ValidationError, Option[Int]] = for { hitSource <- if (verify) validateBlock(blockchainUpdater, pos, time)(block) else pos.validateGenerationSignature(block) - newHeight <- utx.priorityPool.lockedWrite { + newHeight <- metrics.appendBlock - .measureSuccessful(blockchainUpdater.processBlock(block, hitSource, verify)) + .measureSuccessful(blockchainUpdater.processBlock(block, hitSource, verify, txSignParCheck)) .map { discardedDiffs => - utx.removeAll(block.transactionData) utx.setPriorityDiffs(discardedDiffs) - utx.scheduleCleanup() Some(blockchainUpdater.height) } - } + } yield newHeight private[appender] def appendExtensionBlock( blockchainUpdater: BlockchainUpdater & Blockchain, pos: PoSSelector, time: Time, - verify: Boolean + verify: 
Boolean, + txSignParCheck: Boolean )(block: Block): Either[ValidationError, Option[Int]] = for { hitSource <- if (verify) validateBlock(blockchainUpdater, pos, time)(block) else pos.validateGenerationSignature(block) - _ <- metrics.appendBlock.measureSuccessful(blockchainUpdater.processBlock(block, hitSource, verify)) + _ <- metrics.appendBlock.measureSuccessful(blockchainUpdater.processBlock(block, hitSource, verify, txSignParCheck)) } yield Some(blockchainUpdater.height) private def validateBlock(blockchainUpdater: Blockchain, pos: PoSSelector, time: Time)(block: Block) = @@ -88,7 +88,9 @@ package object appender { _ <- Either.cond(blockTime - currentTs < MaxTimeDrift, (), BlockFromFuture(blockTime)) _ <- pos.validateBaseTarget(height, block, parent, grandParent) hitSource <- pos.validateGenerationSignature(block) - _ <- pos.validateBlockDelay(height, block.header, parent, effectiveBalance).orElse(checkExceptions(height, block)) + _ <- pos + .validateBlockDelay(height, block.header, parent, effectiveBalance) + .orElse(checkExceptions(height, block)) } yield hitSource } .left diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/AssetTransactionsDiff.scala b/node/src/main/scala/com/wavesplatform/state/diffs/AssetTransactionsDiff.scala index 8f59bfc57a4..abfd88df278 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/AssetTransactionsDiff.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/AssetTransactionsDiff.scala @@ -33,12 +33,12 @@ object AssetTransactionsDiff { // First 20 bytes of id should be unique def requireUnique(): Boolean = blockchain.resolveERC20Address(ERC20Address(tx.asset)).isEmpty - val staticInfo = AssetStaticInfo(TransactionId @@ tx.id(), tx.sender, tx.decimals.value, blockchain.isNFT(tx)) + val asset = IssuedAsset(tx.id()) + + val staticInfo = AssetStaticInfo(asset.id, TransactionId @@ tx.id(), tx.sender, tx.decimals.value, blockchain.isNFT(tx)) val volumeInfo = AssetVolumeInfo(tx.reissuable, BigInt(tx.quantity.value)) val info = AssetInfo(tx.name, tx.description, Height @@ blockchain.height) - val asset = IssuedAsset(tx.id()) - for { _ <- Either.cond(requireValidUtf(), (), GenericError("Valid UTF-8 strings required")) _ <- Either.cond(requireUnique(), (), GenericError(s"Asset ${tx.asset} is already issued")) diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/BalanceDiffValidation.scala b/node/src/main/scala/com/wavesplatform/state/diffs/BalanceDiffValidation.scala index fbc801f97d2..25f7e1ee036 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/BalanceDiffValidation.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/BalanceDiffValidation.scala @@ -5,7 +5,6 @@ import com.wavesplatform.account.Address import com.wavesplatform.common.state.ByteStr import com.wavesplatform.state import com.wavesplatform.state.{Blockchain, Diff, LeaseBalance, Portfolio} -import com.wavesplatform.transaction.Asset.Waves import com.wavesplatform.transaction.TxValidationError.AccountBalanceError import scala.util.{Left, Right} @@ -17,9 +16,12 @@ object BalanceDiffValidation { } def apply(b: Blockchain)(d: Diff): Either[AccountBalanceError, Diff] = { - def check(acc: Address, portfolio: Portfolio): Either[(Address, String), Unit] = { + def check( + acc: Address, + portfolio: Portfolio + ): Either[(Address, String), Unit] = { val balance = portfolio.balance - val oldWaves = b.balance(acc, Waves) + val oldWaves = b.balance(acc) val oldLease = b.leaseBalance(acc) def negativeBalanceCheck(newLease: LeaseBalance, newWaves: Long): 
Either[(Address, String), Unit] = @@ -68,7 +70,11 @@ object BalanceDiffValidation { } } - private def negativeAssetsInfo(b: Blockchain, acc: Address, diff: Portfolio): Map[ByteStr, Long] = + private def negativeAssetsInfo( + b: Blockchain, + acc: Address, + diff: Portfolio + ): Map[ByteStr, Long] = diff.assets .map { case (aid, balanceChange) => aid.id -> (b.balance(acc, aid) + balanceChange) } .filter(_._2 < 0) diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/BlockDiffer.scala b/node/src/main/scala/com/wavesplatform/state/diffs/BlockDiffer.scala index 74367395870..642008ad3c2 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/BlockDiffer.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/BlockDiffer.scala @@ -2,6 +2,7 @@ package com.wavesplatform.state.diffs import cats.implicits.{toBifunctorOps, toFoldableOps} import cats.syntax.either.catsSyntaxEitherId +import com.wavesplatform.account.Address import com.wavesplatform.block.{Block, MicroBlock} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.features.BlockchainFeatures @@ -12,8 +13,13 @@ import com.wavesplatform.state.patch.* import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves} import com.wavesplatform.transaction.TxValidationError.* +import com.wavesplatform.transaction.assets.exchange.ExchangeTransaction +import com.wavesplatform.transaction.lease.LeaseTransaction +import com.wavesplatform.transaction.smart.InvokeScriptTransaction import com.wavesplatform.transaction.smart.script.trace.TracedResult -import com.wavesplatform.transaction.{Asset, Transaction} +import com.wavesplatform.transaction.transfer.{MassTransferTransaction, TransferTransaction} +import com.wavesplatform.transaction.transfer.MassTransferTransaction.ParsedTransfer +import com.wavesplatform.transaction.{Asset, Authorized, GenesisTransaction, PaymentTransaction, Transaction} object BlockDiffer { final case class DetailedDiff(parentDiff: Diff, transactionDiffs: List[Diff]) @@ -31,9 +37,12 @@ object BlockDiffer { block: Block, constraint: MiningConstraint, hitSource: ByteStr, - verify: Boolean = true + loadCacheData: (Set[Address], Set[ByteStr]) => Unit = (_, _) => (), + verify: Boolean = true, + enableExecutionLog: Boolean = false, + txSignParCheck: Boolean = true ): Either[ValidationError, Result] = - fromBlockTraced(blockchain, maybePrevBlock, block, constraint, hitSource, verify).resultE + fromBlockTraced(blockchain, maybePrevBlock, block, constraint, hitSource, loadCacheData, verify, enableExecutionLog, txSignParCheck).resultE def fromBlockTraced( blockchain: Blockchain, @@ -41,7 +50,10 @@ object BlockDiffer { block: Block, constraint: MiningConstraint, hitSource: ByteStr, - verify: Boolean + loadCacheData: (Set[Address], Set[ByteStr]) => Unit, + verify: Boolean, + enableExecutionLog: Boolean, + txSignParCheck: Boolean ): TracedResult[ValidationError, Result] = { val stateHeight = blockchain.height @@ -95,7 +107,10 @@ object BlockDiffer { resultDiff, stateHeight >= ngHeight, block.transactionData, - verify + loadCacheData, + verify = verify, + enableExecutionLog = enableExecutionLog, + txSignParCheck = txSignParCheck ) } yield r } @@ -105,16 +120,21 @@ object BlockDiffer { prevBlockTimestamp: Option[Long], micro: MicroBlock, constraint: MiningConstraint, - verify: Boolean = true + loadCacheData: (Set[Address], Set[ByteStr]) => Unit = (_, _) => (), + verify: Boolean = true, + enableExecutionLog: Boolean = false ): Either[ValidationError, 
Result] = - fromMicroBlockTraced(blockchain, prevBlockTimestamp, micro, constraint, verify).resultE + fromMicroBlockTraced(blockchain, prevBlockTimestamp, micro, constraint, loadCacheData, verify, enableExecutionLog).resultE def fromMicroBlockTraced( blockchain: Blockchain, prevBlockTimestamp: Option[Long], micro: MicroBlock, constraint: MiningConstraint, - verify: Boolean = true + loadCacheData: (Set[Address], Set[ByteStr]) => Unit = (_, _) => (), + verify: Boolean = true, + enableExecutionLog: Boolean = false, + txSignParCheck: Boolean = true ): TracedResult[ValidationError, Result] = { for { // microblocks are processed within block which is next after 40-only-block which goes on top of activated height @@ -133,7 +153,10 @@ object BlockDiffer { Diff.empty, hasNg = true, micro.transactionData, - verify = verify + loadCacheData, + verify = verify, + enableExecutionLog = enableExecutionLog, + txSignParCheck = txSignParCheck ) } yield r } @@ -152,7 +175,10 @@ object BlockDiffer { initDiff: Diff, hasNg: Boolean, txs: Seq[Transaction], - verify: Boolean + loadCacheData: (Set[Address], Set[ByteStr]) => Unit, + verify: Boolean, + enableExecutionLog: Boolean, + txSignParCheck: Boolean ): TracedResult[ValidationError, Result] = { def updateConstraint(constraint: MiningConstraint, blockchain: Blockchain, tx: Transaction, diff: Diff): MiningConstraint = constraint.put(blockchain, tx, diff) @@ -160,10 +186,16 @@ object BlockDiffer { val currentBlockHeight = blockchain.height val timestamp = blockchain.lastBlockTimestamp.get val blockGenerator = blockchain.lastBlockHeader.get.header.generator.toAddress + val rideV6Activated = blockchain.isFeatureActivated(BlockchainFeatures.RideV6) - val txDiffer = TransactionDiffer(prevBlockTimestamp, timestamp, verify) _ + val txDiffer = TransactionDiffer(prevBlockTimestamp, timestamp, verify, enableExecutionLog = enableExecutionLog) _ val hasSponsorship = currentBlockHeight >= Sponsorship.sponsoredFeesSwitchHeight(blockchain) + if (verify && txSignParCheck) + ParSignatureChecker.checkTxSignatures(txs, rideV6Activated) + + prepareCaches(blockGenerator, txs, loadCacheData) + txs .foldLeft(TracedResult(Result(initDiff, 0L, 0L, initConstraint, DetailedDiff(initDiff, Nil)).asRight[ValidationError])) { case (acc @ TracedResult(Left(_), _, _), _) => acc @@ -213,4 +245,38 @@ object BlockDiffer { .fold(prevDiff.asRight[String])(prevDiff.combineF) } } + + private def prepareCaches(blockGenerator: Address, txs: Seq[Transaction], loadCacheData: (Set[Address], Set[ByteStr]) => Unit): Unit = { + val addresses = Set.newBuilder[Address].addOne(blockGenerator) + val orders = Set.newBuilder[ByteStr] + + txs.foreach { + case tx: ExchangeTransaction => + addresses.addAll(Seq(tx.sender.toAddress, tx.buyOrder.senderAddress, tx.sellOrder.senderAddress)) + orders.addOne(tx.buyOrder.id()).addOne(tx.sellOrder.id()) + case tx: GenesisTransaction => addresses.addOne(tx.recipient) + case tx: InvokeScriptTransaction => + addresses.addAll(Seq(tx.senderAddress) ++ (tx.dApp match { + case addr: Address => Some(addr) + case _ => None + })) + case tx: LeaseTransaction => + addresses.addAll(Seq(tx.sender.toAddress) ++ (tx.recipient match { + case addr: Address => Some(addr) + case _ => None + })) + case tx: MassTransferTransaction => + addresses.addAll(Seq(tx.sender.toAddress) ++ tx.transfers.collect { case ParsedTransfer(addr: Address, _) => addr }) + case tx: PaymentTransaction => addresses.addAll(Seq(tx.sender.toAddress, tx.recipient)) + case tx: TransferTransaction => + 
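// transfers: warm the sender and, when the recipient is a concrete Address rather than an alias, the recipient as well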
addresses.addAll(Seq(tx.sender.toAddress) ++ (tx.recipient match { + case addr: Address => Some(addr) + case _ => None + })) + case tx: Authorized => addresses.addOne(tx.sender.toAddress) + case _ => () + } + + loadCacheData(addresses.result(), orders.result()) + } } diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/CommonValidation.scala b/node/src/main/scala/com/wavesplatform/state/diffs/CommonValidation.scala index 37deb6c64f3..205e19c787a 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/CommonValidation.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/CommonValidation.scala @@ -2,6 +2,7 @@ package com.wavesplatform.state.diffs import cats.implicits.toBifunctorOps import com.wavesplatform.account.{Address, AddressScheme} +import com.wavesplatform.database.patch.DisableHijackedAliases import com.wavesplatform.features.OverdraftValidationProvider.* import com.wavesplatform.features.{BlockchainFeature, BlockchainFeatures, RideVersionProvider} import com.wavesplatform.lang.ValidationError @@ -108,7 +109,8 @@ object CommonValidation { } else Right(tx) def disallowDuplicateIds[T <: Transaction](blockchain: Blockchain, tx: T): Either[ValidationError, T] = tx match { - case _: PaymentTransaction => Right(tx) + case _: PaymentTransaction => Right(tx) + case _: CreateAliasTransaction if blockchain.height < DisableHijackedAliases.height => Right(tx) case _ => val id = tx.id() Either.cond(!blockchain.containsTransaction(tx), tx, AlreadyInTheState(id, blockchain.transactionMeta(id).get.height)) diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/DataTransactionDiff.scala b/node/src/main/scala/com/wavesplatform/state/diffs/DataTransactionDiff.scala index 14c6da56290..36d637139fb 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/DataTransactionDiff.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/DataTransactionDiff.scala @@ -14,7 +14,7 @@ object DataTransactionDiff { _ <- DataTxValidator.payloadSizeValidation(blockchain, tx).toEither.leftMap(_.head) } yield Diff( portfolios = Map(sender -> Portfolio(-tx.fee.value)), - accountData = Map(sender -> AccountDataInfo(tx.data.map(item => item.key -> item).toMap)), + accountData = Map(sender -> tx.data.map(item => item.key -> item).toMap), scriptsRun = DiffsCommon.countScriptRuns(blockchain, tx) ) } diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/EthereumTransactionDiff.scala b/node/src/main/scala/com/wavesplatform/state/diffs/EthereumTransactionDiff.scala index 213f9267f70..ee61c91ba07 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/EthereumTransactionDiff.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/EthereumTransactionDiff.scala @@ -50,14 +50,16 @@ object EthereumTransactionDiff { resultEi.getOrElse(Diff.empty) } - def apply(blockchain: Blockchain, currentBlockTs: Long, limitedExecution: Boolean)(e: EthereumTransaction): TracedResult[ValidationError, Diff] = { + def apply(blockchain: Blockchain, currentBlockTs: Long, limitedExecution: Boolean, enableExecutionLog: Boolean)( + e: EthereumTransaction + ): TracedResult[ValidationError, Diff] = { val baseDiff = e.payload match { case et: EthereumTransaction.Transfer => for { _ <- checkLeadingZeros(e, blockchain) asset <- TracedResult(et.tryResolveAsset(blockchain)) transfer <- TracedResult(et.toTransferLike(e, blockchain)) - assetDiff <- TransactionDiffer.assetsVerifierDiff(blockchain, transfer, verify = true, Diff(), Int.MaxValue) + assetDiff <- 
TransactionDiffer.assetsVerifierDiff(blockchain, transfer, verify = true, Diff(), Int.MaxValue, enableExecutionLog) diff <- TransferDiff(blockchain)(e.senderAddress(), et.recipient, et.amount, asset, e.fee, e.feeAssetId) result <- assetDiff.combineE(diff) } yield result @@ -66,8 +68,8 @@ object EthereumTransactionDiff { for { _ <- checkLeadingZeros(e, blockchain) invocation <- TracedResult(ei.toInvokeScriptLike(e, blockchain)) - diff <- InvokeScriptTransactionDiff(blockchain, currentBlockTs, limitedExecution)(invocation) - result <- TransactionDiffer.assetsVerifierDiff(blockchain, invocation, verify = true, diff, Int.MaxValue) + diff <- InvokeScriptTransactionDiff(blockchain, currentBlockTs, limitedExecution, enableExecutionLog)(invocation) + result <- TransactionDiffer.assetsVerifierDiff(blockchain, invocation, verify = true, diff, Int.MaxValue, enableExecutionLog) } yield result } diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/TransactionDiffer.scala b/node/src/main/scala/com/wavesplatform/state/diffs/TransactionDiffer.scala index 8ac26fa5b20..fdafc68e27f 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/TransactionDiffer.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/TransactionDiffer.scala @@ -27,32 +27,41 @@ import com.wavesplatform.transaction.transfer.{MassTransferTransaction, Transfer import play.api.libs.json.Json object TransactionDiffer { - def apply(prevBlockTs: Option[Long], currentBlockTs: Long, verify: Boolean = true)( + def apply(prevBlockTs: Option[Long], currentBlockTs: Long, verify: Boolean = true, enableExecutionLog: Boolean = false)( blockchain: Blockchain, tx: Transaction ): TracedResult[ValidationError, Diff] = - validate(prevBlockTs, currentBlockTs, verify, limitedExecution = false)(blockchain, tx) match { + validate(prevBlockTs, currentBlockTs, verify, limitedExecution = false, enableExecutionLog = enableExecutionLog)(blockchain, tx) match { case isFailedTransaction((complexity, scriptResult, trace, attributes)) if acceptFailed(blockchain) => TracedResult(failedTransactionDiff(blockchain, tx, complexity, scriptResult), trace, attributes) case result => result } - def forceValidate(prevBlockTs: Option[Long], currentBlockTs: Long)( + def forceValidate(prevBlockTs: Option[Long], currentBlockTs: Long, enableExecutionLog: Boolean = false)( blockchain: Blockchain, tx: Transaction ): TracedResult[ValidationError, Diff] = - validate(prevBlockTs, currentBlockTs, verify = true, limitedExecution = false)(blockchain, tx) + validate(prevBlockTs, currentBlockTs, verify = true, limitedExecution = false, enableExecutionLog = enableExecutionLog)(blockchain, tx) - def limitedExecution(prevBlockTimestamp: Option[Long], currentBlockTimestamp: Long, unlimited: Boolean, verify: Boolean = true)( + def limitedExecution( + prevBlockTimestamp: Option[Long], + currentBlockTimestamp: Long, + unlimited: Boolean, + verify: Boolean = true, + enableExecutionLog: Boolean = false + )( blockchain: Blockchain, tx: Transaction ): TracedResult[ValidationError, Diff] = { val limitedExecution = if (unlimited) false else transactionMayFail(tx) && acceptFailed(blockchain) - validate(prevBlockTimestamp, currentBlockTimestamp, verify = verify, limitedExecution)( - blockchain, - tx - ) + validate( + prevBlockTimestamp, + currentBlockTimestamp, + verify = verify, + limitedExecution = limitedExecution, + enableExecutionLog = enableExecutionLog + )(blockchain, tx) } /** Validates transaction. 
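The hunks above and below thread a new enableExecutionLog flag (default false) from TransactionDiffer's public entry points down through validate and on into Verifier and ScriptRunner, so an evaluation log is only collected when a caller explicitly asks for it. The following is a minimal, self-contained sketch of that flag-threading pattern only; the object and method names are hypothetical and not taken from the node's code.

object FlagThreadingSketch {
  final case class Result(value: Int, log: List[String])

  // lowest layer: the only place that inspects the flag
  private def runScript(input: Int, enableExecutionLog: Boolean): Result = {
    val log = if (enableExecutionLog) List(s"evaluated input=$input") else Nil
    Result(input * 2, log)
  }

  // intermediate layer: forwards the flag unchanged
  private def validate(input: Int, verify: Boolean, enableExecutionLog: Boolean): Either[String, Result] =
    if (!verify) Right(Result(input, Nil))
    else if (input < 0) Left("negative input")
    else Right(runScript(input, enableExecutionLog))

  // public entry point: logging stays off unless the caller opts in
  def apply(input: Int, verify: Boolean = true, enableExecutionLog: Boolean = false): Either[String, Result] =
    validate(input, verify, enableExecutionLog)

  def main(args: Array[String]): Unit = {
    println(apply(21))                            // Right(Result(42,List()))
    println(apply(21, enableExecutionLog = true)) // Right(Result(42,List(evaluated input=21)))
  }
}

Keeping the flag an explicit parameter rather than a global setting lets each call site opt in independently, which matches how this patch has the UTX pool pass enableExecutionLog = true while block application leaves it disabled by default.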
@@ -61,7 +70,13 @@ object TransactionDiffer { * @param verify * validate common checks, proofs and asset scripts execution. If `skipFailing` is true asset scripts will not be executed */ - private def validate(prevBlockTimestamp: Option[Long], currentBlockTimestamp: Long, verify: Boolean, limitedExecution: Boolean)( + private def validate( + prevBlockTimestamp: Option[Long], + currentBlockTimestamp: Long, + verify: Boolean, + limitedExecution: Boolean, + enableExecutionLog: Boolean + )( blockchain: Blockchain, tx: Transaction ): TracedResult[ValidationError, Diff] = { @@ -69,8 +84,8 @@ object TransactionDiffer { val result = for { _ <- validateCommon(blockchain, tx, prevBlockTimestamp, currentBlockTimestamp, verify).traced _ <- validateFunds(blockchain, tx).traced - verifierDiff <- if (runVerifiers) verifierDiff(blockchain, tx) else Right(Diff.empty).traced - transactionDiff <- transactionDiff(blockchain, tx, verifierDiff, currentBlockTimestamp, limitedExecution) + verifierDiff <- if (runVerifiers) verifierDiff(blockchain, tx, enableExecutionLog) else Right(Diff.empty).traced + transactionDiff <- transactionDiff(blockchain, tx, verifierDiff, currentBlockTimestamp, limitedExecution, enableExecutionLog) remainingComplexity = if (limitedExecution) ContractLimits.FailFreeInvokeComplexity - transactionDiff.scriptsComplexity.toInt else Int.MaxValue _ <- validateBalance(blockchain, tx.tpe, transactionDiff).traced.leftMap { err => def acceptFailedByBalance(): Boolean = @@ -81,7 +96,7 @@ object TransactionDiffer { else err } - diff <- assetsVerifierDiff(blockchain, tx, runVerifiers, transactionDiff, remainingComplexity) + diff <- assetsVerifierDiff(blockchain, tx, runVerifiers, transactionDiff, remainingComplexity, enableExecutionLog) } yield diff result @@ -143,18 +158,19 @@ object TransactionDiffer { } } yield () - private[this] def verifierDiff(blockchain: Blockchain, tx: Transaction): TracedResult[ValidationError, Diff] = - Verifier(blockchain)(tx).map(complexity => Diff(scriptsComplexity = complexity)) + private[this] def verifierDiff(blockchain: Blockchain, tx: Transaction, enableExecutionLog: Boolean): TracedResult[ValidationError, Diff] = + Verifier(blockchain, enableExecutionLog = enableExecutionLog)(tx).map(complexity => Diff(scriptsComplexity = complexity)) def assetsVerifierDiff( blockchain: Blockchain, tx: TransactionBase, verify: Boolean, initDiff: Diff, - remainingComplexity: Int + remainingComplexity: Int, + enableExecutionLog: Boolean ): TracedResult[ValidationError, Diff] = { val diff = if (verify) { - Verifier.assets(blockchain, remainingComplexity)(tx).leftMap { + Verifier.assets(blockchain, remainingComplexity, enableExecutionLog)(tx).leftMap { case (spentComplexity, ScriptExecutionError(error, log, Some(assetId))) if transactionMayFail(tx) && acceptFailed(blockchain) => FailedTransactionError.assetExecution(error, spentComplexity, log, assetId) case (spentComplexity, TransactionNotAllowedByScript(log, Some(assetId))) if transactionMayFail(tx) && acceptFailed(blockchain) => @@ -177,14 +193,15 @@ object TransactionDiffer { tx: Transaction, initDiff: Diff, currentBlockTs: TxTimestamp, - limitedExecution: Boolean + limitedExecution: Boolean, + enableExecutionLog: Boolean ): TracedResult[ValidationError, Diff] = stats.transactionDiffValidation .measureForType(tx.tpe) { tx match { case gtx: GenesisTransaction => GenesisTransactionDiff(blockchain.height)(gtx).traced case ptx: PaymentTransaction => PaymentTransactionDiff(blockchain)(ptx).traced - case ci: InvokeTransaction => 
InvokeScriptTransactionDiff(blockchain, currentBlockTs, limitedExecution)(ci) + case ci: InvokeTransaction => InvokeScriptTransactionDiff(blockchain, currentBlockTs, limitedExecution, enableExecutionLog)(ci) case etx: ExchangeTransaction => ExchangeTransactionDiff(blockchain)(etx).traced case itx: IssueTransaction => AssetTransactionsDiff.issue(blockchain)(itx).traced case rtx: ReissueTransaction => AssetTransactionsDiff.reissue(blockchain, currentBlockTs)(rtx).traced @@ -199,7 +216,7 @@ object TransactionDiffer { case sstx: SetScriptTransaction => SetScriptTransactionDiff(blockchain)(sstx).traced case sstx: SetAssetScriptTransaction => AssetTransactionsDiff.setAssetScript(blockchain)(sstx).traced case stx: SponsorFeeTransaction => AssetTransactionsDiff.sponsor(blockchain)(stx).traced - case et: EthereumTransaction => EthereumTransactionDiff(blockchain, currentBlockTs, limitedExecution)(et) + case et: EthereumTransaction => EthereumTransactionDiff(blockchain, currentBlockTs, limitedExecution, enableExecutionLog)(et) case _ => UnsupportedTransactionType.asLeft.traced } } @@ -297,12 +314,15 @@ object TransactionDiffer { case e: EthereumTransaction => EthereumTransactionDiff.meta(blockchain)(e) case _ => Diff.empty } - Diff.withTransactions( - Vector(NewTransactionInfo(tx, affectedAddresses, applied = false, spentComplexity)), - portfolios = portfolios, - scriptResults = scriptResult.fold(Map.empty[ByteStr, InvokeScriptResult])(sr => Map(tx.id() -> sr)), - scriptsComplexity = spentComplexity - ).combineF(ethereumMetaDiff).getOrElse(Diff.empty) + Diff + .withTransactions( + Vector(NewTransactionInfo(tx, affectedAddresses, applied = false, spentComplexity)), + portfolios = portfolios, + scriptResults = scriptResult.fold(Map.empty[ByteStr, InvokeScriptResult])(sr => Map(tx.id() -> sr)), + scriptsComplexity = spentComplexity + ) + .combineF(ethereumMetaDiff) + .getOrElse(Diff.empty) } } diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeDiffsCommon.scala b/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeDiffsCommon.scala index ee549287b3c..36145a9a824 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeDiffsCommon.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeDiffsCommon.scala @@ -169,6 +169,7 @@ object InvokeDiffsCommon { limitedExecution: Boolean, totalComplexityLimit: Int, otherIssues: Seq[Issue], + enableExecutionLog: Boolean, log: Log[Id] ): TracedResult[ValidationError, Diff] = { val verifierCount = if (blockchain.hasPaidVerifier(tx.sender.toAddress)) 1 else 0 @@ -202,7 +203,7 @@ object InvokeDiffsCommon { complexityLimit = if (limitedExecution) ContractLimits.FailFreeInvokeComplexity - storingComplexity else Int.MaxValue - compositeDiff <- foldActions(blockchain, blockTime, tx, dAppAddress, dAppPublicKey)(actions.list, paymentsAndFeeDiff, complexityLimit) + compositeDiff <- foldActions(blockchain, blockTime, tx, dAppAddress, dAppPublicKey, enableExecutionLog)(actions.list, paymentsAndFeeDiff, complexityLimit) .leftMap { case failed: FailedTransactionError => failed.addComplexity(storingComplexity).withLog(log) case other => other @@ -442,7 +443,8 @@ object InvokeDiffsCommon { blockTime: Long, tx: InvokeScriptLike, dAppAddress: Address, - pk: PublicKey + pk: PublicKey, + enableExecutionLog: Boolean )( actions: List[CallableAction], paymentsDiff: Diff, @@ -513,7 +515,8 @@ object InvokeDiffsCommon { assetVerifierDiff, script, complexity, - complexityLimit + complexityLimit, + enableExecutionLog ) } 
yield assetValidationDiff val errorOpt = assetValidationDiff.fold(Some(_), _ => None) @@ -528,7 +531,7 @@ object InvokeDiffsCommon { } def applyDataItem(item: DataOp): TracedResult[FailedTransactionError, Diff] = - TracedResult.wrapValue(Diff(accountData = Map(dAppAddress -> AccountDataInfo(Map(item.key -> dataItemToEntry(item)))))) + TracedResult.wrapValue(Diff(accountData = Map(dAppAddress -> Map(item.key -> dataItemToEntry(item))))) def applyIssue(itx: InvokeScriptLike, pk: PublicKey, issue: Issue): TracedResult[ValidationError, Diff] = { val asset = IssuedAsset(issue.id) @@ -547,7 +550,7 @@ object InvokeDiffsCommon { TracedResult(Left(FailOrRejectError(error))) } } else { - val staticInfo = AssetStaticInfo(TransactionId @@ itx.txId, pk, issue.decimals, blockchain.isNFT(issue)) + val staticInfo = AssetStaticInfo(asset.id, TransactionId @@ itx.txId, pk, issue.decimals, blockchain.isNFT(issue)) val volumeInfo = AssetVolumeInfo(issue.isReissuable, BigInt(issue.quantity)) val info = AssetInfo(ByteString.copyFromUtf8(issue.name), ByteString.copyFromUtf8(issue.description), Height @@ blockchain.height) Right( @@ -622,7 +625,8 @@ object InvokeDiffsCommon { result, script, complexity, - complexityLimit + complexityLimit, + enableExecutionLog ) } yield validatedResult val errorOpt = assetValidationDiff.fold(Some(_), _ => None) @@ -671,7 +675,8 @@ object InvokeDiffsCommon { nextDiff: Diff, script: Script, estimatedComplexity: Long, - complexityLimit: Int + complexityLimit: Int, + enableExecutionLog: Boolean ): Either[FailedTransactionError, Diff] = Try { val (log, evaluatedComplexity, result) = ScriptRunner( @@ -682,6 +687,7 @@ object InvokeDiffsCommon { scriptContainerAddress = if (blockchain.passCorrectAssetId) Coproduct[Environment.Tthis](Environment.AssetId(assetId.arr)) else Coproduct[Environment.Tthis](Environment.AssetId(tx.dApp.bytes)), + enableExecutionLog = enableExecutionLog, complexityLimit ) val complexity = if (blockchain.storeEvaluatedComplexity) evaluatedComplexity else estimatedComplexity diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptDiff.scala b/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptDiff.scala index 26372789ab2..228d28444fa 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptDiff.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptDiff.scala @@ -48,6 +48,7 @@ object InvokeScriptDiff { blockchain: Blockchain, blockTime: Long, limitedExecution: Boolean, + enableExecutionLog: Boolean, totalComplexityLimit: Int, remainingComplexity: Int, remainingCalls: Int, @@ -156,6 +157,7 @@ object InvokeScriptDiff { script.script, isAssetScript = true, scriptContainerAddress = Coproduct[Environment.Tthis](Environment.AssetId(assetId.arr)), + enableExecutionLog = enableExecutionLog, nextRemainingComplexity ) val scriptComplexity = if (blockchain.storeEvaluatedComplexity) evaluatedComplexity else script.complexity.toInt @@ -218,6 +220,7 @@ object InvokeScriptDiff { pk, calledAddresses, limitedExecution, + enableExecutionLog, totalComplexityLimit, remainingCalls - 1, remainingActions, @@ -239,7 +242,8 @@ object InvokeScriptDiff { invocation, environment, complexityAfterPayments, - remainingComplexity + remainingComplexity, + enableExecutionLog ).map(TracedResult(_)) ) diff <- traced(environment.currentDiff.combineF(paymentsPartToResolve).leftMap(GenericError(_))) @@ -277,6 +281,7 @@ object InvokeScriptDiff { limitedExecution, totalComplexityLimit, Seq(), + 
enableExecutionLog, log ) ) @@ -407,7 +412,8 @@ object InvokeScriptDiff { invocation: ContractEvaluator.Invocation, environment: Environment[Id], limit: Int, - startComplexityLimit: Int + startComplexityLimit: Int, + enableExecutionLog: Boolean ): Coeval[Either[ValidationError, (ScriptResult, Log[Id])]] = { val evaluationCtx = CachedDAppCTX.get(version, blockchain).completeContext(environment) ContractEvaluator @@ -419,7 +425,8 @@ object InvokeScriptDiff { version, limit, blockchain.correctFunctionCallScope, - blockchain.newEvaluatorMode + blockchain.newEvaluatorMode, + enableExecutionLog ) .map( _.leftMap[ValidationError] { diff --git a/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptTransactionDiff.scala b/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptTransactionDiff.scala index 8fd0661df99..72023d68555 100644 --- a/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptTransactionDiff.scala +++ b/node/src/main/scala/com/wavesplatform/state/diffs/invoke/InvokeScriptTransactionDiff.scala @@ -47,7 +47,7 @@ object InvokeScriptTransactionDiff { r.issues ++ r.invokes.flatMap(s => allIssues(s.stateChanges)) } - def apply(blockchain: Blockchain, blockTime: Long, limitedExecution: Boolean)( + def apply(blockchain: Blockchain, blockTime: Long, limitedExecution: Boolean, enableExecutionLog: Boolean)( tx: InvokeScriptTransactionLike ): TracedResult[ValidationError, Diff] = { @@ -112,7 +112,8 @@ object InvokeScriptTransactionDiff { failFreeLimit, invocationComplexity, paymentsComplexity, - blockchain + blockchain, + enableExecutionLog ) } yield MainScriptResult( environment.currentDiff, @@ -175,6 +176,7 @@ object InvokeScriptTransactionDiff { limitedExecution, ContractLimits.MaxTotalInvokeComplexity(version), otherIssues, + enableExecutionLog, log ) @@ -287,6 +289,7 @@ object InvokeScriptTransactionDiff { pk, Set(tx.sender.toAddress), limitedExecution, + enableExecutionLog, ContractLimits.MaxTotalInvokeComplexity(version), ContractLimits.MaxSyncDAppCalls(version), ContractLimits.MaxCallableActionsAmountBeforeV6(version), @@ -361,7 +364,8 @@ object InvokeScriptTransactionDiff { failFreeLimit: Int, estimatedComplexity: Int, paymentsComplexity: Int, - blockchain: Blockchain + blockchain: Blockchain, + enableExecutionLog: Boolean ): Either[ValidationError, (ScriptResult, Log[Id])] = { val evaluationCtx = CachedDAppCTX.get(version, blockchain).completeContext(environment) val startLimit = limit - paymentsComplexity @@ -374,7 +378,8 @@ object InvokeScriptTransactionDiff { version, startLimit, blockchain.correctFunctionCallScope, - blockchain.newEvaluatorMode + blockchain.newEvaluatorMode, + enableExecutionLog ) .runAttempt() .leftMap(error => (error.getMessage: ExecutionError, 0, Nil: Log[Id])) diff --git a/node/src/main/scala/com/wavesplatform/state/reader/CompositeBlockchain.scala b/node/src/main/scala/com/wavesplatform/state/reader/CompositeBlockchain.scala index fe16188773e..50cac6bcf32 100644 --- a/node/src/main/scala/com/wavesplatform/state/reader/CompositeBlockchain.scala +++ b/node/src/main/scala/com/wavesplatform/state/reader/CompositeBlockchain.scala @@ -40,9 +40,25 @@ final class CompositeBlockchain private ( override def balance(address: Address, assetId: Asset): Long = inner.balance(address, assetId) + diff.portfolios.get(address).fold(0L)(_.balanceOf(assetId)) + override def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), Long] = { + inner.balances(req).map { case ((address, asset), balance) => + (address, asset) -> 
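// stored balance for the (address, asset) pair, plus any pending change recorded for that address in the in-memory diff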
(balance + diff.portfolios.get(address).fold(0L)(_.balanceOf(asset))) + } + } + + override def wavesBalances(addresses: Seq[Address]): Map[Address, Long] = + inner.wavesBalances(addresses).map { case (address, balance) => + address -> (balance + diff.portfolios.get(address).fold(0L)(_.balanceOf(Waves))) + } + override def leaseBalance(address: Address): LeaseBalance = inner.leaseBalance(address).combineF[Id](diff.portfolios.getOrElse(address, Portfolio.empty).lease) + override def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] = + inner.leaseBalances(addresses).map { case (address, leaseBalance) => + address -> leaseBalance.combineF[Id](diff.portfolios.getOrElse(address, Portfolio.empty).lease) + } + override def assetScript(asset: IssuedAsset): Option[AssetScriptInfo] = maybeDiff .flatMap(_.assetScripts.get(asset)) @@ -77,6 +93,15 @@ final class CompositeBlockchain private ( .map(t => (TxMeta(Height(this.height), t.applied, t.spentComplexity), t.transaction)) .orElse(inner.transactionInfo(id)) + override def transactionInfos(ids: Seq[ByteStr]): Seq[Option[(TxMeta, Transaction)]] = { + inner.transactionInfos(ids).zip(ids).map { case (info, id) => + diff.transactions + .find(_.transaction.id() == id) + .map(t => (TxMeta(Height(this.height), t.applied, t.spentComplexity), t.transaction)) + .orElse(info) + } + } + override def transactionMeta(id: ByteStr): Option[TxMeta] = diff .transaction(id) @@ -106,7 +131,7 @@ final class CompositeBlockchain private ( } override def balanceSnapshots(address: Address, from: Int, to: Option[BlockId]): Seq[BalanceSnapshot] = - if (maybeDiff.isEmpty || to.exists(id => inner.heightOf(id).isDefined)) { + if (maybeDiff.isEmpty) { inner.balanceSnapshots(address, from, to) } else { val balance = this.balance(address) @@ -114,7 +139,7 @@ final class CompositeBlockchain private ( val bs = BalanceSnapshot(height, Portfolio(balance, lease)) val height2Fix = this.height == 1 && inner.isFeatureActivated(RideV6) && from < this.height + 1 if (inner.height > 0 && (from < this.height || height2Fix)) - bs +: inner.balanceSnapshots(address, from, to) + bs +: inner.balanceSnapshots(address, from, None) // to == this liquid block, so no need to pass block id to inner blockchain else Seq(bs) } @@ -134,7 +159,10 @@ final class CompositeBlockchain private ( } override def accountData(acc: Address, key: String): Option[DataEntry[?]] = - diff.accountData.get(acc).orEmpty.data.get(key).orElse(inner.accountData(acc, key)).filterNot(_.isEmpty) + (for { + d <- diff.accountData.get(acc) + e <- d.get(key) + } yield e).orElse(inner.accountData(acc, key)).filterNot(_.isEmpty) override def hasData(acc: Address): Boolean = { diff.accountData.contains(acc) || inner.hasData(acc) diff --git a/node/src/main/scala/com/wavesplatform/transaction/BlockchainUpdater.scala b/node/src/main/scala/com/wavesplatform/transaction/BlockchainUpdater.scala index c6490b7239e..77d85efd830 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/BlockchainUpdater.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/BlockchainUpdater.scala @@ -7,7 +7,7 @@ import com.wavesplatform.state.Diff import monix.reactive.Observable trait BlockchainUpdater { - def processBlock(block: Block, hitSource: ByteStr, verify: Boolean = true): Either[ValidationError, Seq[Diff]] + def processBlock(block: Block, hitSource: ByteStr, verify: Boolean = true, txSignParCheck: Boolean = true): Either[ValidationError, Seq[Diff]] def processMicroBlock(microBlock: MicroBlock, verify: Boolean = true): 
Either[ValidationError, BlockId] def removeAfter(blockId: ByteStr): Either[ValidationError, DiscardedBlocks] def lastBlockInfo: Observable[LastBlockInfo] diff --git a/node/src/main/scala/com/wavesplatform/transaction/PaymentTransaction.scala b/node/src/main/scala/com/wavesplatform/transaction/PaymentTransaction.scala index cc4b472dd21..0f24102e8e4 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/PaymentTransaction.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/PaymentTransaction.scala @@ -1,7 +1,6 @@ package com.wavesplatform.transaction import scala.util.Try - import com.wavesplatform.account.{Address, KeyPair, PublicKey} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.crypto @@ -21,7 +20,6 @@ case class PaymentTransaction( signature: ByteStr, chainId: Byte ) extends Transaction(TransactionType.Payment) - with Signed with ProvenTransaction with TxWithFee.InWaves { @@ -29,8 +27,6 @@ case class PaymentTransaction( def proofs: Proofs = Proofs(signature) - protected val signatureValid: Coeval[Boolean] = Coeval.evalOnce(crypto.verify(signature, bodyBytes(), sender)) - override val id: Coeval[ByteStr] = Coeval.evalOnce(signature) override val bytes: Coeval[Array[Byte]] = Coeval.evalOnce(PaymentTxSerializer.toBytes(this)) diff --git a/node/src/main/scala/com/wavesplatform/transaction/Proven.scala b/node/src/main/scala/com/wavesplatform/transaction/Proven.scala index 4465eb32c72..fa1a9c4be36 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/Proven.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/Proven.scala @@ -1,10 +1,24 @@ package com.wavesplatform.transaction import com.wavesplatform.common.state.ByteStr +import com.wavesplatform.crypto +import com.wavesplatform.transaction.TxValidationError.GenericError import monix.eval.Coeval trait Proven extends Authorized { def proofs: Proofs val bodyBytes: Coeval[Array[Byte]] + + protected def verifyFirstProof(isRideV6Activated: Boolean): Either[GenericError, Unit] = + if (proofs.size != 1) Left(GenericError("Transactions from non-scripted accounts must have exactly 1 proof")) + else + Either.cond( + crypto.verify(proofs.proofs.head, bodyBytes(), sender, isRideV6Activated), + (), + GenericError(s"Proof doesn't validate as signature for $this") + ) + + lazy val firstProofIsValidSignatureBeforeV6: Either[GenericError, Unit] = verifyFirstProof(false) + lazy val firstProofIsValidSignatureAfterV6: Either[GenericError, Unit] = verifyFirstProof(true) } object Proven { diff --git a/node/src/main/scala/com/wavesplatform/transaction/TransactionType.scala b/node/src/main/scala/com/wavesplatform/transaction/TransactionType.scala index ad280105833..4246c301cf5 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/TransactionType.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/TransactionType.scala @@ -2,25 +2,8 @@ package com.wavesplatform.transaction object TransactionType extends Enumeration(1) { type TransactionType = Value - val Genesis, - Payment, - Issue, - Transfer, - Reissue, - Burn, - Exchange, - Lease, - LeaseCancel, - CreateAlias, - MassTransfer, - Data, - SetScript, - SponsorFee, - SetAssetScript, - InvokeScript, - UpdateAssetInfo, - Ethereum, - InvokeExpression = Value + val Genesis, Payment, Issue, Transfer, Reissue, Burn, Exchange, Lease, LeaseCancel, CreateAlias, MassTransfer, Data, SetScript, SponsorFee, + SetAssetScript, InvokeScript, UpdateAssetInfo, Ethereum, InvokeExpression = Value implicit class ValueExt(val tpe: TransactionType) extends AnyVal 
{ def transactionName: String = s"${tpe}Transaction" diff --git a/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/ExchangeTransaction.scala b/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/ExchangeTransaction.scala index 224fe4aa44d..d4c371a0fdc 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/ExchangeTransaction.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/ExchangeTransaction.scala @@ -11,6 +11,7 @@ import monix.eval.Coeval import play.api.libs.json.JsObject import scala.util.Try +import scala.util.chaining.scalaUtilChainingOps case class ExchangeTransaction( version: TxVersion, @@ -34,6 +35,17 @@ case class ExchangeTransaction( val (buyOrder, sellOrder) = if (order1.orderType == OrderType.BUY) (order1, order2) else (order2, order1) + override protected def verifyFirstProof(isRideV6Activated: Boolean): Either[GenericError, Unit] = + super.verifyFirstProof(isRideV6Activated).tap { _ => + if (isRideV6Activated) { + order1.firstProofIsValidSignatureAfterV6 + order2.firstProofIsValidSignatureAfterV6 + } else { + order1.firstProofIsValidSignatureBeforeV6 + order2.firstProofIsValidSignatureBeforeV6 + } + } + override val sender: PublicKey = buyOrder.matcherPublicKey override val bodyBytes: Coeval[Array[Byte]] = Coeval.evalOnce(ExchangeTxSerializer.bodyBytes(this)) diff --git a/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/Order.scala b/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/Order.scala index cecac30c749..e3dc04130c1 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/Order.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/assets/exchange/Order.scala @@ -107,6 +107,14 @@ case class Order( val json: Coeval[JsObject] = Coeval.evalOnce(OrderSerializer.toJson(this)) + override protected def verifyFirstProof(isRideV6Activated: Boolean): Either[GenericError, Unit] = + eip712Signature match { + case Some(ethSignature) => + val signerKey = EthOrders.recoverEthSignerKey(this, ethSignature.arr) + Either.cond(signerKey == senderPublicKey, (), GenericError(s"Ethereum signature invalid for $this")) + case _ => super.verifyFirstProof(isRideV6Activated) + } + override def toString: String = { val matcherFeeAssetIdStr = if (version == 3) s" matcherFeeAssetId=${matcherFeeAssetId.fold("Waves")(_.toString)}," else "" s"OrderV$version(id=${idStr()}, sender=$senderPublicKey, matcher=$matcherPublicKey, pair=$assetPair, type=$orderType, amount=$amount, " + diff --git a/node/src/main/scala/com/wavesplatform/transaction/serialization/impl/IssueTxSerializer.scala b/node/src/main/scala/com/wavesplatform/transaction/serialization/impl/IssueTxSerializer.scala index 9e562215c6a..888ef21f794 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/serialization/impl/IssueTxSerializer.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/serialization/impl/IssueTxSerializer.scala @@ -13,7 +13,7 @@ import scala.util.Try object IssueTxSerializer { def toJson(tx: IssueTransaction): JsObject = { - import tx._ + import tx.* BaseTxJson.toJson(tx) ++ Json.obj( "assetId" -> id().toString, "name" -> name.toStringUtf8, @@ -22,7 +22,7 @@ object IssueTxSerializer { "decimals" -> decimals.value, "description" -> description.toStringUtf8 ) ++ (if (version >= TxVersion.V2) Json.obj("script" -> script.map(_.bytes().base64)) else JsObject.empty) ++ - (if (version == TxVersion.V2) Json.obj("chainId" -> chainId) else JsObject.empty) + (if (version 
== TxVersion.V2) Json.obj("chainId" -> chainId) else JsObject.empty) } def bodyBytes(tx: IssueTransaction): Array[Byte] = { diff --git a/node/src/main/scala/com/wavesplatform/transaction/smart/Verifier.scala b/node/src/main/scala/com/wavesplatform/transaction/smart/Verifier.scala index 907a38ff32e..30e81be1c31 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/smart/Verifier.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/smart/Verifier.scala @@ -5,7 +5,6 @@ import cats.syntax.either.* import cats.syntax.functor.* import com.google.common.base.Throwables import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.crypto import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.features.EstimatorProvider.EstimatorBlockchainExt import com.wavesplatform.lang.ValidationError @@ -21,9 +20,9 @@ import com.wavesplatform.lang.v1.traits.Environment import com.wavesplatform.lang.v1.traits.domain.Recipient import com.wavesplatform.metrics.* import com.wavesplatform.state.* +import com.wavesplatform.transaction.* import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.TxValidationError.{GenericError, ScriptExecutionError, TransactionNotAllowedByScript} -import com.wavesplatform.transaction.* import com.wavesplatform.transaction.assets.exchange.{EthOrders, ExchangeTransaction, Order} import com.wavesplatform.transaction.smart.script.ScriptRunner import com.wavesplatform.transaction.smart.script.ScriptRunner.TxOrd @@ -44,17 +43,25 @@ object Verifier extends ScorexLogging { type ValidationResult[T] = Either[ValidationError, T] - def apply(blockchain: Blockchain, limitedExecution: Boolean = false)(tx: Transaction): TracedResult[ValidationError, Int] = (tx: @unchecked) match { + def apply(blockchain: Blockchain, limitedExecution: Boolean = false, enableExecutionLog: Boolean = false)( + tx: Transaction + ): TracedResult[ValidationError, Int] = (tx: @unchecked) match { case _: GenesisTransaction => Right(0) case _: EthereumTransaction => Right(0) case pt: ProvenTransaction => (pt, blockchain.accountScript(pt.sender.toAddress)) match { case (stx: PaymentTransaction, None) => stats.signatureVerification - .measureForType(stx.tpe)(stx.signaturesValid()) + .measureForType(stx.tpe)(stx.firstProofIsValidSignatureBeforeV6) .as(0) case (et: ExchangeTransaction, scriptOpt) => - verifyExchange(et, blockchain, scriptOpt, if (limitedExecution) ContractLimits.FailFreeInvokeComplexity else Int.MaxValue) + verifyExchange( + et, + blockchain, + scriptOpt, + if (limitedExecution) ContractLimits.FailFreeInvokeComplexity else Int.MaxValue, + enableExecutionLog + ) case (tx: SigProofsSwitch, Some(_)) if tx.usesLegacySignature => Left(GenericError("Can't process transaction with signature from scripted account")) case (_: PaymentTransaction, Some(_)) => @@ -65,7 +72,9 @@ object Verifier extends ScorexLogging { ) case (_, Some(script)) => stats.accountScriptExecution - .measureForType(pt.tpe)(verifyTx(blockchain, script.script, script.verifierComplexity.toInt, pt, None)) + .measureForType(pt.tpe)( + verifyTx(blockchain, script.script, script.verifierComplexity.toInt, pt, None, enableExecutionLog) + ) case _ => stats.signatureVerification .measureForType(tx.tpe)(verifyAsEllipticCurveSignature(pt, blockchain.isFeatureActivated(BlockchainFeatures.RideV6))) @@ -81,7 +90,9 @@ object Verifier extends ScorexLogging { } /** Verifies asset scripts and returns diff with complexity. 
In case of error returns spent complexity */ - def assets(blockchain: Blockchain, remainingComplexity: Int)(tx: TransactionBase): TracedResult[(Long, ValidationError), Diff] = { + def assets(blockchain: Blockchain, remainingComplexity: Int, enableExecutionLog: Boolean)( + tx: TransactionBase + ): TracedResult[(Long, ValidationError), Diff] = { case class AssetForCheck(asset: IssuedAsset, script: AssetScriptInfo, assetType: AssetContext) @tailrec @@ -97,7 +108,16 @@ object Verifier extends ScorexLogging { if (remainingComplexity == Int.MaxValue) remainingComplexity else remainingComplexity - fullComplexity.toInt - def verify = verifyTx(blockchain, script, estimatedComplexity.toInt, tx, Some(asset.id), complexityLimit, context) + def verify = verifyTx( + blockchain, + script, + estimatedComplexity.toInt, + tx, + Some(asset.id), + enableExecutionLog, + complexityLimit, + context + ) stats.assetScriptExecution.measureForType(tx.tpe)(verify) match { case TracedResult(e @ Left(_), trace, attributes) => @@ -154,6 +174,7 @@ object Verifier extends ScorexLogging { estimatedComplexity: Int, transaction: TransactionBase, assetIdOpt: Option[ByteStr], + enableExecutionLog: Boolean, complexityLimit: Int = Int.MaxValue, assetContext: AssetContext.Value = AssetContext.Unknown ): TracedResult[ValidationError, Int] = { @@ -166,7 +187,15 @@ object Verifier extends ScorexLogging { Coproduct[Environment.Tthis](Environment.AssetId(v.arr)) ) val (log, evaluatedComplexity, result) = - ScriptRunner(Coproduct[TxOrd](transaction), blockchain, script, isAsset, containerAddress, complexityLimit) + ScriptRunner( + Coproduct[TxOrd](transaction), + blockchain, + script, + isAsset, + containerAddress, + enableExecutionLog, + complexityLimit + ) val complexity = if (blockchain.storeEvaluatedComplexity) evaluatedComplexity else estimatedComplexity val resultE = result match { case Left(execError) => Left(ScriptExecutionError(execError.message, log, assetIdOpt)) @@ -197,7 +226,13 @@ object Verifier extends ScorexLogging { } } - private def verifyOrder(blockchain: Blockchain, script: AccountScriptInfo, order: Order, complexityLimit: Int): ValidationResult[Int] = + private def verifyOrder( + blockchain: Blockchain, + script: AccountScriptInfo, + order: Order, + complexityLimit: Int, + enableExecutionLog: Boolean + ): ValidationResult[Int] = Try( ScriptRunner( Coproduct[ScriptRunner.TxOrd](order), @@ -205,6 +240,7 @@ object Verifier extends ScorexLogging { script.script, isAssetScript = false, Coproduct[Environment.Tthis](Recipient.Address(ByteStr(order.sender.toAddress.bytes))), + enableExecutionLog, complexityLimit ) ).toEither @@ -226,7 +262,8 @@ object Verifier extends ScorexLogging { et: ExchangeTransaction, blockchain: Blockchain, matcherScriptOpt: Option[AccountScriptInfo], - complexityLimit: Int + complexityLimit: Int, + enableExecutionLog: Boolean ): TracedResult[ValidationError, Int] = { val typeId = et.tpe @@ -238,7 +275,17 @@ object Verifier extends ScorexLogging { .map { script => if (et.version != 1) { stats.accountScriptExecution - .measureForType(typeId)(verifyTx(blockchain, script.script, script.verifierComplexity.toInt, et, None, complexityLimit)) + .measureForType(typeId)( + verifyTx( + blockchain, + script.script, + script.verifierComplexity.toInt, + et, + None, + enableExecutionLog, + complexityLimit + ) + ) } else { TracedResult(Left(GenericError("Can't process transaction with signature from scripted account"))) } @@ -253,7 +300,7 @@ object Verifier extends ScorexLogging { 
.accountScript(order.sender.toAddress) .map { asi => if (order.version != 1) { - stats.orderValidation.withoutTags().measure(verifyOrder(blockchain, asi, order, complexityLimit)) + stats.orderValidation.withoutTags().measure(verifyOrder(blockchain, asi, order, complexityLimit, enableExecutionLog)) } else { Left(GenericError("Can't process order with signature from scripted account")) } @@ -278,21 +325,21 @@ object Verifier extends ScorexLogging { } yield matcherComplexity + sellerComplexity + buyerComplexity } - def verifyOrderSignature(order: Order, checkWeakPk: Boolean): Either[GenericError, Order] = + def verifyOrderSignature(order: Order, isRideV6Activated: Boolean): Either[GenericError, Order] = order.eip712Signature match { case Some(ethSignature) => val signerKey = EthOrders.recoverEthSignerKey(order, ethSignature.arr) Either.cond(signerKey == order.senderPublicKey, order, GenericError(s"Ethereum signature invalid for $order")) - case _ => verifyAsEllipticCurveSignature(order, checkWeakPk) + case _ => verifyAsEllipticCurveSignature(order, isRideV6Activated) } - def verifyAsEllipticCurveSignature[T <: Proven & Authorized](pt: T, checkWeakPk: Boolean): Either[GenericError, T] = - pt.proofs.proofs match { - case p +: Nil => - Either.cond(crypto.verify(p, pt.bodyBytes(), pt.sender, checkWeakPk), pt, GenericError(s"Proof doesn't validate as signature for $pt")) - case _ => Left(GenericError("Transactions from non-scripted accounts must have exactly 1 proof")) - } + def verifyAsEllipticCurveSignature[T <: Proven](pt: T, isRideV6Activated: Boolean): Either[GenericError, T] = + (if (isRideV6Activated) { + pt.firstProofIsValidSignatureAfterV6 + } else { + pt.firstProofIsValidSignatureBeforeV6 + }).map(_ => pt) @VisibleForTesting private[smart] def buildLogs( diff --git a/node/src/main/scala/com/wavesplatform/transaction/smart/WavesEnvironment.scala b/node/src/main/scala/com/wavesplatform/transaction/smart/WavesEnvironment.scala index 5be966186f5..98ff974555c 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/smart/WavesEnvironment.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/smart/WavesEnvironment.scala @@ -320,6 +320,7 @@ class DAppEnvironment( currentDAppPk: com.wavesplatform.account.PublicKey, calledAddresses: Set[com.wavesplatform.account.Address], limitedExecution: Boolean, + enableExecutionLog: Boolean, totalComplexityLimit: Int, var remainingCalls: Int, var availableActions: Int, @@ -378,6 +379,7 @@ class DAppEnvironment( mutableBlockchain, blockchain.settings.functionalitySettings.allowInvalidReissueInSameBlockUntilTimestamp + 1, limitedExecution, + enableExecutionLog, totalComplexityLimit, availableComplexity, remainingCalls, @@ -404,7 +406,11 @@ class DAppEnvironment( availablePayments = remainingPayments availableData = remainingData availableDataSize = remainingDataSize - (evaluated, diff.scriptsComplexity.toInt, DiffToLogConverter.convert(diff, tx.id(), func, availableComplexity)) + ( + evaluated, + diff.scriptsComplexity.toInt, + if (enableExecutionLog) DiffToLogConverter.convert(diff, tx.id(), func, availableComplexity) else List.empty + ) } r.v.map { diff --git a/node/src/main/scala/com/wavesplatform/transaction/smart/script/ScriptRunner.scala b/node/src/main/scala/com/wavesplatform/transaction/smart/script/ScriptRunner.scala index 5b445a07933..9a5975f76ee 100644 --- a/node/src/main/scala/com/wavesplatform/transaction/smart/script/ScriptRunner.scala +++ b/node/src/main/scala/com/wavesplatform/transaction/smart/script/ScriptRunner.scala @@ -34,6 
+34,7 @@ object ScriptRunner { script: Script, isAssetScript: Boolean, scriptContainerAddress: Environment.Tthis, + enableExecutionLog: Boolean, complexityLimit: Int = Int.MaxValue, default: EVALUATED = TRUE ): (Log[Id], Int, Either[ExecutionError, EVALUATED]) = @@ -52,6 +53,7 @@ object ScriptRunner { blockchain.checkEstimatorSumOverflow, blockchain.newEvaluatorMode, blockchain.isFeatureActivated(RideV6), + enableExecutionLog, blockchain.isFeatureActivated(ConsensusImprovements) ) @@ -69,6 +71,7 @@ object ScriptRunner { checkEstimatorSumOverflow: Boolean, newEvaluatorMode: Boolean, checkWeakPk: Boolean, + enableExecutionLog: Boolean, fixBigScriptField: Boolean ): (Log[Id], Int, Either[ExecutionError, EVALUATED]) = { @@ -131,7 +134,8 @@ object ScriptRunner { limit, correctFunctionCallScope = checkEstimatorSumOverflow, newMode = newEvaluatorMode, - onExceed + onExceed, + enableExecutionLog ) (log, limit - unusedComplexity, result) diff --git a/node/src/main/scala/com/wavesplatform/utils/ObservedLoadingCache.scala b/node/src/main/scala/com/wavesplatform/utils/ObservedLoadingCache.scala index 6dd4f9b5bdc..3937a7da2d4 100644 --- a/node/src/main/scala/com/wavesplatform/utils/ObservedLoadingCache.scala +++ b/node/src/main/scala/com/wavesplatform/utils/ObservedLoadingCache.scala @@ -37,5 +37,4 @@ class ObservedLoadingCache[K, V](override val delegate: LoadingCache[K, V], chan case k: K => changed.onNext(k) case _ => } - } diff --git a/node/src/main/scala/com/wavesplatform/utils/generator/BlockchainGeneratorApp.scala b/node/src/main/scala/com/wavesplatform/utils/generator/BlockchainGeneratorApp.scala index 49157157c40..10fc26ebe37 100644 --- a/node/src/main/scala/com/wavesplatform/utils/generator/BlockchainGeneratorApp.scala +++ b/node/src/main/scala/com/wavesplatform/utils/generator/BlockchainGeneratorApp.scala @@ -2,23 +2,20 @@ package com.wavesplatform.utils.generator import java.io.{File, FileOutputStream, PrintWriter} import java.util.concurrent.TimeUnit -import scala.collection.mutable.ArrayBuffer -import scala.concurrent.duration.* -import scala.language.reflectiveCalls + import cats.implicits.* import com.typesafe.config.{ConfigFactory, ConfigParseOptions} import com.wavesplatform.{GenesisBlockGenerator, Version} import com.wavesplatform.account.{Address, SeedKeyPair} import com.wavesplatform.block.Block import com.wavesplatform.consensus.PoSSelector -import com.wavesplatform.database.openDB +import com.wavesplatform.database.RDB import com.wavesplatform.events.{BlockchainUpdateTriggers, UtxEvent} import com.wavesplatform.history.StorageFactory import com.wavesplatform.lang.ValidationError import com.wavesplatform.mining.{Miner, MinerImpl} import com.wavesplatform.settings.* import com.wavesplatform.state.appender.BlockAppender -import com.wavesplatform.transaction.Asset import com.wavesplatform.transaction.TxValidationError.GenericError import com.wavesplatform.utils.{Schedulers, ScorexLogging, Time} import com.wavesplatform.utx.UtxPoolImpl @@ -30,6 +27,10 @@ import net.ceedubs.ficus.readers.ArbitraryTypeReader.* import play.api.libs.json.Json import scopt.OParser +import scala.collection.mutable.ArrayBuffer +import scala.concurrent.duration.* +import scala.language.reflectiveCalls + object BlockchainGeneratorApp extends ScorexLogging { final case class BlockchainGeneratorAppSettings( genesisConfigFile: File = null, @@ -115,16 +116,14 @@ object BlockchainGeneratorApp extends ScorexLogging { override def getTimestamp(): Long = time } - val spendableBalance = 
ConcurrentSubject.publish[(Address, Asset)] val blockchain = { - val db = openDB(wavesSettings.dbSettings.directory, recreate = true) - val (blockchainUpdater, leveldb) = - StorageFactory(wavesSettings, db, fakeTime, spendableBalance, BlockchainUpdateTriggers.noop) + val rdb = RDB.open(wavesSettings.dbSettings) + val (blockchainUpdater, rocksdb) = + StorageFactory(wavesSettings, rdb, fakeTime, BlockchainUpdateTriggers.noop) com.wavesplatform.checkGenesis(wavesSettings, blockchainUpdater, Miner.Disabled) sys.addShutdownHook(synchronized { blockchainUpdater.shutdown() - leveldb.close() - db.close() + rdb.close() }) blockchainUpdater } diff --git a/node/src/main/scala/com/wavesplatform/utx/UtxPool.scala b/node/src/main/scala/com/wavesplatform/utx/UtxPool.scala index 9e7c67de339..ae2bcfee333 100644 --- a/node/src/main/scala/com/wavesplatform/utx/UtxPool.scala +++ b/node/src/main/scala/com/wavesplatform/utx/UtxPool.scala @@ -1,15 +1,19 @@ package com.wavesplatform.utx +import scala.concurrent.duration.FiniteDuration import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.ValidationError import com.wavesplatform.mining.{MiningConstraint, MultiDimensionalMiningConstraint} +import com.wavesplatform.state.Diff import com.wavesplatform.transaction.* import com.wavesplatform.transaction.smart.script.trace.TracedResult import com.wavesplatform.utx.UtxPool.PackStrategy -import scala.concurrent.duration.FiniteDuration +trait UtxForAppender { + def setPriorityDiffs(diffs: Seq[Diff]): Unit +} -trait UtxPool extends AutoCloseable { +trait UtxPool extends UtxForAppender with AutoCloseable { def putIfNew(tx: Transaction, forceValidate: Boolean = false): TracedResult[ValidationError, Boolean] def removeAll(txs: Iterable[Transaction]): Unit def all: Seq[Transaction] diff --git a/node/src/main/scala/com/wavesplatform/utx/UtxPoolImpl.scala b/node/src/main/scala/com/wavesplatform/utx/UtxPoolImpl.scala index 958f96d4203..b3a661b11fd 100644 --- a/node/src/main/scala/com/wavesplatform/utx/UtxPoolImpl.scala +++ b/node/src/main/scala/com/wavesplatform/utx/UtxPoolImpl.scala @@ -199,9 +199,18 @@ case class UtxPoolImpl( val diffEi = { def calculateDiff(): TracedResult[ValidationError, Diff] = { if (forceValidate) - TransactionDiffer.forceValidate(blockchain.lastBlockTimestamp, time.correctedTime())(priorityPool.compositeBlockchain, tx) + TransactionDiffer.forceValidate(blockchain.lastBlockTimestamp, time.correctedTime(), enableExecutionLog = true)( + priorityPool.compositeBlockchain, + tx + ) else - TransactionDiffer.limitedExecution(blockchain.lastBlockTimestamp, time.correctedTime(), utxSettings.alwaysUnlimitedExecution, verify)( + TransactionDiffer.limitedExecution( + blockchain.lastBlockTimestamp, + time.correctedTime(), + utxSettings.alwaysUnlimitedExecution, + verify, + enableExecutionLog = true + )( priorityPool.compositeBlockchain, tx ) @@ -255,7 +264,7 @@ case class UtxPoolImpl( strategy: PackStrategy, cancelled: () => Boolean ): (Option[Seq[Transaction]], MultiDimensionalMiningConstraint) = { - pack(TransactionDiffer(blockchain.lastBlockTimestamp, time.correctedTime()))(initialConstraint, strategy, cancelled) + pack(TransactionDiffer(blockchain.lastBlockTimestamp, time.correctedTime(), enableExecutionLog = true))(initialConstraint, strategy, cancelled) } def cleanUnconfirmed(): Unit = { @@ -269,9 +278,17 @@ case class UtxPoolImpl( TxStateActions.removeExpired(tx) } else { val differ = if (!isMiningEnabled && utxSettings.forceValidateInCleanup) { - 
TransactionDiffer.forceValidate(blockchain.lastBlockTimestamp, time.correctedTime())(priorityPool.compositeBlockchain, _) + TransactionDiffer.forceValidate(blockchain.lastBlockTimestamp, time.correctedTime(), enableExecutionLog = true)( + priorityPool.compositeBlockchain, + _ + ) } else { - TransactionDiffer.limitedExecution(blockchain.lastBlockTimestamp, time.correctedTime(), utxSettings.alwaysUnlimitedExecution)( + TransactionDiffer.limitedExecution( + blockchain.lastBlockTimestamp, + time.correctedTime(), + utxSettings.alwaysUnlimitedExecution, + enableExecutionLog = true + )( priorityPool.compositeBlockchain, _ ) diff --git a/node/src/test/resources/application.conf b/node/src/test/resources/application.conf index baab6776c0e..9d6e914149c 100644 --- a/node/src/test/resources/application.conf +++ b/node/src/test/resources/application.conf @@ -1,4 +1,10 @@ waves { utx.allow-transactions-from-smart-accounts = true wallet.password = "some string as password" + db.rocksdb { + main-cache-size = 1K + tx-cache-size = 1K + tx-meta-cache-size = 1K + write-buffer-size = 1M + } } diff --git a/node/src/test/scala/com/wavesplatform/BlockchainStubHelpers.scala b/node/src/test/scala/com/wavesplatform/BlockchainStubHelpers.scala index 0411a1902c0..55aae9acdd4 100644 --- a/node/src/test/scala/com/wavesplatform/BlockchainStubHelpers.scala +++ b/node/src/test/scala/com/wavesplatform/BlockchainStubHelpers.scala @@ -11,7 +11,18 @@ import com.wavesplatform.lang.script.Script import com.wavesplatform.lang.ValidationError import com.wavesplatform.network.TransactionPublisher import com.wavesplatform.settings.WavesSettings -import com.wavesplatform.state.{AccountScriptInfo, AssetDescription, AssetScriptInfo, Blockchain, Diff, Height, LeaseBalance, NG, TxMeta, VolumeAndFee} +import com.wavesplatform.state.{ + AccountScriptInfo, + AssetDescription, + AssetScriptInfo, + Blockchain, + Diff, + Height, + LeaseBalance, + NG, + TxMeta, + VolumeAndFee +} import com.wavesplatform.state.diffs.TransactionDiffer import com.wavesplatform.transaction.{Asset, ERC20Address, Transaction, TxHelpers} import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves} @@ -51,11 +62,15 @@ trait BlockchainStubHelpers { self: MockFactoryBase => (blockchain.filledVolumeAndFee _).when(*).returns(VolumeAndFee.empty) (blockchain.assetDescription _).when(*).returns(None) (blockchain.balance _).when(TxHelpers.defaultAddress, Waves).returns(Long.MaxValue / 3) + (blockchain.wavesBalances _).when(Seq(TxHelpers.defaultAddress)).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue / 3)) blockchain } - def createTxPublisherStub(blockchain: Blockchain): TransactionPublisher = { (transaction, _) => - Future.successful(TransactionDiffer(blockchain.lastBlockTimestamp, System.currentTimeMillis())(blockchain, transaction).map(_ => true)) + def createTxPublisherStub(blockchain: Blockchain, enableExecutionLog: Boolean): TransactionPublisher = { (transaction, _) => + Future.successful( + TransactionDiffer(blockchain.lastBlockTimestamp, System.currentTimeMillis(), enableExecutionLog = enableExecutionLog)(blockchain, transaction) + .map(_ => true) + ) } implicit class BlockchainStubOps(blockchain: Blockchain) { diff --git a/node/src/test/scala/com/wavesplatform/WithDB.scala b/node/src/test/scala/com/wavesplatform/WithDB.scala deleted file mode 100644 index 15adad4de69..00000000000 --- a/node/src/test/scala/com/wavesplatform/WithDB.scala +++ /dev/null @@ -1,37 +0,0 @@ -package com.wavesplatform - -import java.nio.file.Files - -import 
com.wavesplatform.account.Address -import com.wavesplatform.database.LevelDBFactory -import com.wavesplatform.events.BlockchainUpdateTriggers -import com.wavesplatform.transaction.Asset -import monix.reactive.subjects.{PublishSubject, Subject} -import org.iq80.leveldb.{DB, Options} -import org.scalatest.{BeforeAndAfterEach, Suite} - -trait WithDB extends BeforeAndAfterEach { - this: Suite => - - private val path = Files.createTempDirectory("lvl").toAbsolutePath - private var currentDBInstance: DB = _ - - protected val ignoreSpendableBalanceChanged: Subject[(Address, Asset), (Address, Asset)] = PublishSubject() - - protected val ignoreBlockchainUpdateTriggers: BlockchainUpdateTriggers = BlockchainUpdateTriggers.noop - - def db: DB = currentDBInstance - - override def beforeEach(): Unit = { - currentDBInstance = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true)) - super.beforeEach() - } - - override def afterEach(): Unit = - try { - super.afterEach() - db.close() - } finally { - TestHelpers.deleteRecursively(path) - } -} diff --git a/node/src/test/scala/com/wavesplatform/WithNewDBForEachTest.scala b/node/src/test/scala/com/wavesplatform/WithNewDBForEachTest.scala new file mode 100644 index 00000000000..8bd5f4ac889 --- /dev/null +++ b/node/src/test/scala/com/wavesplatform/WithNewDBForEachTest.scala @@ -0,0 +1,32 @@ +package com.wavesplatform + +import java.nio.file.Files + +import com.wavesplatform.database.RDB +import com.wavesplatform.db.DBCacheSettings +import com.wavesplatform.events.BlockchainUpdateTriggers +import org.scalatest.{BeforeAndAfterEach, Suite} + +trait WithNewDBForEachTest extends BeforeAndAfterEach with DBCacheSettings { + this: Suite => + + private val path = Files.createTempDirectory("rocks").toAbsolutePath + private var currentDBInstance: RDB = _ + + protected val ignoreBlockchainUpdateTriggers: BlockchainUpdateTriggers = BlockchainUpdateTriggers.noop + + def db: RDB = currentDBInstance + + override def beforeEach(): Unit = { + currentDBInstance = RDB.open(dbSettings.copy(directory = path.toAbsolutePath.toString)) + super.beforeEach() + } + + override def afterEach(): Unit = + try { + super.afterEach() + db.close() + } finally { + TestHelpers.deleteRecursively(path) + } +} diff --git a/node/src/test/scala/com/wavesplatform/api/common/AddressTransactionsSpec.scala b/node/src/test/scala/com/wavesplatform/api/common/AddressTransactionsSpec.scala index a0b67ecef28..a8dcbb35966 100644 --- a/node/src/test/scala/com/wavesplatform/api/common/AddressTransactionsSpec.scala +++ b/node/src/test/scala/com/wavesplatform/api/common/AddressTransactionsSpec.scala @@ -9,7 +9,7 @@ class AddressTransactionsSpec extends FreeSpec { "with pagination" - { "after txs is in the middle of ngState" in pending "after txs is the last of ngState" in pending - "after txs is in levelDb" in pending + "after txs is in rocksDb" in pending } "return txs in correct ordering without fromId" in pending diff --git a/node/src/test/scala/com/wavesplatform/api/common/CommonAccountApiSpec.scala b/node/src/test/scala/com/wavesplatform/api/common/CommonAccountApiSpec.scala index 0b4ac1df431..39b6d7805b7 100644 --- a/node/src/test/scala/com/wavesplatform/api/common/CommonAccountApiSpec.scala +++ b/node/src/test/scala/com/wavesplatform/api/common/CommonAccountApiSpec.scala @@ -9,7 +9,7 @@ import com.wavesplatform.lang.directives.values.V5 import com.wavesplatform.lang.v1.compiler.TestCompiler import com.wavesplatform.lang.v1.traits.domain.{Lease, Recipient} import 
com.wavesplatform.settings.TestFunctionalitySettings -import com.wavesplatform.state.{DataEntry, Diff, EmptyDataEntry, StringDataEntry, diffs} +import com.wavesplatform.state.{DataEntry, EmptyDataEntry, StringDataEntry, diffs} import com.wavesplatform.test.DomainPresets.RideV4 import com.wavesplatform.test.FreeSpec import com.wavesplatform.transaction.TxHelpers.data @@ -35,8 +35,8 @@ class CommonAccountApiSpec extends FreeSpec with WithDomain with BlocksTransacti val data5 = data(acc, Seq(EmptyDataEntry("test2"), entry1, entry2), version = V2) withDomain(RideV4) { d => - val commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), d.db, d.blockchainUpdater) - def dataList(): Set[DataEntry[_]] = commonAccountsApi.dataStream(acc.toAddress, None).toListL.runSyncUnsafe().toSet + val commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.getCompositeBlockchain, d.rdb, d.blockchainUpdater) + def dataList(): Set[DataEntry[?]] = commonAccountsApi.dataStream(acc.toAddress, None).toListL.runSyncUnsafe().toSet d.appendBlock(genesis) d.appendMicroBlock(data1) @@ -71,8 +71,8 @@ class CommonAccountApiSpec extends FreeSpec with WithDomain with BlocksTransacti forAll(preconditions) { case (acc, block1, mb1, block2, mb2) => withDomain(domainSettingsWithFS(TestFunctionalitySettings.withFeatures(BlockchainFeatures.NG, BlockchainFeatures.DataTransaction))) { d => - val commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), d.db, d.blockchainUpdater) - def dataList(): Set[DataEntry[_]] = commonAccountsApi.dataStream(acc.toAddress, Some("test_.*")).toListL.runSyncUnsafe().toSet + val commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.getCompositeBlockchain, d.rdb, d.blockchainUpdater) + def dataList(): Set[DataEntry[?]] = commonAccountsApi.dataStream(acc.toAddress, Some("test_.*")).toListL.runSyncUnsafe().toSet d.appendBlock(block1) dataList() shouldBe empty @@ -114,7 +114,7 @@ class CommonAccountApiSpec extends FreeSpec with WithDomain with BlocksTransacti "includes NFT balances when ReducedNFTFee feature is inactive" in pending "excludes NFT balances when ReducedNFTFee feature is active" - { "from diff" in pending - "from leveldb" in pending + "from rocksdb" in pending } } @@ -145,7 +145,7 @@ class CommonAccountApiSpec extends FreeSpec with WithDomain with BlocksTransacti invoke ) - val api = CommonAccountsApi(() => Diff.empty, d.db, d.blockchain) + val api = CommonAccountsApi(() => d.blockchain.getCompositeBlockchain, d.rdb, d.blockchain) val leaseId = Lease.calculateId( Lease( Recipient.Address(ByteStr(TxHelpers.defaultAddress.bytes)), diff --git a/node/src/test/scala/com/wavesplatform/api/http/CustomJsonMarshallerSpec.scala b/node/src/test/scala/com/wavesplatform/api/http/CustomJsonMarshallerSpec.scala index 4dd1bf88b49..f8a279db98f 100644 --- a/node/src/test/scala/com/wavesplatform/api/http/CustomJsonMarshallerSpec.scala +++ b/node/src/test/scala/com/wavesplatform/api/http/CustomJsonMarshallerSpec.scala @@ -12,7 +12,7 @@ import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.history.DefaultBlockchainSettings import com.wavesplatform.http.{ApiErrorMatchers, RestAPISettingsHelper} import com.wavesplatform.network.TransactionPublisher -import com.wavesplatform.state.reader.LeaseDetails +import com.wavesplatform.state.reader.{CompositeBlockchain, LeaseDetails} import com.wavesplatform.state.{Blockchain, Height} import com.wavesplatform.test.PropSpec import 
com.wavesplatform.transaction.Asset @@ -67,6 +67,7 @@ class CustomJsonMarshallerSpec transactionsApi, testWallet, blockchain, + mock[() => CompositeBlockchain], () => utx.size, publisher, ntpTime, @@ -115,6 +116,7 @@ class CustomJsonMarshallerSpec testWallet, publisher, blockchain, + mock[() => CompositeBlockchain], ntpTime, accountsApi, assetsApi, diff --git a/node/src/test/scala/com/wavesplatform/consensus/FPPoSSelectorTest.scala b/node/src/test/scala/com/wavesplatform/consensus/FPPoSSelectorTest.scala index d4678b49eb4..6a4839943bb 100644 --- a/node/src/test/scala/com/wavesplatform/consensus/FPPoSSelectorTest.scala +++ b/node/src/test/scala/com/wavesplatform/consensus/FPPoSSelectorTest.scala @@ -7,26 +7,25 @@ import com.wavesplatform.account.KeyPair import com.wavesplatform.block.Block import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 -import com.wavesplatform.database.LevelDBFactory +import com.wavesplatform.database.RDB import com.wavesplatform.db.DBCacheSettings import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.lagonaki.mocks.TestBlock -import com.wavesplatform.settings.{WavesSettings, _} -import com.wavesplatform.state._ +import com.wavesplatform.settings.* +import com.wavesplatform.state.* import com.wavesplatform.state.diffs.ENOUGH_AMT -import com.wavesplatform.state.utils.TestLevelDB -import com.wavesplatform.test._ +import com.wavesplatform.state.utils.TestRocksDB +import com.wavesplatform.test.* import com.wavesplatform.transaction.{BlockchainUpdater, GenesisTransaction} import com.wavesplatform.utils.Time -import com.wavesplatform.{TestHelpers, WithDB, crypto} -import org.iq80.leveldb.Options +import com.wavesplatform.{TestHelpers, WithNewDBForEachTest, crypto} import org.scalacheck.{Arbitrary, Gen} -import scala.concurrent.duration._ +import scala.concurrent.duration.* import scala.util.Random -class FPPoSSelectorTest extends FreeSpec with WithDB with DBCacheSettings { - import FPPoSSelectorTest._ +class FPPoSSelectorTest extends FreeSpec with WithNewDBForEachTest with DBCacheSettings { + import FPPoSSelectorTest.* private val generationSignatureMethods = Table( ("method", "block version", "vrf activated"), @@ -35,178 +34,162 @@ class FPPoSSelectorTest extends FreeSpec with WithDB with DBCacheSettings { ) "block delay" - { - "same on the same height in different forks" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT / 2, ENOUGH_AMT / 3), 110, blockVersion), vrfActivated) { - case Env(_, blockchain, miners, blocks) => - val miner1 = miners.head - val miner2 = miners.tail.head - - val miner1Balance = blockchain.effectiveBalance(miner1.toAddress, 0) - - val fork1 = mkFork(100, miner1, blockchain, blocks.last, blockVersion) - val fork2 = mkFork(100, miner2, blockchain, blocks.last, blockVersion) - - val fork1Delay = { - val blockForHit = - fork1 - .lift(100) - .orElse( - blockchain - .blockHeader(blockchain.height + fork1.length - 100) - .map((_, blockchain.hitSource(blockchain.height + fork1.length - 100).get)) - ) - .getOrElse(fork1.head) - - val gs = - if (vrfActivated) blockForHit._2.arr - else PoSCalculator.generationSignature(blockForHit._2, miner1.publicKey) - calcDelay(gs, fork1.head._1.header.baseTarget, miner1Balance) - } - - val fork2Delay = { - val blockForHit = - fork2 - .lift(100) - .orElse( - blockchain - .blockHeader(blockchain.height + fork2.length - 100) - .map((_, blockchain.hitSource(blockchain.height + 
fork2.length - 100).get)) - ) - .getOrElse(fork2.head) - - val gs = - if (vrfActivated) blockForHit._2.arr - else PoSCalculator.generationSignature(blockForHit._2, miner1.publicKey) - calcDelay(gs, fork2.head._1.header.baseTarget, miner1Balance) - } - - fork1Delay shouldEqual fork2Delay + "same on the same height in different forks" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT / 2, ENOUGH_AMT / 3), 110, blockVersion), vrfActivated) { case Env(_, blockchain, miners, blocks) => + val miner1 = miners.head + val miner2 = miners.tail.head + + val miner1Balance = blockchain.effectiveBalance(miner1.toAddress, 0) + + val fork1 = mkFork(100, miner1, blockchain, blocks.last, blockVersion) + val fork2 = mkFork(100, miner2, blockchain, blocks.last, blockVersion) + + val fork1Delay = { + val blockForHit = + fork1 + .lift(100) + .orElse( + blockchain + .blockHeader(blockchain.height + fork1.length - 100) + .map((_, blockchain.hitSource(blockchain.height + fork1.length - 100).get)) + ) + .getOrElse(fork1.head) + + val gs = + if (vrfActivated) blockForHit._2.arr + else PoSCalculator.generationSignature(blockForHit._2, miner1.publicKey) + calcDelay(gs, fork1.head._1.header.baseTarget, miner1Balance) + } + + val fork2Delay = { + val blockForHit = + fork2 + .lift(100) + .orElse( + blockchain + .blockHeader(blockchain.height + fork2.length - 100) + .map((_, blockchain.hitSource(blockchain.height + fork2.length - 100).get)) + ) + .getOrElse(fork2.head) + + val gs = + if (vrfActivated) blockForHit._2.arr + else PoSCalculator.generationSignature(blockForHit._2, miner1.publicKey) + calcDelay(gs, fork2.head._1.header.baseTarget, miner1Balance) } + + fork1Delay shouldEqual fork2Delay + } } } "block delay validation" - { - "succeed when delay is correct" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val height = blockchain.height - val minerBalance = blockchain.effectiveBalance(miner.toAddress, 0) - val lastBlock = blockchain.lastBlockHeader.get - val block = forgeBlock(miner, blockchain, pos, blockVersion)() - - pos.validateBlockDelay(height, block.header, lastBlock.header, minerBalance) should beRight - } + "succeed when delay is correct" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val height = blockchain.height + val minerBalance = blockchain.effectiveBalance(miner.toAddress, 0) + val lastBlock = blockchain.lastBlockHeader.get + val block = forgeBlock(miner, blockchain, pos, blockVersion)() + + pos.validateBlockDelay(height, block.header, lastBlock.header, minerBalance) should beRight + } } - "failed when delay less than expected" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val height = blockchain.height - val minerBalance = blockchain.effectiveBalance(miner.toAddress, 0) - val lastBlock = blockchain.lastBlockHeader.get - val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateDelay = _ - 1) - - pos - .validateBlockDelay( - height, - block.header, - 
lastBlock.header, - minerBalance - ) should produce("less than min valid timestamp") - } + "failed when delay less than expected" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val height = blockchain.height + val minerBalance = blockchain.effectiveBalance(miner.toAddress, 0) + val lastBlock = blockchain.lastBlockHeader.get + val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateDelay = _ - 1) + + pos + .validateBlockDelay( + height, + block.header, + lastBlock.header, + minerBalance + ) should produce("less than min valid timestamp") + } } } "base target validation" - { - "succeed when BT is correct 1" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val height = blockchain.height - val lastBlock = blockchain.lastBlockHeader.get - val block = forgeBlock(miner, blockchain, pos, blockVersion)() - - pos - .validateBaseTarget( - height + 1, - block, - lastBlock.header, - blockchain.blockHeader(height - 2).map(_.header) - ) shouldBe Right(()) - } + "succeed when BT is correct 1" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val height = blockchain.height + val lastBlock = blockchain.lastBlockHeader.get + val block = forgeBlock(miner, blockchain, pos, blockVersion)() + + pos + .validateBaseTarget( + height + 1, + block, + lastBlock.header, + blockchain.blockHeader(height - 2).map(_.header) + ) shouldBe Right(()) + } } - "failed when BT less than expected" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val height = blockchain.height - val lastBlock = blockchain.lastBlockHeader.get.header - val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateBT = _ - 1) - - pos - .validateBaseTarget( - height + 1, - block, - lastBlock, - blockchain.blockHeader(height - 2).map(_.header) - ) should produce("does not match calculated baseTarget") - } + "failed when BT less than expected" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val height = blockchain.height + val lastBlock = blockchain.lastBlockHeader.get.header + val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateBT = _ - 1) + + pos + .validateBaseTarget( + height + 1, + block, + lastBlock, + blockchain.blockHeader(height - 2).map(_.header) + ) should produce("does not match calculated baseTarget") + } } - "failed when BT greater than expected" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val height = blockchain.height - val lastBlock = blockchain.lastBlockHeader.get - val block = 
forgeBlock(miner, blockchain, pos, blockVersion)(updateBT = _ + 1) - - pos - .validateBaseTarget( - height + 1, - block, - lastBlock.header, - blockchain.blockHeader(height - 2).map(_.header) - ) should produce("does not match calculated baseTarget") - } + "failed when BT greater than expected" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val height = blockchain.height + val lastBlock = blockchain.lastBlockHeader.get + val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateBT = _ + 1) + + pos + .validateBaseTarget( + height + 1, + block, + lastBlock.header, + blockchain.blockHeader(height - 2).map(_.header) + ) should produce("does not match calculated baseTarget") + } } } "generation signature validation" - { - "succeed when GS is correct" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val block = forgeBlock(miner, blockchain, pos, blockVersion)() - - pos - .validateGenerationSignature(block) - .isRight shouldBe true - } + "succeed when GS is correct" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 10, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val block = forgeBlock(miner, blockchain, pos, blockVersion)() + + pos + .validateGenerationSignature(block) + .isRight shouldBe true + } } - "failed when GS is incorrect" in forAll(generationSignatureMethods) { - case (_, blockVersion: Byte, vrfActivated: Boolean) => - withEnv(chainGen(List(ENOUGH_AMT), 100, blockVersion), vrfActivated) { - case Env(pos, blockchain, miners, _) => - val miner = miners.head - val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateGS = gs => ByteStr(gs.arr |< Random.nextBytes)) - - pos - .validateGenerationSignature( - block - ) should (if (!vrfActivated) produce("Generation signatures does not match") else produce("Could not verify VRF proof")) - } + "failed when GS is incorrect" in forAll(generationSignatureMethods) { case (_, blockVersion: Byte, vrfActivated: Boolean) => + withEnv(chainGen(List(ENOUGH_AMT), 100, blockVersion), vrfActivated) { case Env(pos, blockchain, miners, _) => + val miner = miners.head + val block = forgeBlock(miner, blockchain, pos, blockVersion)(updateGS = gs => ByteStr(gs.arr |< Random.nextBytes)) + + pos + .validateGenerationSignature( + block + ) should (if (!vrfActivated) produce("Generation signatures does not match") else produce("Could not verify VRF proof")) + } } } @@ -243,16 +226,17 @@ class FPPoSSelectorTest extends FreeSpec with WithDB with DBCacheSettings { def withEnv(gen: Time => Gen[(Seq[KeyPair], Seq[Block])], VRFActivated: Boolean = false)(f: Env => Unit): Unit = { // we are not using the db instance from WithDB trait as it should be recreated between property checks val path = Files.createTempDirectory("lvl").toAbsolutePath - val db = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true)) - val defaultWriter = TestLevelDB.withFunctionalitySettings( - db, - ignoreSpendableBalanceChanged, - TestFunctionalitySettings.Stub.copy(preActivatedFeatures = Map(BlockchainFeatures.FairPoS.id -> 0) ++ (if (VRFActivated) 
Map(BlockchainFeatures.BlockV5.id -> 0) else Map())) + val rdb = RDB.open(dbSettings.copy(directory = path.toAbsolutePath.toString)) + val defaultWriter = TestRocksDB.withFunctionalitySettings( + rdb, + TestFunctionalitySettings.Stub.copy(preActivatedFeatures = + Map(BlockchainFeatures.FairPoS.id -> 0) ++ (if (VRFActivated) Map(BlockchainFeatures.BlockV5.id -> 0) else Map()) + ) ) val settings0 = WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) val settings = settings0.copy(featuresSettings = settings0.featuresSettings.copy(autoShutdownOnUnsupportedFeature = false)) val bcu = - new BlockchainUpdaterImpl(defaultWriter, ignoreSpendableBalanceChanged, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) + new BlockchainUpdaterImpl(defaultWriter, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) val pos = PoSSelector(bcu, settings.synchronizationSettings.maxBaseTarget) try { val (accounts, blocks) = gen(ntpTime).sample.get @@ -273,7 +257,7 @@ class FPPoSSelectorTest extends FreeSpec with WithDB with DBCacheSettings { object FPPoSSelectorTest { - //noinspection ScalaStyle + // noinspection ScalaStyle implicit class KComb[A](a: A) { def |<(f: A => Unit): A = { f(a) diff --git a/node/src/test/scala/com/wavesplatform/database/LevelDBWriterSpec.scala b/node/src/test/scala/com/wavesplatform/database/RocksDBWriterSpec.scala similarity index 91% rename from node/src/test/scala/com/wavesplatform/database/LevelDBWriterSpec.scala rename to node/src/test/scala/com/wavesplatform/database/RocksDBWriterSpec.scala index 80a32b98471..6be8335c84a 100644 --- a/node/src/test/scala/com/wavesplatform/database/LevelDBWriterSpec.scala +++ b/node/src/test/scala/com/wavesplatform/database/RocksDBWriterSpec.scala @@ -15,23 +15,23 @@ import com.wavesplatform.transaction.TxHelpers import com.wavesplatform.transaction.TxValidationError.AliasDoesNotExist import com.wavesplatform.transaction.smart.SetScriptTransaction -class LevelDBWriterSpec extends FreeSpec with WithDomain { +class RocksDBWriterSpec extends FreeSpec with WithDomain { "Slice" - { "drops tail" in { - LevelDBWriter.slice(Seq(10, 7, 4), 7, 10) shouldEqual Seq(10, 7) + RocksDBWriter.slice(Seq(10, 7, 4), 7, 10) shouldEqual Seq(10, 7) } "drops head" in { - LevelDBWriter.slice(Seq(10, 7, 4), 4, 8) shouldEqual Seq(7, 4) + RocksDBWriter.slice(Seq(10, 7, 4), 4, 8) shouldEqual Seq(7, 4) } "includes Genesis" in { - LevelDBWriter.slice(Seq(10, 7), 5, 11) shouldEqual Seq(10, 7, 1) + RocksDBWriter.slice(Seq(10, 7), 5, 11) shouldEqual Seq(10, 7, 1) } } "Merge" - { "correctly joins height ranges" in { - LevelDBWriter.merge(Seq(15, 12, 3), Seq(12, 5)) shouldEqual Seq((15, 12), (12, 12), (3, 5)) - LevelDBWriter.merge(Seq(12, 5), Seq(15, 12, 3)) shouldEqual Seq((12, 15), (12, 12), (5, 3)) - LevelDBWriter.merge(Seq(8, 4), Seq(8, 4)) shouldEqual Seq((8, 8), (4, 4)) + RocksDBWriter.merge(Seq(15, 12, 3), Seq(12, 5)) shouldEqual Seq((15, 12), (12, 12), (3, 5)) + RocksDBWriter.merge(Seq(12, 5), Seq(15, 12, 3)) shouldEqual Seq((12, 15), (12, 12), (5, 3)) + RocksDBWriter.merge(Seq(8, 4), Seq(8, 4)) shouldEqual Seq((8, 8), (4, 4)) } } @@ -100,7 +100,7 @@ class LevelDBWriterSpec extends FreeSpec with WithDomain { Seq(AddrWithBalance(invoker.toAddress, 100.waves), AddrWithBalance(dapp.toAddress, 100.waves)) ) { d => val successfulInvoke = TxHelpers.invoke(dapp.toAddress, Some("foo"), Seq(CONST_BOOLEAN(true)), invoker = invoker) - val failedInvoke = TxHelpers.invoke(dapp.toAddress, Some("foo"), Seq(CONST_BOOLEAN(false)), invoker = 
invoker) + val failedInvoke = TxHelpers.invoke(dapp.toAddress, Some("foo"), Seq(CONST_BOOLEAN(false)), invoker = invoker) d.appendBlock( TxHelpers.setScript( dapp, diff --git a/node/src/test/scala/com/wavesplatform/database/TestStorageFactory.scala b/node/src/test/scala/com/wavesplatform/database/TestStorageFactory.scala index 75deb815d36..fa5d656b32f 100644 --- a/node/src/test/scala/com/wavesplatform/database/TestStorageFactory.scala +++ b/node/src/test/scala/com/wavesplatform/database/TestStorageFactory.scala @@ -1,36 +1,21 @@ package com.wavesplatform.database -import com.google.common.hash.{Funnels, BloomFilter => GBloomFilter} -import com.wavesplatform.account.Address import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.settings.WavesSettings import com.wavesplatform.state.BlockchainUpdaterImpl -import com.wavesplatform.transaction.Asset import com.wavesplatform.utils.Time -import monix.reactive.Observer -import org.iq80.leveldb.DB object TestStorageFactory { - private def wrappedFilter(use: Boolean): BloomFilter = - if (use) new Wrapper(GBloomFilter.create(Funnels.byteArrayFunnel(), 1000L)) else BloomFilter.AlwaysEmpty - def apply( settings: WavesSettings, - db: DB, + rdb: RDB, time: Time, - spendableBalanceChanged: Observer[(Address, Asset)], blockchainUpdateTriggers: BlockchainUpdateTriggers - ): (BlockchainUpdaterImpl, LevelDBWriter) = { - val useBloomFilter = settings.dbSettings.useBloomFilter - val levelDBWriter: LevelDBWriter = new LevelDBWriter(db, spendableBalanceChanged, settings.blockchainSettings, settings.dbSettings) { - override val orderFilter: BloomFilter = wrappedFilter(useBloomFilter) - override val dataKeyFilter: BloomFilter = wrappedFilter(useBloomFilter) - override val wavesBalanceFilter: BloomFilter = wrappedFilter(useBloomFilter) - override val assetBalanceFilter: BloomFilter = wrappedFilter(useBloomFilter) - } + ): (BlockchainUpdaterImpl, RocksDBWriter) = { + val rocksDBWriter: RocksDBWriter = new RocksDBWriter(rdb, settings.blockchainSettings, settings.dbSettings, 100) ( - new BlockchainUpdaterImpl(levelDBWriter, spendableBalanceChanged, settings, time, blockchainUpdateTriggers, loadActiveLeases(db, _, _)), - levelDBWriter + new BlockchainUpdaterImpl(rocksDBWriter, settings, time, blockchainUpdateTriggers, loadActiveLeases(rdb, _, _)), + rocksDBWriter ) } } diff --git a/node/src/test/scala/com/wavesplatform/db/InterferableDB.scala b/node/src/test/scala/com/wavesplatform/db/InterferableDB.scala index 04c8ea83af2..9fe7b0f8e21 100644 --- a/node/src/test/scala/com/wavesplatform/db/InterferableDB.scala +++ b/node/src/test/scala/com/wavesplatform/db/InterferableDB.scala @@ -1,44 +1,29 @@ package com.wavesplatform.db -import org.iq80.leveldb -import org.iq80.leveldb.* -import java.util.Map +import org.rocksdb.{ReadOptions, RocksDB, RocksIterator, Snapshot} + import java.util.concurrent.locks.ReentrantLock -case class InterferableDB(db: DB, startRead: ReentrantLock) extends DB { - override def get(key: Array[Byte]): Array[Byte] = { startRead.lock(); db.get(key) } - override def get(key: Array[Byte], options: ReadOptions): Array[Byte] = db.get(key, options) - override def put(key: Array[Byte], value: Array[Byte]): Unit = db.put(key, value) - override def getSnapshot: Snapshot = db.getSnapshot - override def close(): Unit = db.close() +case class InterferableDB(db: RocksDB, startRead: ReentrantLock) extends RocksDB(db.getNativeHandle) { + override def getSnapshot: Snapshot = db.getSnapshot + override def close(): Unit = db.close() - override 
def delete(key: Array[Byte]): Unit = ??? - override def write(updates: WriteBatch): Unit = ??? - override def createWriteBatch(): WriteBatch = ??? - override def put(key: Array[Byte], value: Array[Byte], options: WriteOptions): Snapshot = ??? - override def delete(key: Array[Byte], options: WriteOptions): Snapshot = ??? - override def write(updates: WriteBatch, options: WriteOptions): Snapshot = ??? - override def getApproximateSizes(ranges: leveldb.Range*): Array[Long] = ??? - override def getProperty(name: String): String = ??? - override def suspendCompactions(): Unit = ??? - override def resumeCompactions(): Unit = ??? - override def compactRange(begin: Array[Byte], end: Array[Byte]): Unit = ??? - override def iterator(): DBIterator = ??? + override def get(key: Array[Byte]): Array[Byte] = ??? + override def delete(key: Array[Byte]): Unit = ??? + override def getProperty(name: String): String = ??? + override def compactRange(begin: Array[Byte], end: Array[Byte]): Unit = ??? + override def newIterator(): RocksIterator = ??? - override def iterator(options: ReadOptions): DBIterator = new DBIterator { - private val iterator = db.iterator() + override def newIterator(options: ReadOptions): RocksIterator = new RocksIterator(db, db.newIterator(options).getNativeHandle) { startRead.lock() - override def next(): Map.Entry[Array[Byte], Array[Byte]] = iterator.next() - override def close(): Unit = iterator.close() - override def seek(key: Array[Byte]): Unit = iterator.seek(key) - override def hasNext: Boolean = iterator.hasNext + override def next(): Unit = super.next() + override def close(): Unit = super.close() + override def seek(key: Array[Byte]): Unit = super.seek(key) + override def isValid: Boolean = super.isValid - override def seekToFirst(): Unit = ??? - override def peekNext(): Map.Entry[Array[Byte], Array[Byte]] = ??? - override def hasPrev: Boolean = ??? - override def prev(): Map.Entry[Array[Byte], Array[Byte]] = ??? - override def peekPrev(): Map.Entry[Array[Byte], Array[Byte]] = ??? - override def seekToLast(): Unit = ??? + override def seekToFirst(): Unit = ??? + override def prev(): Unit = ??? + override def seekToLast(): Unit = ??? 
} } diff --git a/node/src/test/scala/com/wavesplatform/db/ScriptCacheTest.scala b/node/src/test/scala/com/wavesplatform/db/ScriptCacheTest.scala index 100655e5d2d..14a9fcbcfbb 100644 --- a/node/src/test/scala/com/wavesplatform/db/ScriptCacheTest.scala +++ b/node/src/test/scala/com/wavesplatform/db/ScriptCacheTest.scala @@ -1,6 +1,7 @@ package com.wavesplatform.db import com.typesafe.config.ConfigFactory +import com.wavesplatform.WithNewDBForEachTest import com.wavesplatform.account.KeyPair import com.wavesplatform.block.Block import com.wavesplatform.common.utils.EitherExt2 @@ -8,17 +9,16 @@ import com.wavesplatform.lagonaki.mocks.TestBlock import com.wavesplatform.lang.script.Script import com.wavesplatform.lang.v1.estimator.v2.ScriptEstimatorV2 import com.wavesplatform.settings.{TestFunctionalitySettings, WavesSettings, loadConfig} -import com.wavesplatform.state.utils.TestLevelDB -import com.wavesplatform.state.{BlockchainUpdaterImpl, _} +import com.wavesplatform.state.* +import com.wavesplatform.state.utils.TestRocksDB +import com.wavesplatform.test.FreeSpec import com.wavesplatform.transaction.smart.SetScriptTransaction import com.wavesplatform.transaction.smart.script.ScriptCompiler import com.wavesplatform.transaction.{BlockchainUpdater, GenesisTransaction} import com.wavesplatform.utils.Time -import com.wavesplatform.WithDB -import com.wavesplatform.test.FreeSpec import org.scalacheck.Gen -class ScriptCacheTest extends FreeSpec with WithDB { +class ScriptCacheTest extends FreeSpec with WithNewDBForEachTest { val CACHE_SIZE = 1 val AMOUNT = 10000000000L @@ -52,11 +52,10 @@ class ScriptCacheTest extends FreeSpec with WithDB { val setScriptTxs = (accounts zip scripts) - .map { - case (account, (script, _)) => - SetScriptTransaction - .selfSigned(1.toByte, account, Some(script), FEE, ts + accounts.length + accounts.indexOf(account) + 1) - .explicitGet() + .map { case (account, (script, _)) => + SetScriptTransaction + .selfSigned(1.toByte, account, Some(script), FEE, ts + accounts.length + accounts.indexOf(account) + 1) + .explicitGet() } val genesisBlock = TestBlock.create(genesisTxs) @@ -77,24 +76,22 @@ class ScriptCacheTest extends FreeSpec with WithDB { "return correct script after overflow" in { val scripts = mkScripts(CACHE_SIZE * 10) - withBlockchain(blockGen(scripts, _)) { - case (accounts, bc) => - val allScriptCorrect = (accounts zip scripts) - .map { - case (account, (script, _)) => - val address = account.toAddress + withBlockchain(blockGen(scripts, _)) { case (accounts, bc) => + val allScriptCorrect = (accounts zip scripts) + .map { case (account, (script, _)) => + val address = account.toAddress - val scriptFromCache = - bc.accountScript(address) - .map(_.script) - .toRight(s"No script for acc: $account") - .explicitGet() + val scriptFromCache = + bc.accountScript(address) + .map(_.script) + .toRight(s"No script for acc: $account") + .explicitGet() - scriptFromCache == script && bc.hasAccountScript(address) - } - .forall(identity) + scriptFromCache == script && bc.hasAccountScript(address) + } + .forall(identity) - allScriptCorrect shouldBe true + allScriptCorrect shouldBe true } } @@ -102,46 +99,44 @@ class ScriptCacheTest extends FreeSpec with WithDB { val scripts = mkScripts(1) val (script, complexity) = scripts.head - withBlockchain(blockGen(scripts, _)) { - case (accounts, bcu) => - val account = accounts.head - bcu.accountScript(account.toAddress) shouldEqual Some(AccountScriptInfo(account.publicKey, script, complexity)) + withBlockchain(blockGen(scripts, _)) { case 
(accounts, bcu) => + val account = accounts.head + bcu.accountScript(account.toAddress) shouldEqual Some(AccountScriptInfo(account.publicKey, script, complexity)) - val lastBlockHeader = bcu.lastBlockHeader.get + val lastBlockHeader = bcu.lastBlockHeader.get - val newScriptTx = SetScriptTransaction - .selfSigned(1.toByte, account, None, FEE, lastBlockHeader.header.timestamp + 1) - .explicitGet() + val newScriptTx = SetScriptTransaction + .selfSigned(1.toByte, account, None, FEE, lastBlockHeader.header.timestamp + 1) + .explicitGet() - val blockWithEmptyScriptTx = TestBlock - .create( - time = lastBlockHeader.header.timestamp + 2, - ref = lastBlockHeader.id(), - txs = Seq(newScriptTx) - ) + val blockWithEmptyScriptTx = TestBlock + .create( + time = lastBlockHeader.header.timestamp + 2, + ref = lastBlockHeader.id(), + txs = Seq(newScriptTx) + ) - bcu - .processBlock(blockWithEmptyScriptTx, blockWithEmptyScriptTx.header.generationSignature) - .explicitGet() + bcu + .processBlock(blockWithEmptyScriptTx, blockWithEmptyScriptTx.header.generationSignature) + .explicitGet() - bcu.accountScript(account.toAddress) shouldEqual None - bcu.removeAfter(lastBlockHeader.id()) - bcu.accountScript(account.toAddress).map(_.script) shouldEqual Some(script) + bcu.accountScript(account.toAddress) shouldEqual None + bcu.removeAfter(lastBlockHeader.id()) + bcu.accountScript(account.toAddress).map(_.script) shouldEqual Some(script) } } } - def withBlockchain(gen: Time => Gen[(Seq[KeyPair], Seq[Block])])(f: (Seq[KeyPair], Blockchain with BlockchainUpdater) => Unit): Unit = { + def withBlockchain(gen: Time => Gen[(Seq[KeyPair], Seq[Block])])(f: (Seq[KeyPair], Blockchain & BlockchainUpdater) => Unit): Unit = { val settings0 = WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) val settings = settings0.copy(featuresSettings = settings0.featuresSettings.copy(autoShutdownOnUnsupportedFeature = false)) - val defaultWriter = TestLevelDB.withFunctionalitySettings( + val defaultWriter = TestRocksDB.withFunctionalitySettings( db, - ignoreSpendableBalanceChanged, TestFunctionalitySettings.Stub ) val bcu = - new BlockchainUpdaterImpl(defaultWriter, ignoreSpendableBalanceChanged, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) + new BlockchainUpdaterImpl(defaultWriter, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) try { val (accounts, blocks) = gen(ntpTime).sample.get diff --git a/node/src/test/scala/com/wavesplatform/db/WithState.scala b/node/src/test/scala/com/wavesplatform/db/WithState.scala index d9aab2bf8e9..66289d0b1df 100644 --- a/node/src/test/scala/com/wavesplatform/db/WithState.scala +++ b/node/src/test/scala/com/wavesplatform/db/WithState.scala @@ -1,9 +1,12 @@ package com.wavesplatform.db +import com.google.common.primitives.Shorts + +import java.nio.file.Files import com.wavesplatform.account.{Address, KeyPair} import com.wavesplatform.block.Block import com.wavesplatform.common.utils.EitherExt2 -import com.wavesplatform.database.{LevelDBFactory, LevelDBWriter, TestStorageFactory, loadActiveLeases} +import com.wavesplatform.database.{KeyTags, RDB, RocksDBWriter, TestStorageFactory, loadActiveLeases} import com.wavesplatform.db.WithState.AddrWithBalance import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.features.BlockchainFeatures @@ -16,68 +19,75 @@ import com.wavesplatform.mining.MiningConstraint import com.wavesplatform.settings.{TestFunctionalitySettings as TFS, *} import com.wavesplatform.state.diffs.{BlockDiffer, 
ENOUGH_AMT} import com.wavesplatform.state.reader.CompositeBlockchain -import com.wavesplatform.state.utils.TestLevelDB +import com.wavesplatform.state.utils.TestRocksDB import com.wavesplatform.state.{Blockchain, BlockchainUpdaterImpl, Diff, NgState, Portfolio} import com.wavesplatform.test.* import com.wavesplatform.transaction.smart.script.trace.TracedResult -import com.wavesplatform.transaction.{Asset, Transaction, TxHelpers} +import com.wavesplatform.transaction.{Transaction, TxHelpers} import com.wavesplatform.{NTPTime, TestHelpers} -import monix.reactive.Observer -import monix.reactive.subjects.{PublishSubject, Subject} -import org.iq80.leveldb.{DB, Options} -import org.scalatest.Suite +import org.rocksdb.RocksDB import org.scalatest.matchers.should.Matchers +import org.scalatest.{BeforeAndAfterAll, Suite} -import java.nio.file.Files +trait WithState extends BeforeAndAfterAll with DBCacheSettings with Matchers with NTPTime { _: Suite => + protected val ignoreBlockchainUpdateTriggers: BlockchainUpdateTriggers = BlockchainUpdateTriggers.noop -trait WithState extends DBCacheSettings with Matchers with NTPTime { _: Suite => - protected val ignoreSpendableBalanceChanged: Subject[(Address, Asset), (Address, Asset)] = PublishSubject() - protected val ignoreBlockchainUpdateTriggers: BlockchainUpdateTriggers = BlockchainUpdateTriggers.noop + private val path = Files.createTempDirectory("rocks-temp").toAbsolutePath + protected val rdb = RDB.open(dbSettings.copy(directory = path.toAbsolutePath.toString)) - private[this] val currentDbInstance = new ThreadLocal[DB] - protected def db: DB = currentDbInstance.get() + private val MaxKey = Shorts.toByteArray(KeyTags.maxId.toShort) + private val MinKey = new Array[Byte](2) - protected def tempDb[A](f: DB => A): A = { - val path = Files.createTempDirectory("lvl-temp").toAbsolutePath - val db = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true)) - currentDbInstance.set(db) + protected def tempDb[A](f: RDB => A): A = { + val path = Files.createTempDirectory("rocks-temp").toAbsolutePath + val rdb = RDB.open(dbSettings.copy(directory = path.toAbsolutePath.toString)) try { - f(db) + f(rdb) } finally { - db.close() - currentDbInstance.remove() + rdb.close() TestHelpers.deleteRecursively(path) } } - protected def withLevelDBWriter[A](ws: WavesSettings)(test: LevelDBWriter => A): A = tempDb { db => - val (_, ldb) = TestStorageFactory( - ws, - db, - ntpTime, - ignoreSpendableBalanceChanged, - ignoreBlockchainUpdateTriggers - ) - test(ldb) + override protected def afterAll(): Unit = { + super.afterAll() + rdb.close() + TestHelpers.deleteRecursively(path) + } + + protected def withRocksDBWriter[A](ws: WavesSettings)(test: RocksDBWriter => A): A = { + try { + val (_, rdw) = TestStorageFactory( + ws, + rdb, + ntpTime, + ignoreBlockchainUpdateTriggers + ) + test(rdw) + } finally { + Seq(rdb.db.getDefaultColumnFamily, rdb.txHandle.handle, rdb.txMetaHandle.handle).foreach { cfh => + rdb.db.deleteRange(cfh, MinKey, MaxKey) + } + } } - protected def withLevelDBWriter[A](bs: BlockchainSettings)(test: LevelDBWriter => A): A = - withLevelDBWriter(TestSettings.Default.copy(blockchainSettings = bs))(test) + protected def withRocksDBWriter[A](bs: BlockchainSettings)(test: RocksDBWriter => A): A = + withRocksDBWriter(TestSettings.Default.copy(blockchainSettings = bs))(test) - def withLevelDBWriter[A](fs: FunctionalitySettings)(test: LevelDBWriter => A): A = - withLevelDBWriter(TestLevelDB.createTestBlockchainSettings(fs))(test) + def 
withRocksDBWriter[A](fs: FunctionalitySettings)(test: RocksDBWriter => A): A = + withRocksDBWriter(TestRocksDB.createTestBlockchainSettings(fs))(test) - def assertDiffEi(preconditions: Seq[Block], block: Block, fs: FunctionalitySettings = TFS.Enabled)( + def assertDiffEi(preconditions: Seq[Block], block: Block, fs: FunctionalitySettings = TFS.Enabled, enableExecutionLog: Boolean = false)( assertion: Either[ValidationError, Diff] => Unit - ): Unit = withLevelDBWriter(fs) { state => - assertDiffEi(preconditions, block, state)(assertion) + ): Unit = withRocksDBWriter(fs) { state => + assertDiffEi(preconditions, block, state, enableExecutionLog)(assertion) } - def assertDiffEi(preconditions: Seq[Block], block: Block, state: LevelDBWriter)( + def assertDiffEi(preconditions: Seq[Block], block: Block, state: RocksDBWriter, enableExecutionLog: Boolean)( assertion: Either[ValidationError, Diff] => Unit ): Unit = { def differ(blockchain: Blockchain, b: Block) = - BlockDiffer.fromBlock(blockchain, None, b, MiningConstraint.Unlimited, b.header.generationSignature) + BlockDiffer.fromBlock(blockchain, None, b, MiningConstraint.Unlimited, b.header.generationSignature, enableExecutionLog = enableExecutionLog) preconditions.foreach { precondition => val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = differ(state, precondition).explicitGet() @@ -87,11 +97,21 @@ trait WithState extends DBCacheSettings with Matchers with NTPTime { _: Suite => assertion(totalDiff1.map(_.diff)) } - def assertDiffEiTraced(preconditions: Seq[Block], block: Block, fs: FunctionalitySettings = TFS.Enabled)( + def assertDiffEiTraced(preconditions: Seq[Block], block: Block, fs: FunctionalitySettings = TFS.Enabled, enableExecutionLog: Boolean = false)( assertion: TracedResult[ValidationError, Diff] => Unit - ): Unit = withLevelDBWriter(fs) { state => + ): Unit = withRocksDBWriter(fs) { state => def differ(blockchain: Blockchain, b: Block) = - BlockDiffer.fromBlockTraced(blockchain, None, b, MiningConstraint.Unlimited, b.header.generationSignature, verify = true) + BlockDiffer.fromBlockTraced( + blockchain, + None, + b, + MiningConstraint.Unlimited, + b.header.generationSignature, + (_, _) => (), + verify = true, + enableExecutionLog = enableExecutionLog, + txSignParCheck = true + ) preconditions.foreach { precondition => val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = differ(state, precondition).resultE.explicitGet() @@ -103,7 +123,7 @@ trait WithState extends DBCacheSettings with Matchers with NTPTime { _: Suite => private def assertDiffAndState(preconditions: Seq[Block], block: Block, fs: FunctionalitySettings, withNg: Boolean)( assertion: (Diff, Blockchain) => Unit - ): Unit = withLevelDBWriter(fs) { state => + ): Unit = withRocksDBWriter(fs) { state => def differ(blockchain: Blockchain, prevBlock: Option[Block], b: Block): Either[ValidationError, BlockDiffer.Result] = BlockDiffer.fromBlock(blockchain, if (withNg) prevBlock else None, b, MiningConstraint.Unlimited, b.header.generationSignature) @@ -133,7 +153,7 @@ trait WithState extends DBCacheSettings with Matchers with NTPTime { _: Suite => assertDiffAndState(preconditions, block, fs, withNg = false)(assertion) def assertDiffAndState(fs: FunctionalitySettings)(test: (Seq[Transaction] => Either[ValidationError, Unit]) => Unit): Unit = - withLevelDBWriter(fs) { state => + withRocksDBWriter(fs) { state => def differ(blockchain: Blockchain, b: Block) = BlockDiffer.fromBlock(blockchain, None, b, MiningConstraint.Unlimited, 
b.header.generationSignature) @@ -169,27 +189,30 @@ trait WithDomain extends WithState { _: Suite => settings: WavesSettings = DomainPresets.SettingsFromDefaultConfig.addFeatures(BlockchainFeatures.SmartAccounts), // SmartAccounts to allow V2 transfers by default balances: Seq[AddrWithBalance] = Seq.empty, - wrapDB: DB => DB = identity + wrapDB: RocksDB => RocksDB = identity )(test: Domain => A): A = - withLevelDBWriter(settings) { blockchain => + withRocksDBWriter(settings) { blockchain => var domain: Domain = null val bcu = new BlockchainUpdaterImpl( blockchain, - Observer.stopped, settings, ntpTime, BlockchainUpdateTriggers.combined(domain.triggers), - loadActiveLeases(db, _, _) + loadActiveLeases(rdb, _, _) ) - domain = Domain(wrapDB(db), bcu, blockchain, settings) - val genesis = balances.map { case AddrWithBalance(address, amount) => - TxHelpers.genesis(address, amount) - } - if (genesis.nonEmpty) { - domain.appendBlock(genesis*) - } - try test(domain) - finally bcu.shutdown() + + try { + val wrappedDb = wrapDB(rdb.db) + assert(wrappedDb.getNativeHandle == rdb.db.getNativeHandle, "wrap function should not create new database instance") + domain = Domain(new RDB(wrappedDb, rdb.txMetaHandle, rdb.txHandle, Seq.empty), bcu, blockchain, settings) + val genesis = balances.map { case AddrWithBalance(address, amount) => + TxHelpers.genesis(address, amount) + } + if (genesis.nonEmpty) { + domain.appendBlock(genesis*) + } + test(domain) + } finally bcu.shutdown() } private val allVersions = DirectiveDictionary[StdLibVersion].all diff --git a/node/src/test/scala/com/wavesplatform/history/BlockRewardSpec.scala b/node/src/test/scala/com/wavesplatform/history/BlockRewardSpec.scala index d240f02e93a..a88bc649876 100644 --- a/node/src/test/scala/com/wavesplatform/history/BlockRewardSpec.scala +++ b/node/src/test/scala/com/wavesplatform/history/BlockRewardSpec.scala @@ -262,15 +262,15 @@ class BlockRewardSpec extends FreeSpec with WithDomain { "when NG state is empty" in forAll(ngEmptyScenario) { case (miner1, miner2, b2s, b3, m3s) => withDomain(rewardSettings) { d => b2s.foldLeft[Option[Block]](None) { (prevBlock, curBlock) => - val BlockDiffer.Result(diff, carryFee, totalFee, _, _) = differ(d.levelDBWriter, prevBlock, curBlock) - d.levelDBWriter.append(diff, carryFee, totalFee, None, curBlock.header.generationSignature, curBlock) + val BlockDiffer.Result(diff, carryFee, totalFee, _, _) = differ(d.rocksDBWriter, prevBlock, curBlock) + d.rocksDBWriter.append(diff, carryFee, totalFee, None, curBlock.header.generationSignature, curBlock) Some(curBlock) } - d.levelDBWriter.height shouldBe BlockRewardActivationHeight - 1 - d.levelDBWriter.balance(miner1.toAddress) shouldBe InitialMinerBalance + OneFee - d.db.get(Keys.blockMetaAt(Height(BlockRewardActivationHeight - 1))).map(_.totalFeeInWaves) shouldBe OneTotalFee.some - d.levelDBWriter.carryFee shouldBe OneCarryFee + d.rocksDBWriter.height shouldBe BlockRewardActivationHeight - 1 + d.rocksDBWriter.balance(miner1.toAddress) shouldBe InitialMinerBalance + OneFee + d.rdb.db.get(Keys.blockMetaAt(Height(BlockRewardActivationHeight - 1))).map(_.totalFeeInWaves) shouldBe OneTotalFee.some + d.rocksDBWriter.carryFee shouldBe OneCarryFee d.blockchainUpdater.processBlock(b3) should beRight d.blockchainUpdater.balance(miner2.toAddress) shouldBe InitialMinerBalance + InitialReward + OneCarryFee diff --git a/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest.scala 
b/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest.scala index dcd417201e0..3055e9f77ea 100644 --- a/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest.scala +++ b/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest.scala @@ -5,35 +5,32 @@ import com.wavesplatform.block.Block import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.history.Domain.BlockchainUpdaterExt -import com.wavesplatform.state.diffs._ -import com.wavesplatform.test._ -import com.wavesplatform.transaction._ -import com.wavesplatform.transaction.transfer._ +import com.wavesplatform.state.diffs.* +import com.wavesplatform.test.* +import com.wavesplatform.transaction.* +import com.wavesplatform.transaction.transfer.* import org.scalacheck.Gen class BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest extends PropSpec with DomainScenarioDrivenPropertyCheck { - import BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest._ + import BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest.* type Setup = (GenesisTransaction, TransferTransaction, TransferTransaction, TransferTransaction) property("resulting miner balance should not depend on tx distribution among blocks and microblocks") { - forAll(g(100, 5)) { - case (gen, rest) => - val finalMinerBalances = rest.map { - case (bmb: BlockAndMicroblockSequence, last: Block) => - withDomain(MicroblocksActivatedAt0WavesSettings) { d => - d.blockchainUpdater.processBlock(gen) should beRight - bmb.foreach { - case (b, mbs) => - d.blockchainUpdater.processBlock(b) should beRight - mbs.foreach(mb => d.blockchainUpdater.processMicroBlock(mb) should beRight) - } - d.blockchainUpdater.processBlock(last) - d.balance(last.header.generator.toAddress) - } + forAll(g(100, 5)) { case (gen, rest) => + val finalMinerBalances = rest.map { case (bmb: BlockAndMicroblockSequence, last: Block) => + withDomain(MicroblocksActivatedAt0WavesSettings) { d => + d.blockchainUpdater.processBlock(gen) should beRight + bmb.foreach { case (b, mbs) => + d.blockchainUpdater.processBlock(b) should beRight + mbs.foreach(mb => d.blockchainUpdater.processMicroBlock(mb) should beRight) + } + d.blockchainUpdater.processBlock(last) + d.balance(last.header.generator.toAddress) } - finalMinerBalances.toSet.size shouldBe 1 + } + finalMinerBalances.toSet.size shouldBe 1 } } @@ -47,18 +44,17 @@ class BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest extends Prop genesis: GenesisTransaction = GenesisTransaction.create(master.toAddress, ENOUGH_AMT, ts).explicitGet() payment: TransferTransaction = createWavesTransfer(master, master.toAddress, amt, fee, ts).explicitGet() } yield (miner, genesis, payment, ts) - scenario(preconditionsAndPayments, MicroblocksActivatedAt0WavesSettings) { - case (domain, (miner, genesis, payment, ts)) => - val genBlock = buildBlockOfTxs(randomSig, Seq(genesis)) - val (base, micros) = chainBaseAndMicro(genBlock.id(), Seq.empty, Seq(Seq(payment)), miner, 3, ts) - val emptyBlock = customBuildBlockOfTxs(micros.last.totalResBlockSig, Seq.empty, miner, 3, ts) - domain.blockchainUpdater.processBlock(genBlock) should beRight - domain.blockchainUpdater.processBlock(base) should beRight - domain.blockchainUpdater.processMicroBlock(micros.head) should beRight - domain.blockchainUpdater.processBlock(emptyBlock) should beRight - - 
domain.balance(miner.toAddress) shouldBe payment.fee.value - domain.balance(genesis.recipient) shouldBe (genesis.amount.value - payment.fee.value) + scenario(preconditionsAndPayments, MicroblocksActivatedAt0WavesSettings) { case (domain, (miner, genesis, payment, ts)) => + val genBlock = buildBlockOfTxs(randomSig, Seq(genesis)) + val (base, micros) = chainBaseAndMicro(genBlock.id(), Seq.empty, Seq(Seq(payment)), miner, 3, ts) + val emptyBlock = customBuildBlockOfTxs(micros.last.totalResBlockSig, Seq.empty, miner, 3, ts) + domain.blockchainUpdater.processBlock(genBlock) should beRight + domain.blockchainUpdater.processBlock(base) should beRight + domain.blockchainUpdater.processMicroBlock(micros.head) should beRight + domain.blockchainUpdater.processBlock(emptyBlock) should beRight + + domain.balance(miner.toAddress) shouldBe payment.fee.value + domain.balance(genesis.recipient) shouldBe (genesis.amount.value - payment.fee.value) } } @@ -78,17 +74,16 @@ class BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest extends Prop .grouped(microBlockCount) .toSeq } yield (miner, genesis, microBlockTxs, ts) - scenario(preconditionsAndPayments, MicroblocksActivatedAt0WavesSettings) { - case (domain, (miner, genesis, microBlockTxs, ts)) => - val genBlock = buildBlockOfTxs(randomSig, Seq(genesis)) - val (base, micros) = chainBaseAndMicro(genBlock.id(), Seq.empty, microBlockTxs, miner, 3, ts) - val emptyBlock = customBuildBlockOfTxs(micros.last.totalResBlockSig, Seq.empty, miner, 3, ts) - domain.blockchainUpdater.processBlock(genBlock) should beRight - domain.blockchainUpdater.processBlock(base) should beRight - micros.foreach(domain.blockchainUpdater.processMicroBlock(_) should beRight) - domain.blockchainUpdater.processBlock(emptyBlock) should beRight - - domain.levelDBWriter.lastBlock.get.transactionData shouldBe microBlockTxs.flatten + scenario(preconditionsAndPayments, MicroblocksActivatedAt0WavesSettings) { case (domain, (miner, genesis, microBlockTxs, ts)) => + val genBlock = buildBlockOfTxs(randomSig, Seq(genesis)) + val (base, micros) = chainBaseAndMicro(genBlock.id(), Seq.empty, microBlockTxs, miner, 3, ts) + val emptyBlock = customBuildBlockOfTxs(micros.last.totalResBlockSig, Seq.empty, miner, 3, ts) + domain.blockchainUpdater.processBlock(genBlock) should beRight + domain.blockchainUpdater.processBlock(base) should beRight + micros.foreach(domain.blockchainUpdater.processMicroBlock(_) should beRight) + domain.blockchainUpdater.processBlock(emptyBlock) should beRight + + domain.rocksDBWriter.lastBlock.get.transactionData shouldBe microBlockTxs.flatten } } @@ -181,10 +176,9 @@ object BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest { def take(txs: Seq[Transaction], sizes: BlockAndMicroblockSize): ((Seq[Transaction], Seq[Seq[Transaction]]), Seq[Transaction]) = { val (blockAmt, microsAmts) = sizes val (blockTxs, rest) = txs.splitAt(blockAmt) - val (reversedMicroblockTxs, res) = microsAmts.foldLeft((Seq.empty[Seq[Transaction]], rest)) { - case ((acc, pool), amt) => - val (step, next) = pool.splitAt(amt) - (step +: acc, next) + val (reversedMicroblockTxs, res) = microsAmts.foldLeft((Seq.empty[Seq[Transaction]], rest)) { case ((acc, pool), amt) => + val (step, next) = pool.splitAt(amt) + (step +: acc, next) } ((blockTxs, reversedMicroblockTxs.reverse), res) } @@ -215,11 +209,10 @@ object BlockchainUpdaterBlockMicroblockSequencesSameTransactionsTest { timestamp: Long ): BlockAndMicroblockSequence = { sizes - .foldLeft((Seq.empty[BlockAndMicroblocks], txs)) { - case ((acc, 
rest), s) => - val prev = acc.headOption.map(bestRef).getOrElse(initial) - val (step, next) = stepR(rest, s, prev, signer, version, timestamp) - (step +: acc, next) + .foldLeft((Seq.empty[BlockAndMicroblocks], txs)) { case ((acc, rest), s) => + val prev = acc.headOption.map(bestRef).getOrElse(initial) + val (step, next) = stepR(rest, s, prev, signer, version, timestamp) + (step +: acc, next) } ._1 .reverse diff --git a/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterNFTTest.scala b/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterNFTTest.scala index a83b328ed02..a3885c905df 100644 --- a/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterNFTTest.scala +++ b/node/src/test/scala/com/wavesplatform/history/BlockchainUpdaterNFTTest.scala @@ -90,7 +90,7 @@ class BlockchainUpdaterNFTTest extends PropSpec with DomainScenarioDrivenPropert d.nftList(secondAccount) shouldBe Nil val persistedNfts = Seq.newBuilder[IssuedAsset] - d.db.readOnly { ro => + d.rdb.db.readOnly { ro => val addressId = ro.get(Keys.addressId(firstAccount)).get ro.iterateOver(KeyTags.NftPossession.prefixBytes ++ addressId.toByteArray) { e => persistedNfts += IssuedAsset(ByteStr(e.getKey.takeRight(32))) diff --git a/node/src/test/scala/com/wavesplatform/history/Domain.scala b/node/src/test/scala/com/wavesplatform/history/Domain.scala index ab76c3da2ed..eacd5fddd03 100644 --- a/node/src/test/scala/com/wavesplatform/history/Domain.scala +++ b/node/src/test/scala/com/wavesplatform/history/Domain.scala @@ -9,7 +9,7 @@ import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.consensus.nxt.NxtLikeConsensusBlockData import com.wavesplatform.consensus.{PoSCalculator, PoSSelector} -import com.wavesplatform.database.{DBExt, Keys, LevelDBWriter} +import com.wavesplatform.database.{DBExt, Keys, RDB, RocksDBWriter} import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.features.BlockchainFeatures.{BlockV5, RideV6} import com.wavesplatform.lagonaki.mocks.TestBlock @@ -26,7 +26,7 @@ import com.wavesplatform.utx.UtxPoolImpl import com.wavesplatform.wallet.Wallet import com.wavesplatform.{Application, TestValues, crypto, database} import monix.execution.Scheduler.Implicits.global -import org.iq80.leveldb.DB +import org.rocksdb.RocksDB import org.scalatest.matchers.should.Matchers.* import play.api.libs.json.{JsNull, JsValue, Json} @@ -36,7 +36,7 @@ import scala.concurrent.duration.* import scala.util.Try import scala.util.control.NonFatal -case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWriter: LevelDBWriter, settings: WavesSettings) { +case class Domain(rdb: RDB, blockchainUpdater: BlockchainUpdaterImpl, rocksDBWriter: RocksDBWriter, settings: WavesSettings) { import Domain.* val blockchain: BlockchainUpdaterImpl = blockchainUpdater @@ -49,6 +49,9 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite val transactionDiffer: Transaction => TracedResult[ValidationError, Diff] = TransactionDiffer(blockchain.lastBlockTimestamp, System.currentTimeMillis())(blockchain, _) + val transactionDifferWithLog: Transaction => TracedResult[ValidationError, Diff] = + TransactionDiffer(blockchain.lastBlockTimestamp, System.currentTimeMillis(), enableExecutionLog = true)(blockchain, _) + def createDiffE(tx: Transaction): Either[ValidationError, Diff] = transactionDiffer(tx).resultE def createDiff(tx: Transaction): Diff = createDiffE(tx).explicitGet() @@ -87,11 +90,11 @@ case class 
Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite lazy val transactions: CommonTransactionsApi = CommonTransactionsApi( blockchainUpdater.bestLiquidDiff.map(diff => Height(blockchainUpdater.height) -> diff), - db, + rdb, blockchain, utxPool, tx => Future.successful(utxPool.putIfNew(tx)), - Application.loadBlockAt(db, blockchain) + Application.loadBlockAt(rdb, blockchain) ) } @@ -117,12 +120,12 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite } def solidStateHeight: Int = { - db.get(Keys.height) + rdb.db.get(Keys.height) } def solidStateSnapshot(): SortedMap[String, String] = { val builder = SortedMap.newBuilder[String, String] - db.iterateOver(Array.emptyByteArray)(e => + rdb.db.iterateOver(Array.emptyByteArray, None)(e => builder.addOne(EthEncoding.toHexString(e.getKey).drop(2) -> EthEncoding.toHexString(e.getValue).drop(2)) ) builder.result() @@ -131,7 +134,7 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite def lastBlock: Block = { blockchainUpdater.lastBlockId .flatMap(blockchainUpdater.liquidBlock) - .orElse(levelDBWriter.lastBlock) + .orElse(rocksDBWriter.lastBlock) .getOrElse(TestBlock.create(Nil)) } @@ -157,26 +160,28 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite def balance(address: Address): Long = blockchainUpdater.balance(address) def balance(address: Address, asset: Asset): Long = blockchainUpdater.balance(address, asset) - def nftList(address: Address): Seq[(IssuedAsset, AssetDescription)] = db.withResource { resource => + def nftList(address: Address): Seq[(IssuedAsset, AssetDescription)] = rdb.db.withResource { resource => AddressPortfolio .nftIterator(resource, address, blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), None, blockchainUpdater.assetDescription) .toSeq + .flatten } def addressTransactions(address: Address, from: Option[ByteStr] = None): Seq[(Height, Transaction)] = AddressTransactions .allAddressTransactions( - db, + rdb, blockchainUpdater.bestLiquidDiff.map(diff => Height(blockchainUpdater.height) -> diff), address, None, Set.empty, from ) - .map { case (m, tx) => m.height -> tx } - .toSeq + .map { case (m, tx, _) => m.height -> tx } + .toListL + .runSyncUnsafe() - def portfolio(address: Address): Seq[(IssuedAsset, Long)] = Domain.portfolio(address, db, blockchainUpdater) + def portfolio(address: Address): Seq[(IssuedAsset, Long)] = Domain.portfolio(address, rdb.db, blockchainUpdater) def appendAndAssertSucceed(txs: Transaction*): Block = { val block = createBlock(Block.PlainBlockVersion, txs) @@ -228,7 +233,7 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite } def appendBlock(txs: Transaction*): Block = - appendBlock(Block.PlainBlockVersion, txs: _*) + appendBlock(Block.PlainBlockVersion, txs*) def appendKeyBlock(ref: Option[ByteStr] = None): Block = { val block = createBlock(Block.NgBlockVersion, Nil, ref.orElse(Some(lastBlockId))) @@ -260,7 +265,7 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite } def appendMicroBlock(txs: Transaction*): BlockId = { - val mb = createMicroBlock(txs: _*) + val mb = createMicroBlock(txs*) blockchainUpdater.processMicroBlock(mb).explicitGet() } @@ -338,19 +343,21 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite } val blocksApi: CommonBlocksApi = { - def loadBlockMetaAt(db: DB, blockchainUpdater: BlockchainUpdaterImpl)(height: Int): Option[BlockMeta] = - 
blockchainUpdater.liquidBlockMeta.filter(_ => blockchainUpdater.height == height).orElse(db.get(Keys.blockMetaAt(Height(height)))) + def loadBlockMetaAt(db: RocksDB, blockchainUpdater: BlockchainUpdaterImpl)(height: Int): Option[BlockMeta] = + blockchainUpdater.liquidBlockMeta + .filter(_ => blockchainUpdater.height == height) + .orElse(db.get(Keys.blockMetaAt(Height(height))).flatMap(BlockMeta.fromPb)) - def loadBlockInfoAt(db: DB, blockchainUpdater: BlockchainUpdaterImpl)( + def loadBlockInfoAt(db: RocksDB, blockchainUpdater: BlockchainUpdaterImpl)( height: Int ): Option[(BlockMeta, Seq[(TxMeta, Transaction)])] = loadBlockMetaAt(db, blockchainUpdater)(height).map { meta => meta -> blockchainUpdater .liquidTransactions(meta.id) - .getOrElse(db.readOnly(ro => database.loadTransactions(Height(height), ro))) + .getOrElse(database.loadTransactions(Height(height), rdb)) } - CommonBlocksApi(blockchainUpdater, loadBlockMetaAt(db, blockchainUpdater), loadBlockInfoAt(db, blockchainUpdater)) + CommonBlocksApi(blockchainUpdater, loadBlockMetaAt(rdb.db, blockchainUpdater), loadBlockInfoAt(rdb.db, blockchainUpdater)) } // noinspection ScalaStyle @@ -393,7 +400,7 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite val transactionsApi: CommonTransactionsApi = CommonTransactionsApi( blockchainUpdater.bestLiquidDiff.map(Height(blockchainUpdater.height) -> _), - db, + rdb, blockchain, utxPool, _ => Future.successful(TracedResult(Right(true))), @@ -401,20 +408,20 @@ case class Domain(db: DB, blockchainUpdater: BlockchainUpdaterImpl, levelDBWrite ) val accountsApi: CommonAccountsApi = CommonAccountsApi( - () => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), - db, + () => blockchainUpdater.getCompositeBlockchain, + rdb, blockchain ) val assetsApi: CommonAssetsApi = CommonAssetsApi( () => blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), - db, + rdb.db, blockchain ) } object Domain { - implicit class BlockchainUpdaterExt[A <: BlockchainUpdater with Blockchain](bcu: A) { + implicit class BlockchainUpdaterExt[A <: BlockchainUpdater & Blockchain](bcu: A) { def processBlock(block: Block): Either[ValidationError, Seq[Diff]] = { val hitSource = if (bcu.height == 0 || !bcu.activatedFeaturesAt(bcu.height + 1).contains(BlockV5.id)) @@ -427,7 +434,7 @@ object Domain { } } - def portfolio(address: Address, db: DB, blockchainUpdater: BlockchainUpdaterImpl): Seq[(IssuedAsset, Long)] = db.withResource { resource => + def portfolio(address: Address, db: RocksDB, blockchainUpdater: BlockchainUpdaterImpl): Seq[(IssuedAsset, Long)] = db.withResource { resource => AddressPortfolio .assetBalanceIterator( resource, @@ -436,5 +443,6 @@ object Domain { id => blockchainUpdater.assetDescription(id).exists(!_.nft) ) .toSeq + .flatten } } diff --git a/node/src/test/scala/com/wavesplatform/history/LeasingExpirySpec.scala b/node/src/test/scala/com/wavesplatform/history/LeasingExpirySpec.scala index afd844bcdb6..514622cf529 100644 --- a/node/src/test/scala/com/wavesplatform/history/LeasingExpirySpec.scala +++ b/node/src/test/scala/com/wavesplatform/history/LeasingExpirySpec.scala @@ -10,7 +10,7 @@ import com.wavesplatform.history.Domain.BlockchainUpdaterExt import com.wavesplatform.lagonaki.mocks.TestBlock import com.wavesplatform.settings.{Constants, FunctionalitySettings} import com.wavesplatform.state.{Blockchain, LeaseBalance} -import com.wavesplatform.test._ +import com.wavesplatform.test.* import com.wavesplatform.transaction.GenesisTransaction import 
com.wavesplatform.transaction.lease.LeaseTransaction import org.scalacheck.Gen @@ -22,10 +22,16 @@ class LeasingExpirySpec extends FreeSpec with WithDomain { private val leasingSettings = settings.copy( blockchainSettings = DefaultBlockchainSettings.copy( - functionalitySettings = FunctionalitySettings(featureCheckBlocksPeriod = 100, blocksForFeatureActivation = 80, preActivatedFeatures = Map( + functionalitySettings = FunctionalitySettings( + featureCheckBlocksPeriod = 100, + blocksForFeatureActivation = 80, + preActivatedFeatures = Map( BlockchainFeatures.SmartAccounts.id -> 0, BlockchainFeatures.LeaseExpiration.id -> LeasingExpiryActivationHeight - ), doubleFeaturesPeriodsAfterHeight = Int.MaxValue, leaseExpiration = LeasingValidity) + ), + doubleFeaturesPeriodsAfterHeight = Int.MaxValue, + leaseExpiration = LeasingValidity + ) ) ) @@ -82,8 +88,8 @@ class LeasingExpirySpec extends FreeSpec with WithDomain { private def leaseRecipients(blocks: Seq[Block]): Set[AddressOrAlias] = blocks .flatMap(_.transactionData) - .collect { - case lt: LeaseTransaction => lt.recipient + .collect { case lt: LeaseTransaction => + lt.recipient } .toSet @@ -96,31 +102,30 @@ class LeasingExpirySpec extends FreeSpec with WithDomain { } yield (lessor, alias, genesisBlock, b2, Seq(b3, b4, b5)) "Upon feature activation" - { - "expired leases are cancelled" in forAll(simpleScenario) { - case (lessor, alias, genesis, b, emptyBlocks) => - withDomain(leasingSettings) { d => - d.blockchainUpdater.processBlock(genesis) should beRight - ensureNoLeases(d.blockchainUpdater, Set(lessor.toAddress, alias)) - d.blockchainUpdater.processBlock(b) should beRight - val leasesToBeCancelled = b.transactionData.collect { case lt: LeaseTransaction => lt } - leasesToBeCancelled.foreach { - case lt: LeaseTransaction => d.blockchainUpdater.leaseDetails(lt.id()).map(_.isActive) shouldBe Some(true) - case _ => - } - emptyBlocks.take(2).foreach(b => d.blockchainUpdater.processBlock(b) should beRight) - // activation height: leases should still be active - d.blockchainUpdater.height shouldEqual LeasingExpiryActivationHeight - // balance snapshots, however, already reflect cancelled leases - for (a <- leasesToBeCancelled.map(lt => d.blockchainUpdater.resolveAlias(lt.recipient).explicitGet())) { - d.blockchainUpdater.balanceSnapshots(a, 1, d.blockchainUpdater.lastBlockId).last.leaseIn shouldBe 0L - } - // once new block is appended, leases become cancelled - d.blockchainUpdater.processBlock(emptyBlocks.last) - leasesToBeCancelled.foreach { - case lt: LeaseTransaction => d.blockchainUpdater.leaseDetails(lt.id()).map(_.isActive) shouldBe Some(false) - case _ => - } + "expired leases are cancelled" in forAll(simpleScenario) { case (lessor, alias, genesis, b, emptyBlocks) => + withDomain(leasingSettings) { d => + d.blockchainUpdater.processBlock(genesis) should beRight + ensureNoLeases(d.blockchainUpdater, Set(lessor.toAddress, alias)) + d.blockchainUpdater.processBlock(b) should beRight + val leasesToBeCancelled = b.transactionData.collect { case lt: LeaseTransaction => lt } + leasesToBeCancelled.foreach { + case lt: LeaseTransaction => d.blockchainUpdater.leaseDetails(lt.id()).map(_.isActive) shouldBe Some(true) + case _ => + } + emptyBlocks.take(2).foreach(b => d.blockchainUpdater.processBlock(b) should beRight) + // activation height: leases should still be active + d.blockchainUpdater.height shouldEqual LeasingExpiryActivationHeight + // balance snapshots, however, already reflect cancelled leases + for (a <- leasesToBeCancelled.map(lt => 
d.blockchainUpdater.resolveAlias(lt.recipient).explicitGet())) { + d.blockchainUpdater.balanceSnapshots(a, 1, d.blockchainUpdater.lastBlockId).last.leaseIn shouldBe 0L } + // once new block is appended, leases become cancelled + d.blockchainUpdater.processBlock(emptyBlocks.last) + leasesToBeCancelled.foreach { + case lt: LeaseTransaction => d.blockchainUpdater.leaseDetails(lt.id()).map(_.isActive) shouldBe Some(false) + case _ => + } + } } } @@ -184,31 +189,30 @@ class LeasingExpirySpec extends FreeSpec with WithDomain { b7 = mkEmptyBlock(b6.id()) } yield (alias, Seq(genesisBlock, b2, b3, b4, b5, b6, b7)) - "should be applied only for expired leases" ignore forAll(manyLeases) { - case (alias, blocks) => - withDomain(leasingSettings) { d => - import d.blockchainUpdater + "should be applied only for expired leases" ignore forAll(manyLeases) { case (alias, blocks) => + withDomain(leasingSettings) { d => + import d.blockchainUpdater - // blocks before activation - blocks.slice(0, 3).foreach(b => blockchainUpdater.processBlock(b) should beRight) - ensureEffectiveBalance(blockchainUpdater, alias, 0L) + // blocks before activation + blocks.slice(0, 3).foreach(b => blockchainUpdater.processBlock(b) should beRight) + ensureEffectiveBalance(blockchainUpdater, alias, 0L) - // block at activation height with lease - blockchainUpdater.processBlock(blocks(3)) should beRight - ensureEffectiveBalance(blockchainUpdater, alias, amount) + // block at activation height with lease + blockchainUpdater.processBlock(blocks(3)) should beRight + ensureEffectiveBalance(blockchainUpdater, alias, amount) - // block after activation and before cancellation, including new lease - blockchainUpdater.processBlock(blocks(4)) - ensureEffectiveBalance(blockchainUpdater, alias, amount + halfAmount) + // block after activation and before cancellation, including new lease + blockchainUpdater.processBlock(blocks(4)) + ensureEffectiveBalance(blockchainUpdater, alias, amount + halfAmount) - // block at height of first lease cancellation, effective balance reflects it - blockchainUpdater.processBlock(blocks(5)) - ensureEffectiveBalance(blockchainUpdater, alias, halfAmount) + // block at height of first lease cancellation, effective balance reflects it + blockchainUpdater.processBlock(blocks(5)) + ensureEffectiveBalance(blockchainUpdater, alias, halfAmount) - // block at height of second lease cancellation, effective balance reflects it - blockchainUpdater.processBlock(blocks(6)) - ensureEffectiveBalance(blockchainUpdater, alias, 0L) - } + // block at height of second lease cancellation, effective balance reflects it + blockchainUpdater.processBlock(blocks(6)) + ensureEffectiveBalance(blockchainUpdater, alias, 0L) + } } } @@ -266,29 +270,28 @@ class LeasingExpirySpec extends FreeSpec with WithDomain { b6 = mkEmptyBlock(b5.id()) } yield (miner, Seq(genesisBlock, b2, b3, b4, b5, b6)) - "can generate block where lease is cancelled" ignore forAll(blockWhereLeaseCancelled) { - case (miner, blocks) => - withDomain(leasingSettings) { d => - import d.blockchainUpdater + "can generate block where lease is cancelled" ignore forAll(blockWhereLeaseCancelled) { case (miner, blocks) => + withDomain(leasingSettings) { d => + import d.blockchainUpdater - // blocks before activation - blocks.slice(0, 3).foreach(b => blockchainUpdater.processBlock(b) should beRight) - ensureEffectiveBalance(blockchainUpdater, miner, 0L) + // blocks before activation + blocks.slice(0, 3).foreach(b => blockchainUpdater.processBlock(b) should beRight) + 
ensureEffectiveBalance(blockchainUpdater, miner, 0L) - // effective balance reflects new leases - blockchainUpdater.processBlock(blocks(3)) should beRight - ensureEffectiveBalance(blockchainUpdater, miner, amount) + // effective balance reflects new leases + blockchainUpdater.processBlock(blocks(3)) should beRight + ensureEffectiveBalance(blockchainUpdater, miner, amount) - // blocks after activation and before cancellation - blockchainUpdater.processBlock(blocks(4)) should beRight + // blocks after activation and before cancellation + blockchainUpdater.processBlock(blocks(4)) should beRight - // miner allowed to generate block at cancellation height - ensureEffectiveBalance(blockchainUpdater, miner, amount) - blockchainUpdater.processBlock(blocks(5)) should beRight + // miner allowed to generate block at cancellation height + ensureEffectiveBalance(blockchainUpdater, miner, amount) + blockchainUpdater.processBlock(blocks(5)) should beRight - // miner not allowed to generate block after cancellation - ensureEffectiveBalance(blockchainUpdater, miner, 0L) - } + // miner not allowed to generate block after cancellation + ensureEffectiveBalance(blockchainUpdater, miner, 0L) + } } } } diff --git a/node/src/test/scala/com/wavesplatform/http/AddressRouteSpec.scala b/node/src/test/scala/com/wavesplatform/http/AddressRouteSpec.scala index 57c46bb9196..affc497633d 100644 --- a/node/src/test/scala/com/wavesplatform/http/AddressRouteSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/AddressRouteSpec.scala @@ -80,7 +80,10 @@ class AddressRouteSpec extends RouteSpec("/addresses") with PathMockFactory with routePath("/balance/{address}/{confirmations}") in withDomain(balances = Seq(AddrWithBalance(TxHelpers.defaultAddress))) { d => val route = addressApiRoute - .copy(blockchain = d.blockchainUpdater, commonAccountsApi = CommonAccountsApi(() => d.liquidDiff, d.db, d.blockchainUpdater)) + .copy( + blockchain = d.blockchainUpdater, + commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.getCompositeBlockchain, d.rdb, d.blockchainUpdater) + ) .route val address = TxHelpers.signer(1).toAddress @@ -95,6 +98,44 @@ class AddressRouteSpec extends RouteSpec("/addresses") with PathMockFactory with } } + routePath("/balance") in withDomain(balances = Seq(AddrWithBalance(TxHelpers.defaultAddress))) { d => + val route = + addressApiRoute + .copy( + blockchain = d.blockchainUpdater, + commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.getCompositeBlockchain, d.rdb, d.blockchainUpdater) + ) + .route + val address = TxHelpers.signer(1).toAddress + val transferCount = 5 + + val issue = TxHelpers.issue(TxHelpers.defaultSigner) + d.appendBlock(issue) + + for (_ <- 1 until transferCount) + d.appendBlock( + TxHelpers.transfer(TxHelpers.defaultSigner, address, amount = 1), + TxHelpers.transfer(TxHelpers.defaultSigner, address, asset = issue.asset, amount = 2) + ) + + Get(routePath(s"/balance?address=$address&height=$transferCount")) ~> route ~> check { + responseAs[JsValue] shouldBe Json.arr(Json.obj("id" -> address.toString, "balance" -> (transferCount - 2))) + } + Post(routePath(s"/balance"), Json.obj("height" -> transferCount, "addresses" -> Seq(address.toString))) ~> route ~> check { + responseAs[JsValue] shouldBe Json.arr(Json.obj("id" -> address.toString, "balance" -> (transferCount - 2))) + } + + Get(routePath(s"/balance?address=$address&height=$transferCount&asset=${issue.assetId}")) ~> route ~> check { + responseAs[JsValue] shouldBe Json.arr(Json.obj("id" -> address.toString, "balance" 
-> 2 * (transferCount - 2))) + } + Post( + routePath(s"/balance"), + Json.obj("height" -> transferCount, "addresses" -> Seq(address.toString), "asset" -> issue.assetId) + ) ~> route ~> check { + responseAs[JsValue] shouldBe Json.arr(Json.obj("id" -> address.toString, "balance" -> 2 * (transferCount - 2))) + } + } + routePath("/seq/{from}/{to}") in { val r1 = Get(routePath("/seq/1/4")) ~> route ~> check { val response = responseAs[Seq[String]] @@ -403,7 +444,10 @@ class AddressRouteSpec extends RouteSpec("/addresses") with PathMockFactory with val route = addressApiRoute - .copy(blockchain = d.blockchainUpdater, commonAccountsApi = CommonAccountsApi(() => d.liquidDiff, d.db, d.blockchainUpdater)) + .copy( + blockchain = d.blockchainUpdater, + commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.getCompositeBlockchain, d.rdb, d.blockchainUpdater) + ) .route val requestBody = Json.obj("keys" -> Seq("test")) @@ -451,7 +495,10 @@ class AddressRouteSpec extends RouteSpec("/addresses") with PathMockFactory with val route = addressApiRoute - .copy(blockchain = d.blockchainUpdater, commonAccountsApi = CommonAccountsApi(() => d.liquidDiff, d.db, d.blockchainUpdater)) + .copy( + blockchain = d.blockchainUpdater, + commonAccountsApi = CommonAccountsApi(() => d.blockchainUpdater.getCompositeBlockchain, d.rdb, d.blockchainUpdater) + ) .route val maxLimitKeys = Seq.fill(addressApiRoute.settings.dataKeysRequestLimit)(key) diff --git a/node/src/test/scala/com/wavesplatform/http/AssetsBroadcastRouteSpec.scala b/node/src/test/scala/com/wavesplatform/http/AssetsBroadcastRouteSpec.scala index f22f80cdd4e..92378fff96d 100644 --- a/node/src/test/scala/com/wavesplatform/http/AssetsBroadcastRouteSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/AssetsBroadcastRouteSpec.scala @@ -11,6 +11,7 @@ import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.{Base58, EitherExt2} import com.wavesplatform.state.Blockchain import com.wavesplatform.state.diffs.TransactionDiffer.TransactionValidationError +import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.test.* import com.wavesplatform.transaction.TxValidationError.GenericError import com.wavesplatform.transaction.assets.IssueTransaction @@ -31,6 +32,7 @@ class AssetsBroadcastRouteSpec extends RouteSpec("/assets/broadcast/") with Requ stub[Wallet], DummyTransactionPublisher.rejecting(tx => TransactionValidationError(GenericError("foo"), tx)), stub[Blockchain], + stub[() => CompositeBlockchain], stub[Time], stub[CommonAccountsApi], stub[CommonAssetsApi], @@ -176,6 +178,7 @@ class AssetsBroadcastRouteSpec extends RouteSpec("/assets/broadcast/") with Requ stub[Wallet], DummyTransactionPublisher.accepting, stub[Blockchain], + stub[() => CompositeBlockchain], stub[Time], stub[CommonAccountsApi], stub[CommonAssetsApi], diff --git a/node/src/test/scala/com/wavesplatform/http/AssetsRouteSpec.scala b/node/src/test/scala/com/wavesplatform/http/AssetsRouteSpec.scala index edc1da8154c..1004b002873 100644 --- a/node/src/test/scala/com/wavesplatform/http/AssetsRouteSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/AssetsRouteSpec.scala @@ -29,7 +29,7 @@ import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.TxHelpers.* import com.wavesplatform.transaction.assets.IssueTransaction import com.wavesplatform.transaction.smart.SetScriptTransaction -import com.wavesplatform.transaction.transfer.* +import com.wavesplatform.transaction.transfer.{MassTransferTransaction, 
TransferTransaction} import com.wavesplatform.transaction.utils.EthTxGenerator import com.wavesplatform.transaction.utils.EthTxGenerator.Arg import com.wavesplatform.transaction.{AssetIdLength, GenesisTransaction, Transaction, TxHelpers, TxNonNegativeAmount, TxVersion} @@ -56,6 +56,7 @@ class AssetsRouteSpec extends RouteSpec("/assets") with Eventually with RestAPIS testWallet, DummyTransactionPublisher.accepting, d.blockchain, + () => d.blockchain.getCompositeBlockchain, TestTime(), d.accountsApi, d.assetsApi, @@ -592,7 +593,7 @@ class AssetsRouteSpec extends RouteSpec("/assets") with Eventually with RestAPIS val tx2 = issue(secondSigner, 1, name = s"NFT$i", reissuable = false) (i, Seq(tx1, tx2)) } - d.appendBlock(txs.flatMap(_._2): _*) + d.appendBlock(txs.flatMap(_._2)*) txs.map(_._1) } Seq(defaultAddress, secondAddress).foreach { address => diff --git a/node/src/test/scala/com/wavesplatform/http/DebugApiRouteSpec.scala b/node/src/test/scala/com/wavesplatform/http/DebugApiRouteSpec.scala index 911a73025b7..e824fbe0bac 100644 --- a/node/src/test/scala/com/wavesplatform/http/DebugApiRouteSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/DebugApiRouteSpec.scala @@ -128,6 +128,47 @@ class DebugApiRouteSpec } } + routePath("/balances/history/{address}") - { + val acc1 = TxHelpers.defaultSigner + val acc2 = TxHelpers.secondSigner + + val initBalance = 5.waves + + "works" in withDomain(balances = Seq(AddrWithBalance(acc2.toAddress, initBalance), AddrWithBalance(acc1.toAddress))) { d => + val tx1 = TxHelpers.transfer(acc2, acc1.toAddress, 1.waves) + val tx2 = TxHelpers.transfer(acc1, acc2.toAddress, 3.waves) + val tx3 = TxHelpers.transfer(acc2, acc1.toAddress, 4.waves) + val tx4 = TxHelpers.transfer(acc1, acc2.toAddress, 5.waves) + + d.appendBlock(tx1) + d.appendBlock(tx2) + d.appendBlock() + d.appendBlock(tx3) + d.appendBlock(tx4) + d.appendBlock() + + val expectedBalance2 = initBalance - tx1.fee.value - tx1.amount.value + val expectedBalance3 = expectedBalance2 + tx2.amount.value + val expectedBalance5 = expectedBalance3 - tx3.fee.value - tx3.amount.value + val expectedBalance6 = expectedBalance5 + tx4.amount.value + + Get(routePath(s"/balances/history/${acc2.toAddress}")) ~> routeWithBlockchain(d) ~> check { + status shouldBe StatusCodes.OK + responseAs[JsArray] shouldBe Json.toJson( + Seq( + 6 -> expectedBalance6, + 5 -> expectedBalance5, + 3 -> expectedBalance3, + 2 -> expectedBalance2, + 1 -> initBalance + ).map { case (height, balance) => + Json.obj("height" -> height, "balance" -> balance) + } + ) + } + } + } + routePath("/stateHash") - { "works" - { val settingsWithStateHashes = DomainPresets.SettingsFromDefaultConfig.copy( @@ -152,7 +193,7 @@ class DebugApiRouteSpec d.appendBlock(blockAt2) d.appendBlock(TestBlock.create(0, blockAt2.id(), Nil)) - val stateHashAt2 = d.levelDBWriter.loadStateHash(2).value + val stateHashAt2 = d.rocksDBWriter.loadStateHash(2).value Get(routePath(s"/stateHash/$suffix")) ~> routeWithBlockchain(d) ~> check { status shouldBe StatusCodes.OK responseAs[JsObject] shouldBe (Json.toJson(stateHashAt2).as[JsObject] ++ Json.obj( @@ -184,6 +225,7 @@ class DebugApiRouteSpec "valid tx" in { val blockchain = createBlockchainStub() (blockchain.balance _).when(TxHelpers.defaultSigner.publicKey.toAddress, *).returns(Long.MaxValue) + (blockchain.wavesBalances _).when(Seq(TxHelpers.defaultAddress)).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue)) val route = routeWithBlockchain(blockchain) @@ -198,6 +240,7 @@ class DebugApiRouteSpec "invalid tx" in { val 
blockchain = createBlockchainStub() (blockchain.balance _).when(TxHelpers.defaultSigner.publicKey.toAddress, *).returns(0) + (blockchain.wavesBalances _).when(Seq(TxHelpers.defaultAddress)).returns(Map(TxHelpers.defaultAddress -> 0)) val route = routeWithBlockchain(blockchain) @@ -224,6 +267,7 @@ class DebugApiRouteSpec "exchange tx with fail script" in { val blockchain = createBlockchainStub { blockchain => (blockchain.balance _).when(TxHelpers.defaultAddress, *).returns(Long.MaxValue) + (blockchain.wavesBalances _).when(Seq(TxHelpers.defaultAddress)).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue)) val (assetScript, comp) = ScriptCompiler.compile("if true then throw(\"error\") else false", ScriptEstimatorV3(fixOverflow = true, overhead = true)).explicitGet() @@ -268,6 +312,7 @@ class DebugApiRouteSpec "invoke tx with asset failing" in { val blockchain = createBlockchainStub { blockchain => (blockchain.balance _).when(*, *).returns(Long.MaxValue / 2) + (blockchain.wavesBalances _).when(*).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue / 2)) val (assetScript, assetScriptComplexity) = ScriptCompiler .compile( @@ -1590,6 +1635,7 @@ class DebugApiRouteSpec val blockchain = createBlockchainStub { blockchain => (blockchain.balance _).when(*, *).returns(Long.MaxValue) + (blockchain.wavesBalances _).when(*).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue)) (blockchain.resolveAlias _).when(Alias.create(recipient2.name).explicitGet()).returning(Right(TxHelpers.secondAddress)) @@ -2738,6 +2784,7 @@ class DebugApiRouteSpec val blockchain = createBlockchainStub { blockchain => (blockchain.balance _).when(*, *).returns(Long.MaxValue) + (blockchain.wavesBalances _).when(*).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue)) blockchain.stub.activateAllFeatures() val (dAppScript, _) = ScriptCompiler @@ -3167,6 +3214,7 @@ class DebugApiRouteSpec "transfer transaction with asset fail" in { val blockchain = createBlockchainStub { blockchain => (blockchain.balance _).when(*, *).returns(Long.MaxValue / 2) + (blockchain.wavesBalances _).when(*).returns(Map(TxHelpers.defaultAddress -> Long.MaxValue / 2)) val (assetScript, assetScriptComplexity) = ScriptCompiler.compile("false", ScriptEstimatorV3(fixOverflow = true, overhead = true)).explicitGet() @@ -3231,6 +3279,9 @@ class DebugApiRouteSpec (() => blockchain.settings).when().returns(WavesSettings.default().blockchainSettings.copy(functionalitySettings = settings)) (() => blockchain.activatedFeatures).when().returns(settings.preActivatedFeatures) (blockchain.balance _).when(*, *).returns(ENOUGH_AMT) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(TxHelpers.defaultAddress -> ENOUGH_AMT, TxHelpers.secondAddress -> ENOUGH_AMT, TxHelpers.address(3) -> ENOUGH_AMT)) val script = ExprScript(TRUE).explicitGet() def info(complexity: Int) = Some(AccountScriptInfo(TxHelpers.secondSigner.publicKey, script, complexity)) @@ -3471,7 +3522,8 @@ class DebugApiRouteSpec .copy( blockchain = d.blockchain, priorityPoolBlockchain = () => d.blockchain, - loadStateHash = d.levelDBWriter.loadStateHash + loadBalanceHistory = d.rocksDBWriter.loadBalanceHistory, + loadStateHash = d.rocksDBWriter.loadStateHash ) .route diff --git a/node/src/test/scala/com/wavesplatform/http/LeaseRouteSpec.scala b/node/src/test/scala/com/wavesplatform/http/LeaseRouteSpec.scala index 945cc66b966..a212b873e89 100644 --- a/node/src/test/scala/com/wavesplatform/http/LeaseRouteSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/LeaseRouteSpec.scala @@ -19,7 +19,7 @@ import 
com.wavesplatform.network.TransactionPublisher import com.wavesplatform.settings.WavesSettings import com.wavesplatform.state.diffs.ENOUGH_AMT import com.wavesplatform.state.reader.LeaseDetails -import com.wavesplatform.state.{BinaryDataEntry, Blockchain, Diff, Height, TxMeta} +import com.wavesplatform.state.{BinaryDataEntry, Blockchain, Height, TxMeta} import com.wavesplatform.test.* import com.wavesplatform.test.DomainPresets.* import com.wavesplatform.transaction.TxHelpers.{defaultSigner, secondSigner, signer} @@ -55,7 +55,7 @@ class LeaseRouteSpec domain.blockchain, (_, _) => Future.successful(TracedResult(Right(true))), ntpTime, - CommonAccountsApi(() => domain.blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), domain.db, domain.blockchain), + CommonAccountsApi(() => domain.blockchainUpdater.getCompositeBlockchain, domain.rdb, domain.blockchain), new RouteTimeout(60.seconds)(Schedulers.fixedPool(1, "heavy-request-scheduler")) ) diff --git a/node/src/test/scala/com/wavesplatform/http/ProtoVersionTransactionsSpec.scala b/node/src/test/scala/com/wavesplatform/http/ProtoVersionTransactionsSpec.scala index 2c45e189413..6031d7ff614 100644 --- a/node/src/test/scala/com/wavesplatform/http/ProtoVersionTransactionsSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/ProtoVersionTransactionsSpec.scala @@ -14,6 +14,7 @@ import com.wavesplatform.protobuf.transaction.{PBSignedTransaction, PBTransactio import com.wavesplatform.protobuf.utils.PBUtils import com.wavesplatform.settings.Constants import com.wavesplatform.state.Blockchain +import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.assets.* import com.wavesplatform.transaction.assets.exchange.{ExchangeTransaction, Order} @@ -63,6 +64,7 @@ class ProtoVersionTransactionsSpec extends RouteSpec("/transactions") with RestA transactionsApi, testWallet, blockchain, + mock[() => CompositeBlockchain], () => utx.size, DummyTransactionPublisher.accepting, ntpTime, diff --git a/node/src/test/scala/com/wavesplatform/http/SpentComplexitySpec.scala b/node/src/test/scala/com/wavesplatform/http/SpentComplexitySpec.scala index e963925b250..e62a51f8226 100644 --- a/node/src/test/scala/com/wavesplatform/http/SpentComplexitySpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/SpentComplexitySpec.scala @@ -72,6 +72,7 @@ class SpentComplexitySpec d.transactionsApi, testWallet, d.blockchain, + () => d.blockchain.getCompositeBlockchain, () => 0, DummyTransactionPublisher.accepting, ntpTime, diff --git a/node/src/test/scala/com/wavesplatform/http/TransactionBroadcastSpec.scala b/node/src/test/scala/com/wavesplatform/http/TransactionBroadcastSpec.scala index b726301624b..1c66e2b779e 100644 --- a/node/src/test/scala/com/wavesplatform/http/TransactionBroadcastSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/TransactionBroadcastSpec.scala @@ -9,6 +9,7 @@ import com.wavesplatform.common.utils.* import com.wavesplatform.lang.v1.estimator.v3.ScriptEstimatorV3 import com.wavesplatform.lang.v1.traits.domain.{Lease, Recipient} import com.wavesplatform.network.TransactionPublisher +import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.state.{AccountScriptInfo, Blockchain} import com.wavesplatform.test.TestTime import com.wavesplatform.transaction.TxValidationError.GenericError @@ -40,6 +41,7 @@ class TransactionBroadcastSpec stub[CommonTransactionsApi], stub[Wallet], blockchain, + stub[() => CompositeBlockchain], 
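// A minimal sketch (not taken from the patch itself) of the two ways the specs above satisfy the
// new `() => CompositeBlockchain` constructor argument; `stub[() => CompositeBlockchain]` and
// `CompositeBlockchain(blockchain, None)` are copied from the surrounding hunks, while the trait
// and method names are illustrative only. Domain-backed specs instead pass
// `() => d.blockchain.getCompositeBlockchain`, as shown in the hunks above.
import com.wavesplatform.state.Blockchain
import com.wavesplatform.state.reader.CompositeBlockchain
import org.scalamock.scalatest.PathMockFactory

trait CompositeBlockchainProviders { self: PathMockFactory =>
  // Specs that never read liquid state simply stub the provider:
  def stubbedProvider: () => CompositeBlockchain =
    stub[() => CompositeBlockchain]

  // Specs backed by a real Blockchain build the composite view on demand, so the route
  // observes the same liquid diff the node itself would serve:
  def liveProvider(blockchain: Blockchain): () => CompositeBlockchain =
    () => CompositeBlockchain(blockchain, None)
}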
mockFunction[Int], transactionPublisher, testTime, @@ -57,6 +59,15 @@ class TransactionBroadcastSpec sh.creditBalance(TxHelpers.matcher.toAddress, *) sh.creditBalance(ethBuyOrder.senderAddress, *) sh.creditBalance(ethSellOrder.senderAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns( + Map( + TxHelpers.matcher.toAddress -> Long.MaxValue / 3, + ethBuyOrder.senderAddress -> Long.MaxValue / 3, + ethSellOrder.senderAddress -> Long.MaxValue / 3 + ) + ) sh.issueAsset(ByteStr(EthStubBytes32)) } @@ -272,7 +283,7 @@ class TransactionBroadcastSpec (blockchain.hasAccountScript _).when(*).returns(true) } - val publisher = createTxPublisherStub(blockchain) + val publisher = createTxPublisherStub(blockchain, enableExecutionLog = true) val route = transactionsApiRoute.copy(blockchain = blockchain, transactionPublisher = publisher).route Post(routePath("/broadcast?trace=true"), invoke.json()) ~> route ~> check { diff --git a/node/src/test/scala/com/wavesplatform/http/TransactionsRouteSpec.scala b/node/src/test/scala/com/wavesplatform/http/TransactionsRouteSpec.scala index f404f1fb16d..b945381d400 100644 --- a/node/src/test/scala/com/wavesplatform/http/TransactionsRouteSpec.scala +++ b/node/src/test/scala/com/wavesplatform/http/TransactionsRouteSpec.scala @@ -5,7 +5,7 @@ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.server.Route import com.wavesplatform.account.KeyPair import com.wavesplatform.api.common.{CommonTransactionsApi, TransactionMeta} -import com.wavesplatform.api.http.ApiError.{InvalidIds, *} +import com.wavesplatform.api.http.ApiError.* import com.wavesplatform.api.http.{CustomJson, RouteTimeout, TransactionsApiRoute} import com.wavesplatform.block.Block import com.wavesplatform.block.Block.TransactionProof @@ -21,7 +21,8 @@ import com.wavesplatform.lang.v1.compiler.Terms.{CONST_BOOLEAN, CONST_LONG, FUNC import com.wavesplatform.lang.v1.compiler.TestCompiler import com.wavesplatform.lang.v1.traits.domain.LeaseCancel import com.wavesplatform.network.TransactionPublisher -import com.wavesplatform.state.reader.LeaseDetails +import com.wavesplatform.settings.WavesSettings +import com.wavesplatform.state.reader.{CompositeBlockchain, LeaseDetails} import com.wavesplatform.state.{Blockchain, Height, InvokeScriptResult, TxMeta} import com.wavesplatform.test.* import com.wavesplatform.test.DomainPresets.RideV6 @@ -66,12 +67,19 @@ class TransactionsRouteSpec private val addressTransactions = mock[CommonTransactionsApi] private val utxPoolSize = mockFunction[Int] private val testTime = new TestTime + private val getCompositeBlockchain = + () => { + (() => blockchain.carryFee).expects().returns(0) + (() => blockchain.settings).expects().returns(WavesSettings.default().blockchainSettings) + CompositeBlockchain(blockchain, None) + } private val transactionsApiRoute = new TransactionsApiRoute( restAPISettings, addressTransactions, testWallet, blockchain, + getCompositeBlockchain, utxPoolSize, utxPoolSynchronizer, testTime, @@ -129,6 +137,7 @@ class TransactionsRouteSpec d.commonApi.transactions, testWallet, d.blockchain, + () => d.blockchain.getCompositeBlockchain, () => 0, (t, _) => d.commonApi.transactions.broadcastTransaction(t), ntpTime, @@ -242,6 +251,7 @@ class TransactionsRouteSpec forAll(addressGen, choose(1, MaxTransactionsPerRequest).label("limitCorrect")) { case (address, limit) => (addressTransactions.aliasesOfAddress _).expects(*).returning(Observable.empty).once() (addressTransactions.transactionsByAddress _).expects(*, *, *, 
None).returning(Observable.empty).once() + (() => blockchain.activatedFeatures).expects().returns(Map.empty) Get(routePath(s"/address/$address/limit/$limit")) ~> route ~> check { status shouldEqual StatusCodes.OK } @@ -252,6 +262,7 @@ class TransactionsRouteSpec forAll(addressGen, choose(1, MaxTransactionsPerRequest).label("limitCorrect"), bytes32StrGen) { case (address, limit, txId) => (addressTransactions.aliasesOfAddress _).expects(*).returning(Observable.empty).once() (addressTransactions.transactionsByAddress _).expects(*, *, *, *).returning(Observable.empty).once() + (() => blockchain.activatedFeatures).expects().returns(Map.empty) Get(routePath(s"/address/$address/limit/$limit?after=$txId")) ~> route ~> check { status shouldEqual StatusCodes.OK } @@ -328,7 +339,7 @@ class TransactionsRouteSpec .once() (blockchain.resolveAlias _).expects(recipientAlias).returning(Right(recipientAddress)) - Get(routePath(s"/address/${invokeAddress}/limit/1")) ~> route ~> check { + Get(routePath(s"/address/$invokeAddress/limit/1")) ~> route ~> check { status shouldEqual StatusCodes.OK val json = (responseAs[JsArray] \ 0 \ 0 \ "stateChanges").as[JsObject] json should matchJson(s"""{ @@ -384,8 +395,12 @@ class TransactionsRouteSpec withDomain(RideV6) { d => val tx = TxHelpers.transfer() d.appendBlock(tx) - val route = seal(transactionsApiRoute.copy(blockchain = d.blockchain, commonApi = d.transactionsApi).route) - Get(routePath(s"/address/$defaultAddress/limit/1"))~> Accept(CustomJson.jsonWithNumbersAsStrings) ~> route ~> check { + val route = seal( + transactionsApiRoute + .copy(blockchain = d.blockchain, compositeBlockchain = () => d.blockchain.getCompositeBlockchain, commonApi = d.transactionsApi) + .route + ) + Get(routePath(s"/address/$defaultAddress/limit/1")) ~> Accept(CustomJson.jsonWithNumbersAsStrings) ~> route ~> check { val result = responseAs[JsArray] \ 0 \ 0 (result \ "amount").as[String] shouldBe tx.amount.value.toString (result \ "fee").as[String] shouldBe tx.fee.value.toString @@ -404,6 +419,7 @@ class TransactionsRouteSpec "returns meta for eth transfer" in { val blockchain = createBlockchainStub { blockchain => blockchain.stub.creditBalance(TxHelpers.defaultEthAddress, Waves) + (blockchain.wavesBalances _).when(*).returns(Map(TxHelpers.defaultEthAddress -> Long.MaxValue / 3, TxHelpers.secondAddress -> 0L)) blockchain.stub.activateAllFeatures() } @@ -455,6 +471,7 @@ class TransactionsRouteSpec "returns meta and state changes for eth invoke" in { val blockchain = createBlockchainStub { blockchain => blockchain.stub.creditBalance(TxHelpers.defaultEthAddress, Waves) + (blockchain.wavesBalances _).when(*).returns(Map(TxHelpers.defaultEthAddress -> Long.MaxValue / 3)) blockchain.stub.setScript( TxHelpers.secondAddress, TxHelpers.scriptV5("""@Callable(i) diff --git a/node/src/test/scala/com/wavesplatform/mining/BlockV5Test.scala b/node/src/test/scala/com/wavesplatform/mining/BlockV5Test.scala index ac34037f782..bf104e8a596 100644 --- a/node/src/test/scala/com/wavesplatform/mining/BlockV5Test.scala +++ b/node/src/test/scala/com/wavesplatform/mining/BlockV5Test.scala @@ -29,7 +29,7 @@ import io.netty.channel.group.DefaultChannelGroup import io.netty.util.concurrent.GlobalEventExecutor import monix.eval.Task import monix.execution.Scheduler -import monix.reactive.{Observable, Observer} +import monix.reactive.Observable import org.scalacheck.Gen import org.scalatest.* import org.scalatest.enablers.Length @@ -392,7 +392,7 @@ class BlockV5Test extends FlatSpec with WithDomain with OptionValues with 
Either def applyBlock(txs: Transaction*): SignedBlockHeader = { d.appendBlock( if (d.blockchainUpdater.height >= 1) Block.ProtoBlockVersion else Block.PlainBlockVersion, - txs: _* + txs* ) lastBlock } @@ -443,9 +443,9 @@ class BlockV5Test extends FlatSpec with WithDomain with OptionValues with Either private def withBlockchain(disabledFeatures: AtomicReference[Set[Short]], time: Time = ntpTime, settings: WavesSettings = testSettings)( f: Blockchain & BlockchainUpdater & NG => Unit ): Unit = { - withLevelDBWriter(settings.blockchainSettings) { blockchain => + withRocksDBWriter(settings.blockchainSettings) { blockchain => val bcu: BlockchainUpdaterImpl = - new BlockchainUpdaterImpl(blockchain, Observer.stopped, settings, time, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) { + new BlockchainUpdaterImpl(blockchain, settings, time, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) { override def activatedFeatures: Map[Short, Int] = super.activatedFeatures -- disabledFeatures.get() } try f(bcu) diff --git a/node/src/test/scala/com/wavesplatform/mining/BlockWithMaxBaseTargetTest.scala b/node/src/test/scala/com/wavesplatform/mining/BlockWithMaxBaseTargetTest.scala index a14000c465b..763bdde06a9 100644 --- a/node/src/test/scala/com/wavesplatform/mining/BlockWithMaxBaseTargetTest.scala +++ b/node/src/test/scala/com/wavesplatform/mining/BlockWithMaxBaseTargetTest.scala @@ -3,11 +3,8 @@ package com.wavesplatform.mining import java.security.Permission import java.util.concurrent.{Semaphore, TimeUnit} -import scala.concurrent.Await -import scala.concurrent.duration._ - import com.typesafe.config.ConfigFactory -import com.wavesplatform.WithDB +import com.wavesplatform.WithNewDBForEachTest import com.wavesplatform.account.KeyPair import com.wavesplatform.block.Block import com.wavesplatform.common.utils.EitherExt2 @@ -16,11 +13,11 @@ import com.wavesplatform.db.DBCacheSettings import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.lagonaki.mocks.TestBlock import com.wavesplatform.mining.BlockWithMaxBaseTargetTest.Env -import com.wavesplatform.settings.{WavesSettings, _} -import com.wavesplatform.state._ +import com.wavesplatform.settings.* +import com.wavesplatform.state.* import com.wavesplatform.state.appender.BlockAppender import com.wavesplatform.state.diffs.ENOUGH_AMT -import com.wavesplatform.state.utils.TestLevelDB +import com.wavesplatform.state.utils.TestRocksDB import com.wavesplatform.test.FreeSpec import com.wavesplatform.transaction.{BlockchainUpdater, GenesisTransaction} import com.wavesplatform.utils.BaseTargetReachedMaximum @@ -34,7 +31,10 @@ import monix.execution.schedulers.SchedulerService import monix.reactive.Observable import org.scalacheck.{Arbitrary, Gen} -class BlockWithMaxBaseTargetTest extends FreeSpec with WithDB with DBCacheSettings { +import scala.concurrent.Await +import scala.concurrent.duration.* + +class BlockWithMaxBaseTargetTest extends FreeSpec with WithNewDBForEachTest with DBCacheSettings { "base target limit" - { "node should stop if base target greater than maximum in block creation " in { @@ -98,8 +98,9 @@ class BlockWithMaxBaseTargetTest extends FreeSpec with WithDB with DBCacheSettin } }) - val blockAppendTask = BlockAppender(bcu, ntpTime, utxPoolStub, pos, scheduler)(lastBlock).onErrorRecoverWith[Any] { case _: SecurityException => - Task.unit + val blockAppendTask = BlockAppender(bcu, ntpTime, utxPoolStub, pos, scheduler)(lastBlock).onErrorRecoverWith[Any] { + case _: SecurityException => + Task.unit } 
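// A condensed sketch (not part of the diff) of the wiring both mining suites converge on after
// this patch: the state writer comes from TestRocksDB and BlockchainUpdaterImpl no longer takes
// the spendableBalanceChanged observer. `db`, `ntpTime` and `ignoreBlockchainUpdateTriggers` are
// assumed to come from WithNewDBForEachTest and the other test helpers used in the hunks around
// this point; the helper name is illustrative.
import com.wavesplatform.settings.{TestFunctionalitySettings, WavesSettings}
import com.wavesplatform.state.BlockchainUpdaterImpl
import com.wavesplatform.state.utils.TestRocksDB

def buildTestUpdater(settings: WavesSettings): BlockchainUpdaterImpl = {
  // RocksDB-backed writer replacing the former TestLevelDB helper
  val writer = TestRocksDB.withFunctionalitySettings(db, TestFunctionalitySettings.Stub)
  // five-argument constructor: no Observer[(Address, Asset)] any more
  new BlockchainUpdaterImpl(writer, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty)
}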
Await.result(blockAppendTask.runToFuture(scheduler), Duration.Inf) @@ -113,7 +114,7 @@ class BlockWithMaxBaseTargetTest extends FreeSpec with WithDB with DBCacheSettin } def withEnv(f: Env => Unit): Unit = { - val defaultWriter = TestLevelDB.withFunctionalitySettings(db, ignoreSpendableBalanceChanged, TestFunctionalitySettings.Stub) + val defaultWriter = TestRocksDB.withFunctionalitySettings(db, TestFunctionalitySettings.Stub) val settings0 = WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) val minerSettings = settings0.minerSettings.copy(quorum = 0) @@ -129,7 +130,7 @@ class BlockWithMaxBaseTargetTest extends FreeSpec with WithDB with DBCacheSettin ) val bcu = - new BlockchainUpdaterImpl(defaultWriter, ignoreSpendableBalanceChanged, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) + new BlockchainUpdaterImpl(defaultWriter, settings, ntpTime, ignoreBlockchainUpdateTriggers, (_, _) => Seq.empty) val pos = PoSSelector(bcu, settings.synchronizationSettings.maxBaseTarget) val utxPoolStub = new UtxPoolImpl(ntpTime, bcu, settings0.utxSettings, settings.maxTxErrorLogSize, settings0.minerSettings.enable) diff --git a/node/src/test/scala/com/wavesplatform/mining/MicroBlockMinerSpec.scala b/node/src/test/scala/com/wavesplatform/mining/MicroBlockMinerSpec.scala index 33e3b8ed299..34aa5b5bd85 100644 --- a/node/src/test/scala/com/wavesplatform/mining/MicroBlockMinerSpec.scala +++ b/node/src/test/scala/com/wavesplatform/mining/MicroBlockMinerSpec.scala @@ -142,6 +142,7 @@ class MicroBlockMinerSpec extends FlatSpec with PathMockFactory with WithDomain override def transactionById(transactionId: ByteStr) = inner.transactionById(transactionId) override def close(): Unit = inner.close() override def scheduleCleanup(): Unit = inner.scheduleCleanup() + override def setPriorityDiffs(diffs: Seq[Diff]): Unit = inner.setPriorityDiffs(diffs) } val microBlockMiner = new MicroBlockMinerImpl( diff --git a/node/src/test/scala/com/wavesplatform/mining/MiningFailuresSuite.scala b/node/src/test/scala/com/wavesplatform/mining/MiningFailuresSuite.scala index 660e60578c9..34af5160e57 100644 --- a/node/src/test/scala/com/wavesplatform/mining/MiningFailuresSuite.scala +++ b/node/src/test/scala/com/wavesplatform/mining/MiningFailuresSuite.scala @@ -1,7 +1,7 @@ package com.wavesplatform.mining import com.typesafe.config.ConfigFactory -import com.wavesplatform.WithDB +import com.wavesplatform.WithNewDBForEachTest import com.wavesplatform.account.KeyPair import com.wavesplatform.block.{Block, SignedBlockHeader} import com.wavesplatform.common.state.ByteStr @@ -23,7 +23,7 @@ import monix.execution.Scheduler.Implicits.global import monix.reactive.Observable import org.scalamock.scalatest.PathMockFactory -class MiningFailuresSuite extends FlatSpec with PathMockFactory with WithDB { +class MiningFailuresSuite extends FlatSpec with PathMockFactory with WithNewDBForEachTest { trait BlockchainUpdaterNG extends Blockchain with BlockchainUpdater with NG behavior of "Miner" @@ -97,10 +97,10 @@ class MiningFailuresSuite extends FlatSpec with PathMockFactory with WithDB { ) var minedBlock: Block = null - (blockchainUpdater.processBlock _).when(*, *, *).returning(Left(BlockFromFuture(100))).repeated(10) + (blockchainUpdater.processBlock _).when(*, *, *, *).returning(Left(BlockFromFuture(100))).repeated(10) (blockchainUpdater.processBlock _) - .when(*, *, *) - .onCall { (block, _, _) => + .when(*, *, *, *) + .onCall { (block, _, _, _) => minedBlock = block Right(Nil) } diff --git 
a/node/src/test/scala/com/wavesplatform/mining/MiningWithRewardSuite.scala b/node/src/test/scala/com/wavesplatform/mining/MiningWithRewardSuite.scala index e183a739bb0..72728a2bbc5 100644 --- a/node/src/test/scala/com/wavesplatform/mining/MiningWithRewardSuite.scala +++ b/node/src/test/scala/com/wavesplatform/mining/MiningWithRewardSuite.scala @@ -1,21 +1,20 @@ package com.wavesplatform.mining import scala.concurrent.Future -import scala.concurrent.duration._ - +import scala.concurrent.duration.* import cats.effect.Resource import com.typesafe.config.ConfigFactory -import com.wavesplatform.{TransactionGen, WithDB} +import com.wavesplatform.{TransactionGen, WithNewDBForEachTest} import com.wavesplatform.account.KeyPair import com.wavesplatform.block.Block import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.common.utils._ +import com.wavesplatform.common.utils.* import com.wavesplatform.consensus.PoSSelector -import com.wavesplatform.database.{Keys, TestStorageFactory} +import com.wavesplatform.database.{RDB, TestStorageFactory} import com.wavesplatform.db.DBCacheSettings import com.wavesplatform.features.{BlockchainFeature, BlockchainFeatures} import com.wavesplatform.lagonaki.mocks.TestBlock -import com.wavesplatform.settings._ +import com.wavesplatform.settings.* import com.wavesplatform.state.{Blockchain, BlockchainUpdaterImpl, NG} import com.wavesplatform.state.diffs.ENOUGH_AMT import com.wavesplatform.transaction.{BlockchainUpdater, GenesisTransaction, Transaction} @@ -28,14 +27,13 @@ import io.netty.util.concurrent.GlobalEventExecutor import monix.eval.Task import monix.execution.Scheduler import monix.reactive.Observable -import org.iq80.leveldb.DB import org.scalacheck.{Arbitrary, Gen} import org.scalatest.compatible.Assertion import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers -class MiningWithRewardSuite extends AsyncFlatSpec with Matchers with WithDB with TransactionGen with DBCacheSettings { - import MiningWithRewardSuite._ +class MiningWithRewardSuite extends AsyncFlatSpec with Matchers with WithNewDBForEachTest with TransactionGen with DBCacheSettings { + import MiningWithRewardSuite.* behavior of "Miner with activated reward feature" @@ -141,13 +139,11 @@ class MiningWithRewardSuite extends AsyncFlatSpec with Matchers with WithDB with private def forgeBlock(miner: MinerImpl)(account: KeyPair): Either[String, (Block, MiningConstraint)] = miner.forgeBlock(account) - private def resources(settings: WavesSettings): Resource[Task, (BlockchainUpdaterImpl, DB)] = + private def resources(settings: WavesSettings): Resource[Task, (BlockchainUpdaterImpl, RDB)] = Resource.make { - val (bcu, _) = TestStorageFactory(settings, db, ntpTime, ignoreSpendableBalanceChanged, ignoreBlockchainUpdateTriggers) - import com.wavesplatform.database.DBExt - db.readWrite(_.put(Keys.blockReward(0), Some(settings.blockchainSettings.rewardsSettings.initial))) + val (bcu, _) = TestStorageFactory(settings, db, ntpTime, ignoreBlockchainUpdateTriggers) Task.now((bcu, db)) - } { case (blockchainUpdater, db) => + } { case (blockchainUpdater, _) => Task { blockchainUpdater.shutdown() } @@ -161,7 +157,7 @@ object MiningWithRewardSuite { type BlockProducer = (Long, ByteStr, KeyPair) => Block type TransactionProducer = (Long, KeyPair) => Transaction - case class Env(blocks: Seq[Block], account: KeyPair, miner: MinerImpl, blockchain: Blockchain with BlockchainUpdater with NG) + case class Env(blocks: Seq[Block], account: KeyPair, miner: MinerImpl, blockchain: 
Blockchain & BlockchainUpdater & NG) val settings: WavesSettings = { val commonSettings: WavesSettings = WavesSettings.fromRootConfig(loadConfig(ConfigFactory.load())) diff --git a/node/src/test/scala/com/wavesplatform/mining/ScriptComplexityMiningConstraintSuite.scala b/node/src/test/scala/com/wavesplatform/mining/ScriptComplexityMiningConstraintSuite.scala index fab7391ff54..c175e64000e 100644 --- a/node/src/test/scala/com/wavesplatform/mining/ScriptComplexityMiningConstraintSuite.scala +++ b/node/src/test/scala/com/wavesplatform/mining/ScriptComplexityMiningConstraintSuite.scala @@ -2,7 +2,7 @@ package com.wavesplatform.mining import com.typesafe.config.ConfigFactory import com.wavesplatform.account.KeyPair -import com.wavesplatform.common.utils._ +import com.wavesplatform.common.utils.* import com.wavesplatform.features.BlockchainFeatures import com.wavesplatform.lang.v1.estimator.v3.ScriptEstimatorV3 import com.wavesplatform.settings.WavesSettings @@ -24,40 +24,40 @@ class ScriptComplexityMiningConstraintSuite extends FlatSpec with PathMockFactor val (script, _) = ScriptCompiler.compile("true", ScriptEstimatorV3(fixOverflow = true, overhead = true)).explicitGet() "ScriptComplexityMiningConstraint" should "accept non-scripted txs after limit" in { - forAll(preconditions) { - case (acc1, tx1, tx2, tx3) => - val blockchain = stub[Blockchain] - (() => blockchain.settings).when().returning(settings.blockchainSettings) - (() => blockchain.height).when().returning(1) - (() => blockchain.activatedFeatures).when().returning(Map(BlockchainFeatures.DataTransaction.id -> 0)) - - val txDiffer = - TransactionDiffer(Some(System.currentTimeMillis() - 1000), System.currentTimeMillis())(blockchain, _: Transaction).resultE.explicitGet() - (blockchain.balance _).when(*, *).returning(10000000) - (blockchain.leaseBalance _).when(*).returning(LeaseBalance(0, 0)) - (blockchain.accountScript _).when(tx1.sender.toAddress).returning(Some(AccountScriptInfo(acc1.publicKey, script, 1000, Map.empty))) - (blockchain.accountScript _).when(*).returning(None) - - val c1 = constraint.put(blockchain, tx1, txDiffer(tx1)) - val cOverfilled = c1.put(blockchain, tx1, txDiffer(tx1)) - cOverfilled.isOverfilled shouldBe true - - val c2 = c1.put(blockchain, tx2, txDiffer(tx2)) - c2.isFull shouldBe false - - val c3 = c2.put(blockchain, tx3, txDiffer(tx3)) - c3.isFull shouldBe true - c3.isOverfilled shouldBe false + forAll(preconditions) { case (acc1, acc2, tx1, tx2, tx3) => + val blockchain = stub[Blockchain] + (() => blockchain.settings).when().returning(settings.blockchainSettings) + (() => blockchain.height).when().returning(1) + (() => blockchain.activatedFeatures).when().returning(Map(BlockchainFeatures.DataTransaction.id -> 0)) + + val txDiffer = + TransactionDiffer(Some(System.currentTimeMillis() - 1000), System.currentTimeMillis())(blockchain, _: Transaction).resultE.explicitGet() + (blockchain.balance _).when(*, *).returning(10000000) + (blockchain.wavesBalances _).when(*).returning(Map(acc1.toAddress -> 10000000, acc2.toAddress -> 10000000)) + (blockchain.leaseBalance _).when(*).returning(LeaseBalance(0, 0)) + (blockchain.accountScript _).when(tx1.sender.toAddress).returning(Some(AccountScriptInfo(acc1.publicKey, script, 1000, Map.empty))) + (blockchain.accountScript _).when(*).returning(None) + + val c1 = constraint.put(blockchain, tx1, txDiffer(tx1)) + val cOverfilled = c1.put(blockchain, tx1, txDiffer(tx1)) + cOverfilled.isOverfilled shouldBe true + + val c2 = c1.put(blockchain, tx2, txDiffer(tx2)) + c2.isFull 
shouldBe false + + val c3 = c2.put(blockchain, tx3, txDiffer(tx3)) + c3.isFull shouldBe true + c3.isOverfilled shouldBe false } } - private[this] def preconditions: Gen[(KeyPair, DataTransaction, DataTransaction, DataTransaction)] = + private[this] def preconditions: Gen[(KeyPair, KeyPair, DataTransaction, DataTransaction, DataTransaction)] = for { acc1 <- accountGen acc2 <- accountGen tx1 = DataTransaction.selfSigned(TxVersion.V1, acc1, Nil, 1000000, System.currentTimeMillis()).explicitGet() tx2 = DataTransaction.selfSigned(TxVersion.V1, acc2, Nil, 1000000, System.currentTimeMillis()).explicitGet() tx3 = DataTransaction.selfSigned(TxVersion.V1, acc2, Nil, 1000000, System.currentTimeMillis()).explicitGet() - } yield (acc1, tx1, tx2, tx3) + } yield (acc1, acc2, tx1, tx2, tx3) } diff --git a/node/src/test/scala/com/wavesplatform/network/peer/PeerDatabaseImplSpecification.scala b/node/src/test/scala/com/wavesplatform/network/peer/PeerDatabaseImplSpecification.scala index 24bbfaaaaaa..bb7715d09c1 100644 --- a/node/src/test/scala/com/wavesplatform/network/peer/PeerDatabaseImplSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/network/peer/PeerDatabaseImplSpecification.scala @@ -8,7 +8,7 @@ import com.typesafe.config.ConfigFactory import com.wavesplatform.network.{PeerDatabase, PeerDatabaseImpl} import com.wavesplatform.settings.NetworkSettings import com.wavesplatform.test.FreeSpec -import net.ceedubs.ficus.Ficus._ +import net.ceedubs.ficus.Ficus.* class PeerDatabaseImplSpecification extends FreeSpec { @@ -17,26 +17,35 @@ class PeerDatabaseImplSpecification extends FreeSpec { val address1 = new InetSocketAddress(host1, 1) val address2 = new InetSocketAddress(host2, 2) - private val config1 = ConfigFactory.parseString("""waves.network { - | file = null - | known-peers = [] - | peers-data-residence-time: 2s - |}""".stripMargin).withFallback(ConfigFactory.load()).resolve() + private val config1 = ConfigFactory + .parseString("""waves.network { + | file = null + | known-peers = [] + | peers-data-residence-time: 2s + |}""".stripMargin) + .withFallback(ConfigFactory.load()) + .resolve() private val settings1 = config1.as[NetworkSettings]("waves.network") - private val config2 = ConfigFactory.parseString("""waves.network { - | file = null - | known-peers = [] - | peers-data-residence-time = 10s - |}""".stripMargin).withFallback(ConfigFactory.load()).resolve() + private val config2 = ConfigFactory + .parseString("""waves.network { + | file = null + | known-peers = [] + | peers-data-residence-time = 10s + |}""".stripMargin) + .withFallback(ConfigFactory.load()) + .resolve() private val settings2 = config2.as[NetworkSettings]("waves.network") - private val config3 = ConfigFactory.parseString(s"""waves.network { - | file = null - | known-peers = ["$host1:1"] - | peers-data-residence-time = 2s - | enable-peers-exchange = no - |}""".stripMargin).withFallback(ConfigFactory.load()).resolve() + private val config3 = ConfigFactory + .parseString(s"""waves.network { + | file = null + | known-peers = ["$host1:1"] + | peers-data-residence-time = 2s + | enable-peers-exchange = no + |}""".stripMargin) + .withFallback(ConfigFactory.load()) + .resolve() private val settings3 = config3.as[NetworkSettings]("waves.network") private def withDatabase(settings: NetworkSettings)(f: PeerDatabase => Unit): Unit = { @@ -132,22 +141,28 @@ class PeerDatabaseImplSpecification extends FreeSpec { "should clear blacklist at start" in { val databaseFile = Files.createTempFile("waves-tests", 
"PeerDatabaseImplSpecification-blacklisting-clear").toAbsolutePath.toString val path = if (File.separatorChar == '\\') databaseFile.replace('\\', '/') else databaseFile - val prevConfig = ConfigFactory.parseString(s"""waves.network { - | file = "$path" - | known-peers = [] - | peers-data-residence-time = 100s - |}""".stripMargin).withFallback(ConfigFactory.load()).resolve() + val prevConfig = ConfigFactory + .parseString(s"""waves.network { + | file = "$path" + | known-peers = [] + | peers-data-residence-time = 100s + |}""".stripMargin) + .withFallback(ConfigFactory.load()) + .resolve() val prevSettings = prevConfig.as[NetworkSettings]("waves.network") val prevDatabase = new PeerDatabaseImpl(prevSettings) prevDatabase.blacklist(address1.getAddress, "I don't like it") prevDatabase.close() - val config = ConfigFactory.parseString(s"""waves.network { - | file = "$path" - | known-peers = [] - | peers-data-residence-time = 100s - | enable-blacklisting = no - |}""".stripMargin).withFallback(ConfigFactory.load()).resolve() + val config = ConfigFactory + .parseString(s"""waves.network { + | file = "$path" + | known-peers = [] + | peers-data-residence-time = 100s + | enable-blacklisting = no + |}""".stripMargin) + .withFallback(ConfigFactory.load()) + .resolve() val settings = config.as[NetworkSettings]("waves.network") val database = new PeerDatabaseImpl(settings) @@ -155,12 +170,15 @@ class PeerDatabaseImplSpecification extends FreeSpec { } "should not add nodes to the blacklist" in { - val config = ConfigFactory.parseString(s"""waves.network { - | file = null - | known-peers = [] - | peers-data-residence-time = 100s - | enable-blacklisting = no - |}""".stripMargin).withFallback(ConfigFactory.load()).resolve() + val config = ConfigFactory + .parseString(s"""waves.network { + | file = null + | known-peers = [] + | peers-data-residence-time = 100s + | enable-blacklisting = no + |}""".stripMargin) + .withFallback(ConfigFactory.load()) + .resolve() val settings = config.as[NetworkSettings]("waves.network") val database = new PeerDatabaseImpl(settings) database.blacklist(address1.getAddress, "I don't like it") diff --git a/node/src/test/scala/com/wavesplatform/serialization/EvaluatedPBSerializationTest.scala b/node/src/test/scala/com/wavesplatform/serialization/EvaluatedPBSerializationTest.scala index f3d0ad8d824..f19871ac4dd 100644 --- a/node/src/test/scala/com/wavesplatform/serialization/EvaluatedPBSerializationTest.scala +++ b/node/src/test/scala/com/wavesplatform/serialization/EvaluatedPBSerializationTest.scala @@ -115,6 +115,7 @@ class EvaluatedPBSerializationTest d.transactionsApi, d.wallet, d.blockchain, + () => d.blockchain.getCompositeBlockchain, () => d.utxPool.size, (_, _) => Future.successful(TracedResult(Right(true))), ntpTime, diff --git a/node/src/test/scala/com/wavesplatform/state/BlockchainUpdaterImplSpec.scala b/node/src/test/scala/com/wavesplatform/state/BlockchainUpdaterImplSpec.scala index 2171f82ac0f..fabdbcd5a16 100644 --- a/node/src/test/scala/com/wavesplatform/state/BlockchainUpdaterImplSpec.scala +++ b/node/src/test/scala/com/wavesplatform/state/BlockchainUpdaterImplSpec.scala @@ -5,34 +5,27 @@ import com.typesafe.config.ConfigFactory import com.wavesplatform.TestHelpers.enableNG import com.wavesplatform.account.{Address, KeyPair} import com.wavesplatform.block.Block -import com.wavesplatform.block.Block.PlainBlockVersion import com.wavesplatform.common.utils.EitherExt2 -import com.wavesplatform.database.loadActiveLeases import com.wavesplatform.db.WithState.AddrWithBalance 
import com.wavesplatform.db.{DBCacheSettings, WithDomain} import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.history.Domain.BlockchainUpdaterExt -import com.wavesplatform.history.{Domain, chainBaseAndMicro, randomSig} +import com.wavesplatform.history.{chainBaseAndMicro, randomSig} import com.wavesplatform.lagonaki.mocks.TestBlock import com.wavesplatform.lang.v1.estimator.v2.ScriptEstimatorV2 import com.wavesplatform.settings.{WavesSettings, loadConfig} import com.wavesplatform.state.diffs.ENOUGH_AMT import com.wavesplatform.test.* import com.wavesplatform.transaction.Asset.Waves -import com.wavesplatform.transaction.TxHelpers.* import com.wavesplatform.transaction.smart.SetScriptTransaction import com.wavesplatform.transaction.smart.script.ScriptCompiler import com.wavesplatform.transaction.transfer.TransferTransaction import com.wavesplatform.transaction.utils.Signed -import com.wavesplatform.transaction.{Asset, Transaction, TxHelpers, TxVersion} +import com.wavesplatform.transaction.{Transaction, TxHelpers, TxVersion} import com.wavesplatform.utils.Time import com.wavesplatform.{EitherMatchers, NTPTime} -import monix.execution.Scheduler.Implicits.global -import monix.reactive.subjects.PublishToOneSubject import org.scalamock.scalatest.MockFactory -import scala.concurrent.Await -import scala.concurrent.duration.DurationInt import scala.util.Random class BlockchainUpdaterImplSpec extends FreeSpec with EitherMatchers with WithDomain with NTPTime with DBCacheSettings with MockFactory { @@ -299,34 +292,5 @@ class BlockchainUpdaterImplSpec extends FreeSpec with EitherMatchers with WithDo d.appendBlock(d.createBlock(5.toByte, Seq(invoke))) } } - - "spendableBalanceChanged" in { - withLevelDBWriter(RideV6) { levelDb => - val ps = PublishToOneSubject[(Address, Asset)]() - val items = ps.toListL.runToFuture - - val blockchain = new BlockchainUpdaterImpl( - levelDb, - ps, - RideV6, - ntpTime, - BlockchainUpdateTriggers.noop, - loadActiveLeases(db, _, _) - ) - - val d = Domain(db, blockchain, levelDb, RideV6) - blockchain.processBlock(d.createBlock(PlainBlockVersion, Seq(genesis(defaultAddress)), generator = TestBlock.defaultSigner)) - blockchain.processBlock(d.createBlock(PlainBlockVersion, Seq(transfer()), generator = TestBlock.defaultSigner)) - - ps.onComplete() - Await.result(items, 2.seconds) shouldBe Seq( - (TestBlock.defaultSigner.toAddress, Waves), - (defaultAddress, Waves), - (TestBlock.defaultSigner.toAddress, Waves), - (defaultAddress, Waves), - (secondAddress, Waves) - ) - } - } } } diff --git a/node/src/test/scala/com/wavesplatform/state/RollbackSpec.scala b/node/src/test/scala/com/wavesplatform/state/RollbackSpec.scala index 59468de038e..79108aab543 100644 --- a/node/src/test/scala/com/wavesplatform/state/RollbackSpec.scala +++ b/node/src/test/scala/com/wavesplatform/state/RollbackSpec.scala @@ -372,22 +372,25 @@ class RollbackSpec extends FreeSpec with WithDomain { "data transaction" in { val sender = TxHelpers.signer(1) val initialBalance = 100.waves - val dataEntry = StringDataEntry("str", "test") + val dataEntry = StringDataEntry("str", "test-1") withDomain(createSettings(BlockchainFeatures.DataTransaction -> 0), Seq(AddrWithBalance(sender.toAddress, initialBalance))) { d => val genesisBlockId = d.lastBlockId - d.appendBlock( - TestBlock.create( - nextTs, - genesisBlockId, - Seq(TxHelpers.dataEntry(sender, dataEntry)) - ) + val firstBlock = TestBlock.create( + nextTs, + genesisBlockId, + Seq(TxHelpers.dataEntry(sender, dataEntry)) ) - + 
d.appendBlock(firstBlock) d.blockchainUpdater.accountData(sender.toAddress, dataEntry.key) should contain(dataEntry) - d.rollbackTo(genesisBlockId) - d.blockchainUpdater.accountData(sender.toAddress, dataEntry.key) shouldBe empty + val secondEntry = StringDataEntry("str", "test-2") + d.appendBlock(TxHelpers.data(sender, Seq(secondEntry))) + d.appendBlock() + d.blockchain.accountData(sender.toAddress, "str") shouldEqual Some(secondEntry) + + d.rollbackTo(firstBlock.id()) + d.blockchainUpdater.accountData(sender.toAddress, dataEntry.key) shouldEqual Some(dataEntry) } } @@ -678,16 +681,16 @@ class RollbackSpec extends FreeSpec with WithDomain { d.blockchain.leaseBalance(leaseRecipientAddress.toAddress) shouldBe LeaseBalance(in = leaseAmount, out = 0) d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance(in = 0, out = leaseAmount) d.blockchain.leaseDetails(leaseId1) shouldBe leaseDetails(invokeId1) - d.levelDBWriter.leaseDetails(leaseId1) shouldBe None + d.rocksDBWriter.leaseDetails(leaseId1) shouldBe None d.appendBlock() - d.levelDBWriter.leaseDetails(leaseId1) shouldBe leaseDetails(invokeId1) + d.rocksDBWriter.leaseDetails(leaseId1) shouldBe leaseDetails(invokeId1) d.blockchain.removeAfter(beforeInvoke1).explicitGet() d.blockchain.leaseBalance(leaseRecipientAddress.toAddress) shouldBe LeaseBalance.empty d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance.empty d.blockchain.leaseDetails(leaseId1) shouldBe None - d.levelDBWriter.leaseDetails(leaseId1) shouldBe None + d.rocksDBWriter.leaseDetails(leaseId1) shouldBe None // hardened block rollback val beforeInvoke2 = d.lastBlockId @@ -697,9 +700,9 @@ class RollbackSpec extends FreeSpec with WithDomain { d.blockchain.leaseBalance(leaseRecipientAddress.toAddress) shouldBe LeaseBalance(in = leaseAmount, out = 0) d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance(in = 0, out = leaseAmount) d.blockchain.leaseDetails(leaseId2) shouldBe leaseDetails(invokeId2) - d.levelDBWriter.leaseDetails(leaseId2) shouldBe None + d.rocksDBWriter.leaseDetails(leaseId2) shouldBe None d.appendBlock() - d.levelDBWriter.leaseDetails(leaseId2) shouldBe leaseDetails(invokeId2) + d.rocksDBWriter.leaseDetails(leaseId2) shouldBe leaseDetails(invokeId2) d.appendBlock() d.blockchain.removeAfter(beforeInvoke2).explicitGet() @@ -707,7 +710,7 @@ class RollbackSpec extends FreeSpec with WithDomain { d.blockchain.leaseBalance(leaseRecipientAddress.toAddress) shouldBe LeaseBalance.empty d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance.empty d.blockchain.leaseDetails(leaseId2) shouldBe None - d.levelDBWriter.leaseDetails(leaseId2) shouldBe None + d.rocksDBWriter.leaseDetails(leaseId2) shouldBe None } } } @@ -750,16 +753,16 @@ class RollbackSpec extends FreeSpec with WithDomain { d.blockchain.leaseBalance(leaseRecipientAddress) shouldBe LeaseBalance.empty d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance.empty d.blockchain.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight, cancelHeight, leaseCancelId) - d.levelDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) + d.rocksDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) d.appendBlock() - d.levelDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight, cancelHeight, leaseCancelId) + d.rocksDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight, cancelHeight, leaseCancelId) d.blockchain.removeAfter(beforeInvoke1).explicitGet() d.blockchain.leaseBalance(leaseRecipientAddress) shouldBe LeaseBalance(in = leaseAmount, 0) 
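Each of the lease assertions in this spec follows the same three-step pattern: the in-memory blockchain (the liquid NG block) reflects a lease immediately, the persistent writer — now `d.rocksDBWriter` instead of `d.levelDBWriter` — only sees it after the next `appendBlock()` solidifies the block, and a rollback clears both views. A condensed restatement of that pattern, reusing the spec's own `d`, `leaseDetails` and id fixtures:

// 1. Liquid state knows the lease, the persisted RocksDB state does not yet.
d.blockchain.leaseDetails(leaseId1) shouldBe leaseDetails(invokeId1)
d.rocksDBWriter.leaseDetails(leaseId1) shouldBe None

// 2. Appending one more block flushes the previous liquid block to RocksDB.
d.appendBlock()
d.rocksDBWriter.leaseDetails(leaseId1) shouldBe leaseDetails(invokeId1)

// 3. Rolling back removes the lease from both the liquid and the persisted view.
d.blockchain.removeAfter(beforeInvoke1).explicitGet()
d.blockchain.leaseDetails(leaseId1) shouldBe None
d.rocksDBWriter.leaseDetails(leaseId1) shouldBe None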
d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance(0, out = leaseAmount) d.blockchain.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) - d.levelDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) + d.rocksDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) // hardened block rollback val beforeInvoke2 = d.lastBlockId @@ -768,9 +771,9 @@ class RollbackSpec extends FreeSpec with WithDomain { d.blockchain.leaseBalance(leaseRecipientAddress) shouldBe LeaseBalance.empty d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance.empty d.blockchain.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight, cancelHeight, leaseCancelId1) - d.levelDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) + d.rocksDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) d.appendBlock() - d.levelDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight, cancelHeight, leaseCancelId1) + d.rocksDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight, cancelHeight, leaseCancelId1) d.appendBlock() d.blockchain.removeAfter(beforeInvoke2).explicitGet() @@ -778,7 +781,7 @@ class RollbackSpec extends FreeSpec with WithDomain { d.blockchain.leaseBalance(leaseRecipientAddress) shouldBe LeaseBalance(in = leaseAmount, 0) d.blockchain.leaseBalance(checkAddress) shouldBe LeaseBalance(0, out = leaseAmount) d.blockchain.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) - d.levelDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) + d.rocksDBWriter.leaseDetails(leaseId) shouldBe leaseDetails(leaseHeight) } "leaseCancel with lease tx" in { diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/BlockDifferDetailedDiffTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/BlockDifferDetailedDiffTest.scala index 9ce88f94aea..aa018c71421 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/BlockDifferDetailedDiffTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/BlockDifferDetailedDiffTest.scala @@ -18,7 +18,7 @@ class BlockDifferDetailedDiffTest extends FreeSpec with WithState { private def assertDetailedDiff(preconditions: Seq[Block], block: Block, fs: FunctionalitySettings = TFS.Enabled)( assertion: (Diff, DetailedDiff) => Unit ): Unit = - withLevelDBWriter(fs) { state => + withRocksDBWriter(fs) { state => def differ(blockchain: Blockchain, prevBlock: Option[Block], b: Block) = BlockDiffer.fromBlock(blockchain, prevBlock, b, MiningConstraint.Unlimited, b.header.generationSignature) @@ -41,12 +41,11 @@ class BlockDifferDetailedDiffTest extends FreeSpec with WithState { } val (master, b) = genesisBlock - assertDetailedDiff(Seq.empty, b) { - case (diff, DetailedDiff(parentDiff, transactionDiffs)) => - diff.portfolios(master).balance shouldBe ENOUGH_AMT - parentDiff.portfolios.get(master) shouldBe None - transactionDiffs.length shouldBe 1 - transactionDiffs.head.portfolios(master).balance shouldBe ENOUGH_AMT + assertDetailedDiff(Seq.empty, b) { case (diff, DetailedDiff(parentDiff, transactionDiffs)) => + diff.portfolios(master).balance shouldBe ENOUGH_AMT + parentDiff.portfolios.get(master) shouldBe None + transactionDiffs.length shouldBe 1 + transactionDiffs.head.portfolios(master).balance shouldBe ENOUGH_AMT } } @@ -70,23 +69,21 @@ class BlockDifferDetailedDiffTest extends FreeSpec with WithState { "transaction diffs are correct" in { val (addr1, addr2, amt1, amt2, b) = genesisTransfersBlock - assertDetailedDiff(Seq.empty, b) { - case (_, DetailedDiff(_, td)) => - val 
transactionDiffs = td.reverse - transactionDiffs.head.portfolios(addr1).balance shouldBe ENOUGH_AMT - transactionDiffs(1).portfolios(addr1).balance shouldBe -(amt1 + transactionFee) - transactionDiffs(1).portfolios(addr2).balance shouldBe amt1 - transactionDiffs(2).portfolios(addr2).balance shouldBe -(amt2 + transactionFee) - transactionDiffs(2).portfolios(addr1).balance shouldBe amt2 + assertDetailedDiff(Seq.empty, b) { case (_, DetailedDiff(_, td)) => + val transactionDiffs = td.reverse + transactionDiffs.head.portfolios(addr1).balance shouldBe ENOUGH_AMT + transactionDiffs(1).portfolios(addr1).balance shouldBe -(amt1 + transactionFee) + transactionDiffs(1).portfolios(addr2).balance shouldBe amt1 + transactionDiffs(2).portfolios(addr2).balance shouldBe -(amt2 + transactionFee) + transactionDiffs(2).portfolios(addr1).balance shouldBe amt2 } } "miner reward is correct" - { "without NG" in { val (addr1, _, _, _, b) = genesisTransfersBlock - assertDetailedDiff(Seq.empty, b) { - case (_, DetailedDiff(parentDiff, _)) => - parentDiff.portfolios(addr1).balance shouldBe 20 + assertDetailedDiff(Seq.empty, b) { case (_, DetailedDiff(parentDiff, _)) => + parentDiff.portfolios(addr1).balance shouldBe 20 } } @@ -95,9 +92,8 @@ class BlockDifferDetailedDiffTest extends FreeSpec with WithState { "no history — only 40% from current block" in { val (addr1, _, _, _, b) = genesisTransfersBlock - assertDetailedDiff(Seq.empty, b, ngFs) { - case (_, DetailedDiff(parentDiff, _)) => - parentDiff.portfolios(addr1).balance shouldBe (transactionFee * 2 * 0.4) // 40% + assertDetailedDiff(Seq.empty, b, ngFs) { case (_, DetailedDiff(parentDiff, _)) => + parentDiff.portfolios(addr1).balance shouldBe (transactionFee * 2 * 0.4) // 40% } } @@ -124,9 +120,8 @@ class BlockDifferDetailedDiffTest extends FreeSpec with WithState { } val (history, block, ngMiner) = blocksNgMiner - assertDetailedDiff(history, block, ngFs) { - case (_, DetailedDiff(parentDiff, _)) => - parentDiff.portfolios(ngMiner).balance shouldBe transactionFee // 60% + 40% + assertDetailedDiff(history, block, ngFs) { case (_, DetailedDiff(parentDiff, _)) => + parentDiff.portfolios(ngMiner).balance shouldBe transactionFee // 60% + 40% } } } diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTest.scala index 575cdcd9262..c40f7193fdd 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTest.scala @@ -33,30 +33,28 @@ class CommonValidationTest extends PropSpec with WithState { } } - preconditionsAndPayment.foreach { - case (genesis, transfer) => - assertDiffEi(Seq(TestBlock.create(Seq(genesis, transfer))), TestBlock.create(Seq(transfer))) { blockDiffEi => - blockDiffEi should produce("AlreadyInTheState") - } - - assertDiffEi(Seq(TestBlock.create(Seq(genesis))), TestBlock.create(Seq(transfer, transfer))) { blockDiffEi => - blockDiffEi should produce("AlreadyInTheState") - } + preconditionsAndPayment.foreach { case (genesis, transfer) => + assertDiffEi(Seq(TestBlock.create(Seq(genesis, transfer))), TestBlock.create(Seq(transfer))) { blockDiffEi => + blockDiffEi should produce("AlreadyInTheState") + } + + assertDiffEi(Seq(TestBlock.create(Seq(genesis))), TestBlock.create(Seq(transfer, transfer))) { blockDiffEi => + blockDiffEi should produce("AlreadyInTheState") + } } } private def sponsoredTransactionsCheckFeeTest(feeInAssets: Boolean, feeAmount: 
Long)(f: Either[ValidationError, Unit] => Any): Unit = { val settings = createSettings(BlockchainFeatures.FeeSponsorship -> 0) val gen = sponsorAndSetScript(sponsorship = true, smartToken = false, smartAccount = false, feeInAssets, feeAmount) - forAll(gen) { - case (genesisBlock, transferTx) => - withLevelDBWriter(settings) { blockchain => - val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = - BlockDiffer.fromBlock(blockchain, None, genesisBlock, MiningConstraint.Unlimited, genesisBlock.header.generationSignature).explicitGet() - blockchain.append(preconditionDiff, preconditionFees, totalFee, None, genesisBlock.header.generationSignature, genesisBlock) - - f(FeeValidation(blockchain, transferTx)) - } + forAll(gen) { case (genesisBlock, transferTx) => + withRocksDBWriter(settings) { blockchain => + val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = + BlockDiffer.fromBlock(blockchain, None, genesisBlock, MiningConstraint.Unlimited, genesisBlock.header.generationSignature).explicitGet() + blockchain.append(preconditionDiff, preconditionFees, totalFee, None, genesisBlock.header.generationSignature, genesisBlock) + + f(FeeValidation(blockchain, transferTx)) + } } } @@ -71,7 +69,7 @@ class CommonValidationTest extends PropSpec with WithState { private def smartAccountCheckFeeTest(feeInAssets: Boolean, feeAmount: Long)(f: Either[ValidationError, Unit] => Any): Unit = { val settings = createSettings(BlockchainFeatures.SmartAccounts -> 0) val (genesisBlock, transferTx) = sponsorAndSetScript(sponsorship = false, smartToken = false, smartAccount = true, feeInAssets, feeAmount) - withLevelDBWriter(settings) { blockchain => + withRocksDBWriter(settings) { blockchain => val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = BlockDiffer.fromBlock(blockchain, None, genesisBlock, MiningConstraint.Unlimited, genesisBlock.header.generationSignature).explicitGet() blockchain.append(preconditionDiff, preconditionFees, totalFee, None, genesisBlock.header.generationSignature, genesisBlock) @@ -137,12 +135,16 @@ class CommonValidationTest extends PropSpec with WithState { private def createSettings(preActivatedFeatures: (BlockchainFeature, Int)*): FunctionalitySettings = TestFunctionalitySettings.Enabled - .copy(featureCheckBlocksPeriod = 1, blocksForFeatureActivation = 1, preActivatedFeatures = preActivatedFeatures.map { case (k, v) => k.id -> v }.toMap) + .copy( + featureCheckBlocksPeriod = 1, + blocksForFeatureActivation = 1, + preActivatedFeatures = preActivatedFeatures.map { case (k, v) => k.id -> v }.toMap + ) private def smartTokensCheckFeeTest(feeInAssets: Boolean, feeAmount: Long)(f: Either[ValidationError, Unit] => Any): Unit = { val settings = createSettings(BlockchainFeatures.SmartAccounts -> 0, BlockchainFeatures.SmartAssets -> 0) val (genesisBlock, transferTx) = sponsorAndSetScript(sponsorship = false, smartToken = true, smartAccount = false, feeInAssets, feeAmount) - withLevelDBWriter(settings) { blockchain => + withRocksDBWriter(settings) { blockchain => val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = BlockDiffer.fromBlock(blockchain, None, genesisBlock, MiningConstraint.Unlimited, genesisBlock.header.generationSignature).explicitGet() blockchain.append(preconditionDiff, preconditionFees, totalFee, None, genesisBlock.header.generationSignature, genesisBlock) @@ -198,12 +200,11 @@ class CommonValidationTest extends PropSpec with WithState { ).map(genesis -> _) } - preconditionsAndPayment.foreach { 
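Stripped of the scalafmt noise, every fee-check helper in this file follows the same recipe: open a RocksDB-backed writer with `withRocksDBWriter` (the renamed `withLevelDBWriter`), apply the generated genesis block through `BlockDiffer.fromBlock` plus `blockchain.append`, and only then run the `FeeValidation` assertion. A minimal restatement of that recipe, reusing `settings`, `genesisBlock`, `transferTx` and the callback `f` from the helper above:

withRocksDBWriter(settings) { blockchain =>
  // Materialise the genesis block first, so the fee check runs against real persisted state.
  val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) =
    BlockDiffer
      .fromBlock(blockchain, None, genesisBlock, MiningConstraint.Unlimited, genesisBlock.header.generationSignature)
      .explicitGet()
  blockchain.append(preconditionDiff, preconditionFees, totalFee, None, genesisBlock.header.generationSignature, genesisBlock)

  // Only then run the assertion supplied by the individual fee test.
  f(FeeValidation(blockchain, transferTx))
}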
- case (genesis, tx) => - tx.chainId should not be AddressScheme.current.chainId - assertDiffEi(Seq(TestBlock.create(Seq(genesis))), TestBlock.create(Seq(tx))) { blockDiffEi => - blockDiffEi should produce("Address belongs to another network") - } + preconditionsAndPayment.foreach { case (genesis, tx) => + tx.chainId should not be AddressScheme.current.chainId + assertDiffEi(Seq(TestBlock.create(Seq(genesis))), TestBlock.create(Seq(tx))) { blockDiffEi => + blockDiffEi should produce("Address belongs to another network") + } } } } diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTimeTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTimeTest.scala index b40420520fa..fbf46958834 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTimeTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/CommonValidationTimeTest.scala @@ -17,7 +17,7 @@ class CommonValidationTimeTest extends PropSpec with WithState { val transfer = TxHelpers.transfer(master, recipient.toAddress, timestamp = prevBlockTs - Enabled.maxTransactionTimeBackOffset.toMillis - 1) - withLevelDBWriter(Enabled) { blockchain: Blockchain => + withRocksDBWriter(Enabled) { blockchain: Blockchain => val result = TransactionDiffer(Some(prevBlockTs), blockTs)(blockchain, transfer).resultE result should produce("in the past relative to previous block timestamp") } @@ -33,7 +33,7 @@ class CommonValidationTimeTest extends PropSpec with WithState { val transfer = TxHelpers.transfer(master, recipient.toAddress, timestamp = blockTs + Enabled.maxTransactionTimeForwardOffset.toMillis + 1) val functionalitySettings = Enabled.copy(lastTimeBasedForkParameter = blockTs - 1) - withLevelDBWriter(functionalitySettings) { blockchain: Blockchain => + withRocksDBWriter(functionalitySettings) { blockchain: Blockchain => TransactionDiffer(Some(prevBlockTs), blockTs)(blockchain, transfer).resultE should produce("in the future relative to block timestamp") } diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/CreateAliasTransactionDiffTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/CreateAliasTransactionDiffTest.scala index 22236679694..57df862b258 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/CreateAliasTransactionDiffTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/CreateAliasTransactionDiffTest.scala @@ -14,6 +14,7 @@ import com.wavesplatform.transaction.assets.IssueTransaction import com.wavesplatform.transaction.lease.LeaseTransaction import com.wavesplatform.transaction.transfer.TransferTransaction import com.wavesplatform.transaction.{Asset, CreateAliasTransaction, GenesisTransaction, TransactionType, TxHelpers, TxVersion} +import monix.execution.Scheduler.Implicits.global class CreateAliasTransactionDiffTest extends PropSpec with WithState { @@ -67,7 +68,7 @@ class CreateAliasTransactionDiffTest extends PropSpec with WithState { val senderAcc = anotherAliasTx.sender.toAddress blockDiff.aliases shouldBe Map(anotherAliasTx.alias -> senderAcc) - addressTransactions(db, Some(Height(newState.height + 1) -> blockDiff), senderAcc, Set(TransactionType.CreateAlias), None).collect { + addressTransactions(rdb, Some(Height(newState.height + 1) -> blockDiff), senderAcc, Set(TransactionType.CreateAlias), None).collect { case (_, cat: CreateAliasTransaction) => cat.alias }.toSet shouldBe Set( anotherAliasTx.alias, diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ReissueTransactionDiffTest.scala 
b/node/src/test/scala/com/wavesplatform/state/diffs/ReissueTransactionDiffTest.scala index 71bfe84f804..1ef5b8cb889 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ReissueTransactionDiffTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ReissueTransactionDiffTest.scala @@ -34,14 +34,12 @@ class ReissueTransactionDiffTest extends PropSpec with WithState with EitherValu } property("Reissue transaction's fee before feature activation is 1 WAVES") { - beforeActivationScenario.foreach { - case (bs, txs) => - checkFee(bs, txs) { - case (result, lessResult, moreResult) => - result.explicitGet() - lessResult.left.value - moreResult.explicitGet() - } + beforeActivationScenario.foreach { case (bs, txs) => + checkFee(bs, txs) { case (result, lessResult, moreResult) => + result.explicitGet() + lessResult.left.value + moreResult.explicitGet() + } } } @@ -69,19 +67,17 @@ class ReissueTransactionDiffTest extends PropSpec with WithState with EitherValu } property("Reissue transaction's fee after feature activation is 0.001 WAVES") { - afterActivationScenario.foreach { - case (bs, txs) => - checkFee(bs, txs) { - case (result, lessResult, moreResult) => - result.explicitGet() - lessResult.left.value - moreResult.explicitGet() - } + afterActivationScenario.foreach { case (bs, txs) => + checkFee(bs, txs) { case (result, lessResult, moreResult) => + result.explicitGet() + lessResult.left.value + moreResult.explicitGet() + } } } private def checkFee(preconditions: Seq[Block], txs: TransactionsForCheck)(f: ValidationResults => Any): Unit = - withLevelDBWriter(fs) { blockchain => + withRocksDBWriter(fs) { blockchain => preconditions.foreach { block => val BlockDiffer.Result(preconditionDiff, preconditionFees, totalFee, _, _) = BlockDiffer.fromBlock(blockchain, blockchain.lastBlock, block, MiningConstraint.Unlimited, block.header.generationSignature).explicitGet() @@ -123,10 +119,14 @@ object ReissueTransactionDiffTest { type ValidationResults = (Either[ValidationError, Unit], Either[ValidationError, Unit], Either[ValidationError, Unit]) val fs: FunctionalitySettings = - TestFunctionalitySettings.Enabled.copy(featureCheckBlocksPeriod = 1, blocksForFeatureActivation = 1, preActivatedFeatures = TestFunctionalitySettings.Enabled.preActivatedFeatures ++ Seq( + TestFunctionalitySettings.Enabled.copy( + featureCheckBlocksPeriod = 1, + blocksForFeatureActivation = 1, + preActivatedFeatures = TestFunctionalitySettings.Enabled.preActivatedFeatures ++ Seq( BlockchainFeatures.FeeSponsorship.id -> 0, BlockchainFeatures.BlockV5.id -> 3 - )) + ) + ) val BeforeActivationFee: Long = 1 * Constants.UnitsInWave val AfterActivationFee: Long = 100000 diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/TransactionValidationErrorPrintTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/TransactionValidationErrorPrintTest.scala index 28e43411bd5..ef67f40a402 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/TransactionValidationErrorPrintTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/TransactionValidationErrorPrintTest.scala @@ -97,7 +97,8 @@ class TransactionValidationErrorPrintTest extends PropSpec with Inside with With assertDiffEi( Seq(TestBlock.create(Seq(genesis1, genesis2, issueTransaction, preTransferTransaction, preSetAssetScriptTransaction))), TestBlock.create(Seq(transferTransaction)), - RideV6.blockchainSettings.functionalitySettings + RideV6.blockchainSettings.functionalitySettings, + enableExecutionLog = true ) { error => inside(error) { 
case Left(TransactionValidationError(see: ScriptExecutionError, _)) => val expected = diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/BigIntInvokeTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/ci/BigIntInvokeTest.scala index 10c58af5b34..29b0056786d 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/BigIntInvokeTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/BigIntInvokeTest.scala @@ -149,7 +149,7 @@ class BigIntInvokeTest extends PropSpec with Inside with WithState with DBCacheS d.liquidDiff.errorMessage(invoke.id()) shouldBe None d.liquidDiff.scriptsRun shouldBe 2 - d.liquidDiff.accountData.head._2.data("key").value shouldBe 1 + d.liquidDiff.accountData.head._2("key").value shouldBe 1 } } } diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/CallableV4DiffTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/ci/CallableV4DiffTest.scala index 1b8f289e0c7..5cb5da64d6f 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/CallableV4DiffTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/CallableV4DiffTest.scala @@ -132,7 +132,7 @@ class CallableV4DiffTest extends PropSpec with WithDomain with EitherValues { TestBlock.create(Seq(invoke)), features ) { case (diff, _) => - diff.accountData(master.toAddress).data shouldBe + diff.accountData(master.toAddress) shouldBe Map( "key1" -> EmptyDataEntry("key1"), "key2" -> EmptyDataEntry("key2") diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAffectedAddressTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAffectedAddressTest.scala index bf63d1d8991..27df11b5e25 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAffectedAddressTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAffectedAddressTest.scala @@ -7,10 +7,10 @@ import com.wavesplatform.db.WithState.AddrWithBalance import com.wavesplatform.lang.directives.values.V5 import com.wavesplatform.lang.v1.compiler.TestCompiler import com.wavesplatform.test.PropSpec -import com.wavesplatform.transaction.TxHelpers._ +import com.wavesplatform.transaction.TxHelpers.* class InvokeAffectedAddressTest extends PropSpec with WithDomain { - import DomainPresets._ + import DomainPresets.* private def dApp(failed: Boolean) = TestCompiler(V5).compileContract( diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAssetChecksTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAssetChecksTest.scala index cb5c73c2ac2..41844093542 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAssetChecksTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeAssetChecksTest.scala @@ -22,9 +22,9 @@ class InvokeAssetChecksTest extends PropSpec with Inside with WithState with DBC import DomainPresets.* private val invalidLengthAsset = IssuedAsset(ByteStr.decodeBase58("WAVES").get) - private val nonExistentAsset = IssuedAsset(ByteStr.decodeBase58("WAVESwavesWAVESwavesWAVESwavesWAVESwaves123").get) + private val nonExistentAsset = IssuedAsset(ByteStr.decodeBase58("WAVESwavesWAVESwavesWAVESwavesWAVESwaves123").get) - private val lengthError = s"Transfer error: invalid asset ID '$invalidLengthAsset' length = 4 bytes, must be 32" + private val lengthError = s"Transfer error: invalid asset ID '$invalidLengthAsset' length = 4 bytes, must be 32" private val nonExistentError = s"Transfer error: asset '$nonExistentAsset' is not found on the blockchain" 
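The assertion changes in `BigIntInvokeTest`, `CallableV4DiffTest` and (further down) `EthereumInvokeTest` all drop the intermediate `.data` accessor, which suggests that `Diff.accountData` now maps an address straight to its key-to-`DataEntry` entries. A hedged sketch of the new access pattern — the exact type is inferred from these assertions, not taken from the production sources:

// Before this patch:  diff.accountData(master.toAddress).data("key1")
// After this patch:   diff.accountData(master.toAddress)("key1")
val entries = diff.accountData(master.toAddress) // assumed: Map[String, DataEntry[?]]

entries("key1") shouldBe EmptyDataEntry("key1")
entries("key2") shouldBe EmptyDataEntry("key2")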
property("invoke asset checks") { @@ -48,15 +48,15 @@ class InvokeAssetChecksTest extends PropSpec with Inside with WithState with DBC for { activated <- Seq(true, false) - func <- Seq("invalidLength", "unexisting") + func <- Seq("invalidLength", "unexisting") } { - tempDb { _ => - val miner = TxHelpers.signer(0).toAddress - val invoker = TxHelpers.signer(1) - val master = TxHelpers.signer(2) - val balances = AddrWithBalance.enoughBalances(invoker, master) + { + val miner = TxHelpers.signer(0).toAddress + val invoker = TxHelpers.signer(1) + val master = TxHelpers.signer(2) + val balances = AddrWithBalance.enoughBalances(invoker, master) val setScriptTx = TxHelpers.setScript(master, dApp) - val invoke = TxHelpers.invoke(master.toAddress, Some(func), invoker = invoker) + val invoke = TxHelpers.invoke(master.toAddress, Some(func), invoker = invoker) val dAppAddress = master.toAddress @@ -74,7 +74,7 @@ class InvokeAssetChecksTest extends PropSpec with Inside with WithState with DBC invokeInfo(false), portfolios = Map( invoke.senderAddress -> Portfolio(-invoke.fee.value), - miner -> Portfolio((setScriptTx.fee.value * 0.6 + invoke.fee.value * 0.4).toLong + 6.waves) + miner -> Portfolio((setScriptTx.fee.value * 0.6 + invoke.fee.value * 0.4).toLong + 6.waves) ), scriptsComplexity = 8, scriptResults = Map(invoke.id() -> InvokeScriptResult(error = Some(ErrorMessage(1, expectingMessage)))) @@ -85,8 +85,8 @@ class InvokeAssetChecksTest extends PropSpec with Inside with WithState with DBC invokeInfo(true), portfolios = Map( invoke.senderAddress -> Portfolio(-invoke.fee.value, assets = VectorMap(asset -> 0)), - dAppAddress -> Portfolio.build(asset, 0), - miner -> Portfolio((setScriptTx.fee.value * 0.6 + invoke.fee.value * 0.4).toLong + 6.waves) + dAppAddress -> Portfolio.build(asset, 0), + miner -> Portfolio((setScriptTx.fee.value * 0.6 + invoke.fee.value * 0.4).toLong + 6.waves) ), scriptsRun = 1, scriptsComplexity = 18, @@ -196,11 +196,11 @@ class InvokeAssetChecksTest extends PropSpec with Inside with WithState with DBC val sigVerify = s"""strict c = ${(1 to 5).map(_ => "sigVerify(base58'', base58'', base58'')").mkString(" || ")} """ def dApp(name: String = "name", description: String = "") = TestCompiler(V5).compileContract( s""" - | @Callable(i) - | func default() = [ - | ${if (complex) sigVerify else ""} - | Issue("$name", "$description", 1000, 4, true, unit, 0) - | ] + | @Callable(i) + | func default() = [ + | ${if (complex) sigVerify else ""} + | Issue("$name", "$description", 1000, 4, true, unit, 0) + | ] """.stripMargin ) diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeScriptTransactionDiffTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeScriptTransactionDiffTest.scala index dcf2d756a75..883820667a1 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeScriptTransactionDiffTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/InvokeScriptTransactionDiffTest.scala @@ -1154,7 +1154,7 @@ class InvokeScriptTransactionDiffTest extends PropSpec with WithDomain with DBCa ) .anyNumberOfTimes() InvokeScriptTransactionDiff - .apply(blockchain, invoke.timestamp, limitedExecution = false)(invoke) + .apply(blockchain, invoke.timestamp, limitedExecution = false, enableExecutionLog = false)(invoke) .resultE should produceRejectOrFailedDiff("is already issued") } diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/MultiPaymentInvokeDiffTest.scala 
b/node/src/test/scala/com/wavesplatform/state/diffs/ci/MultiPaymentInvokeDiffTest.scala index d95549969fa..207269155f8 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/MultiPaymentInvokeDiffTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/MultiPaymentInvokeDiffTest.scala @@ -15,9 +15,9 @@ import com.wavesplatform.state.diffs.* import com.wavesplatform.test.* import com.wavesplatform.transaction.Asset.IssuedAsset import com.wavesplatform.transaction.assets.IssueTransaction -import com.wavesplatform.transaction.{GenesisTransaction, TxHelpers, TxVersion} import com.wavesplatform.transaction.smart.InvokeScriptTransaction.Payment import com.wavesplatform.transaction.smart.{InvokeScriptTransaction, SetScriptTransaction} +import com.wavesplatform.transaction.{GenesisTransaction, TxHelpers, TxVersion} class MultiPaymentInvokeDiffTest extends PropSpec with WithState { private val oldVersions = Seq(V1, V2, V3) @@ -28,25 +28,23 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { dApp(V4, transferPaymentAmount = wavesTransfer, _), accountVerifiers(V4), verifier(V4, Asset) - ).foreach { - case (genesis, setVerifier, setDApp, ci, issues, dAppAcc, invoker, fee) => - assertDiffAndState( - Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), - TestBlock.create(Seq(ci)), - features - ) { - case (diff, blockchain) => - val assetBalance = issues - .map(_.id()) - .map(IssuedAsset(_)) - .map(asset => asset -> blockchain.balance(dAppAcc.toAddress, asset)) - .filter(_._2 != 0) - .toMap + ).foreach { case (genesis, setVerifier, setDApp, ci, issues, dAppAcc, invoker, fee) => + assertDiffAndState( + Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), + TestBlock.create(Seq(ci)), + features + ) { case (diff, blockchain) => + val assetBalance = issues + .map(_.id()) + .map(IssuedAsset(_)) + .map(asset => asset -> blockchain.balance(dAppAcc.toAddress, asset)) + .filter(_._2 != 0) + .toMap - diff.portfolios(dAppAcc.toAddress).assets shouldBe assetBalance - diff.portfolios(dAppAcc.toAddress).balance shouldBe -wavesTransfer - diff.portfolios(invoker.toAddress).balance shouldBe wavesTransfer - fee - } + diff.portfolios(dAppAcc.toAddress).assets shouldBe assetBalance + diff.portfolios(dAppAcc.toAddress).balance shouldBe -wavesTransfer + diff.portfolios(invoker.toAddress).balance shouldBe wavesTransfer - fee + } } } @@ -57,23 +55,21 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { accountVerifiers(V4), verifier(V4, Asset), repeatAdditionalAsset = true - ).foreach { - case (genesis, setVerifier, setDApp, ci, issues, dAppAcc, _, _) => - assertDiffAndState( - Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), - TestBlock.create(Seq(ci)), - features - ) { - case (diff, blockchain) => - val assetBalance = issues - .map(_.id()) - .map(IssuedAsset(_)) - .map(asset => asset -> blockchain.balance(dAppAcc.toAddress, asset)) - .filter(_._2 != 0) - .toMap + ).foreach { case (genesis, setVerifier, setDApp, ci, issues, dAppAcc, _, _) => + assertDiffAndState( + Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), + TestBlock.create(Seq(ci)), + features + ) { case (diff, blockchain) => + val assetBalance = issues + .map(_.id()) + .map(IssuedAsset(_)) + .map(asset => asset -> blockchain.balance(dAppAcc.toAddress, asset)) + .filter(_._2 != 0) + .toMap - diff.portfolios(dAppAcc.toAddress).assets shouldBe assetBalance - } + diff.portfolios(dAppAcc.toAddress).assets shouldBe assetBalance + } } } 
@@ -88,15 +84,14 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { verifier(V4, Asset, result = "false") ) ) - ).foreach { - case (genesis, setVerifier, setDApp, ci, issues, _, _, _) => - assertDiffEi( - Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), - TestBlock.create(Seq(ci)), - features - )(_ should matchPattern { - case Right(diff: Diff) if diff.transactions.exists(!_.applied) => - }) + ).foreach { case (genesis, setVerifier, setDApp, ci, issues, _, _, _) => + assertDiffEi( + Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), + TestBlock.create(Seq(ci)), + features + )(_ should matchPattern { + case Right(diff: Diff) if diff.transactions.exists(!_.applied) => + }) } } @@ -121,20 +116,19 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { accountVerifiers(V4), verifier(V4, Asset), withEnoughFee = false - ).foreach { - case (genesis, setVerifier, setDApp, ci, issues, _, _, _) => - assertDiffEi( - Seq(TestBlock.create(genesis ++ issues ++ Seq(setVerifier, setDApp))), - TestBlock.create(Seq(ci)), - features - ) { - val expectedFee = (0.005 + 0.004 + 0.004 * (ContractLimits.MaxAttachedPaymentAmount - 1)) * Constants.UnitsInWave - _ should produceRejectOrFailedDiff( - s"Fee in WAVES for InvokeScriptTransaction (${ci.fee} in WAVES) " + - s"with ${ContractLimits.MaxAttachedPaymentAmount} total scripts invoked " + - s"does not exceed minimal value of ${expectedFee.toLong} WAVES" - ) - } + ).foreach { case (genesis, setVerifier, setDApp, ci, issues, _, _, _) => + assertDiffEi( + Seq(TestBlock.create(genesis ++ issues ++ Seq(setVerifier, setDApp))), + TestBlock.create(Seq(ci)), + features + ) { + val expectedFee = (0.005 + 0.004 + 0.004 * (ContractLimits.MaxAttachedPaymentAmount - 1)) * Constants.UnitsInWave + _ should produceRejectOrFailedDiff( + s"Fee in WAVES for InvokeScriptTransaction (${ci.fee} in WAVES) " + + s"with ${ContractLimits.MaxAttachedPaymentAmount} total scripts invoked " + + s"does not exceed minimal value of ${expectedFee.toLong} WAVES" + ) + } } } @@ -145,25 +139,23 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { accountVerifiers(V3), verifier(V3, Asset), multiPayment = false - ).foreach { - case (genesis, setVerifier, setDApp, ci, issues, dAppAcc, invoker, fee) => - assertDiffAndState( - Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), - TestBlock.create(Seq(ci)), - features - ) { - case (diff, blockchain) => - val assetBalance = issues - .map(_.id()) - .map(IssuedAsset(_)) - .map(asset => asset -> blockchain.balance(dAppAcc.toAddress, asset)) - .filter(_._2 != 0) - .toMap + ).foreach { case (genesis, setVerifier, setDApp, ci, issues, dAppAcc, invoker, fee) => + assertDiffAndState( + Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), + TestBlock.create(Seq(ci)), + features + ) { case (diff, blockchain) => + val assetBalance = issues + .map(_.id()) + .map(IssuedAsset(_)) + .map(asset => asset -> blockchain.balance(dAppAcc.toAddress, asset)) + .filter(_._2 != 0) + .toMap - diff.portfolios(dAppAcc.toAddress).assets shouldBe assetBalance - diff.portfolios(dAppAcc.toAddress).balance shouldBe -wavesTransfer - diff.portfolios(invoker.toAddress).balance shouldBe wavesTransfer - fee - } + diff.portfolios(dAppAcc.toAddress).assets shouldBe assetBalance + diff.portfolios(dAppAcc.toAddress).balance shouldBe -wavesTransfer + diff.portfolios(invoker.toAddress).balance shouldBe wavesTransfer - fee + } } } @@ -173,13 +165,12 @@ class 
MultiPaymentInvokeDiffTest extends PropSpec with WithState { dApp(V3, transferPaymentAmount = wavesTransfer, _), accountVerifiers(V3), verifier(V3, Asset) - ).foreach { - case (genesis, setVerifier, setDApp, ci, issues, _, _, _) => - assertDiffEi( - Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), - TestBlock.create(Seq(ci)), - features.copy(preActivatedFeatures = features.preActivatedFeatures - BlockchainFeatures.BlockV5.id) - ) { _ should produce("Multiple payments isn't allowed now") } + ).foreach { case (genesis, setVerifier, setDApp, ci, issues, _, _, _) => + assertDiffEi( + Seq(TestBlock.create(genesis ++ issues ++ Seq(setDApp, setVerifier))), + TestBlock.create(Seq(ci)), + features.copy(preActivatedFeatures = features.preActivatedFeatures - BlockchainFeatures.BlockV5.id) + ) { _ should produce("Multiple payments isn't allowed now") } } } @@ -213,11 +204,12 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { for { accountScript <- verifiers - additionalAssetScript <- if (additionalAssetScripts.exists(_.nonEmpty)) { - additionalAssetScripts.toSeq.flatten.map(Some(_)) - } else { - Seq(None) - } + additionalAssetScript <- + if (additionalAssetScripts.exists(_.nonEmpty)) { + additionalAssetScripts.toSeq.flatten.map(Some(_)) + } else { + Seq(None) + } } yield { val setVerifier = TxHelpers.setScript(invoker, accountScript) val setDApp = TxHelpers.setScript(master, dApp(invoker)) @@ -276,17 +268,17 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { private def dApp(version: StdLibVersion, transferPaymentAmount: Int, transferRecipient: KeyPair): Script = { val resultSyntax = if (version >= V4) "" else "TransferSet" TestCompiler(version).compileContract(s""" - | {-# STDLIB_VERSION ${version.id} #-} - | {-# CONTENT_TYPE DAPP #-} - | {-# SCRIPT_TYPE ACCOUNT #-} - | - | @Callable(i) - | func default() = $resultSyntax([ScriptTransfer( - | Address(base58'${transferRecipient.toAddress}'), - | $transferPaymentAmount, - | unit - | )]) - | + | {-# STDLIB_VERSION ${version.id} #-} + | {-# CONTENT_TYPE DAPP #-} + | {-# SCRIPT_TYPE ACCOUNT #-} + | + | @Callable(i) + | func default() = $resultSyntax([ScriptTransfer( + | Address(base58'${transferRecipient.toAddress}'), + | $transferPaymentAmount, + | unit + | )]) + | """.stripMargin) } @@ -295,21 +287,21 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { val verifierExpr = if (usePaymentsField) s""" - | match tx { - | case ist: InvokeScriptTransaction => ist.$paymentsField == ist.$paymentsField - | case _ => true - | } + | match tx { + | case ist: InvokeScriptTransaction => ist.$paymentsField == ist.$paymentsField + | case _ => true + | } """.stripMargin else "true" TestCompiler(version).compileContract(s""" - | {-# STDLIB_VERSION ${version.id} #-} - | {-# CONTENT_TYPE DAPP #-} - | {-# SCRIPT_TYPE ACCOUNT #-} - | - | @Verifier(tx) - | func verify() = $verifierExpr - | + | {-# STDLIB_VERSION ${version.id} #-} + | {-# CONTENT_TYPE DAPP #-} + | {-# SCRIPT_TYPE ACCOUNT #-} + | + | @Verifier(tx) + | func verify() = $verifierExpr + | """.stripMargin) } @@ -331,10 +323,12 @@ class MultiPaymentInvokeDiffTest extends PropSpec with WithState { else Seq(verifier(version, Account)) - private val features = TestFunctionalitySettings.Enabled.copy(preActivatedFeatures = Seq( + private val features = TestFunctionalitySettings.Enabled.copy(preActivatedFeatures = + Seq( BlockchainFeatures.SmartAccounts, BlockchainFeatures.SmartAssets, BlockchainFeatures.Ride4DApps, BlockchainFeatures.BlockV5 - 
).map(_.id -> 0).toMap) + ).map(_.id -> 0).toMap + ) } diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/ci/sync/SyncDAppErrorLogTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/ci/sync/SyncDAppErrorLogTest.scala index 078d01b47f2..f870f12d09d 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/ci/sync/SyncDAppErrorLogTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/ci/sync/SyncDAppErrorLogTest.scala @@ -879,7 +879,8 @@ class SyncDAppErrorLogTest extends PropSpec with WithDomain with OptionValues { TestBlock.create(Seq(setScript)) ), TestBlock.create(Seq(invoke)), - settings.blockchainSettings.functionalitySettings + settings.blockchainSettings.functionalitySettings, + enableExecutionLog = true ) { result => result.trace .collectFirst { case invokeTrace: InvokeScriptTrace => @@ -946,7 +947,7 @@ class SyncDAppErrorLogTest extends PropSpec with WithDomain with OptionValues { TxHelpers.setScript(dApp2, dAppContract2(dApp3.toAddress)), TxHelpers.setScript(dApp3, dAppContract3) ) - d.transactionDiffer(invoke) + d.transactionDifferWithLog(invoke) .trace .collectFirst { case invokeTrace: InvokeScriptTrace => invokeTrace.resultE match { diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/smart/eth/EthereumInvokeTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/smart/eth/EthereumInvokeTest.scala index 661255b9ef8..cc8f45aa7aa 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/smart/eth/EthereumInvokeTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/smart/eth/EthereumInvokeTest.scala @@ -147,8 +147,8 @@ class EthereumInvokeTest extends PropSpec with WithDomain with EthHelpers { d.appendBlock(ethInvoke) d.liquidDiff.errorMessage(ethInvoke.id()) shouldBe None - d.liquidDiff.accountData(dApp).data("check").value shouldBe true - if (syncCall) d.liquidDiff.accountData(dApp2).data("check").value shouldBe true + d.liquidDiff.accountData(dApp)("check").value shouldBe true + if (syncCall) d.liquidDiff.accountData(dApp2)("check").value shouldBe true val assetsPortfolio = assets.map(Portfolio.build(_, paymentAmount)).fold(Portfolio())((p1, p2) => p1.combine(p2).explicitGet()) d.liquidDiff.portfolios.getOrElse(dApp, Portfolio()) shouldBe assetsPortfolio diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/MatcherBlockchainTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/MatcherBlockchainTest.scala index 303a54f714e..94115dcfd3c 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/MatcherBlockchainTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/MatcherBlockchainTest.scala @@ -40,6 +40,7 @@ class MatcherBlockchainTest extends PropSpec with MockFactory with WithDomain { override def wavesAmount(height: Int): BigInt = ??? override def transferById(id: ByteStr): Option[(Int, TransferTransaction)] = ??? override def transactionInfo(id: ByteStr): Option[(TxMeta, Transaction)] = ??? + override def transactionInfos(ids: Seq[BlockId]): Seq[Option[(TxMeta, Transaction)]] = ??? override def transactionMeta(id: ByteStr): Option[TxMeta] = ??? override def containsTransaction(tx: Transaction): Boolean = ??? override def assetDescription(id: Asset.IssuedAsset): Option[AssetDescription] = ??? @@ -54,7 +55,10 @@ class MatcherBlockchainTest extends PropSpec with MockFactory with WithDomain { override def accountData(acc: Address, key: String): Option[DataEntry[?]] = ??? 
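The new `enableExecutionLog` argument threads through these call sites (`assertDiffEi(..., enableExecutionLog = true)`, `ScriptRunner(..., enableExecutionLog = false)`, `InvokeScriptTransactionDiff.apply(..., enableExecutionLog = ...)`), so the detailed RIDE evaluation log is only collected when a test opts in; `d.transactionDifferWithLog` appears to be the opt-in counterpart of `d.transactionDiffer`. A small sketch of the opt-in path, reusing the `d` and `invoke` fixtures from `SyncDAppErrorLogTest` above:

d.transactionDifferWithLog(invoke)
  .trace
  .collectFirst { case invokeTrace: InvokeScriptTrace =>
    // With the log enabled, resultE carries the failure that the spec renders and
    // compares against its expected multi-line error log.
    invokeTrace.resultE.isLeft shouldBe true
  }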
override def hasData(address: Address): Boolean = ??? override def leaseBalance(address: Address): LeaseBalance = ??? + override def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] = ??? override def balance(address: Address, mayBeAssetId: Asset): Long = ??? + override def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), Long] = ??? + override def wavesBalances(addresses: Seq[Address]): Map[Address, Long] = ??? override def resolveERC20Address(address: ERC20Address): Option[Asset.IssuedAsset] = ??? } @@ -95,6 +99,7 @@ class MatcherBlockchainTest extends PropSpec with MockFactory with WithDomain { checkEstimatorSumOverflow = true, newEvaluatorMode = true, checkWeakPk = true, + enableExecutionLog = false, fixBigScriptField = true ) ._3 shouldBe Right(CONST_BOOLEAN(true)) diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/ScriptVersionsTest.scala b/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/ScriptVersionsTest.scala index fcdd54c6049..0b52b3541c8 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/ScriptVersionsTest.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/smart/predef/ScriptVersionsTest.scala @@ -42,7 +42,7 @@ class ScriptVersionsTest extends FreeSpec { tx: Transaction, blockchain: Blockchain ): Either[String, EVALUATED] = - ScriptRunner(Coproduct(tx), blockchain, script, isAssetScript = false, null)._3.leftMap(_.message) + ScriptRunner(Coproduct(tx), blockchain, script, isAssetScript = false, null, enableExecutionLog = false)._3.leftMap(_.message) private val duplicateNames = """ diff --git a/node/src/test/scala/com/wavesplatform/state/diffs/smart/scenarios/BalancesV4Test.scala b/node/src/test/scala/com/wavesplatform/state/diffs/smart/scenarios/BalancesV4Test.scala index 0cdbfc6c430..bf62561a6a7 100644 --- a/node/src/test/scala/com/wavesplatform/state/diffs/smart/scenarios/BalancesV4Test.scala +++ b/node/src/test/scala/com/wavesplatform/state/diffs/smart/scenarios/BalancesV4Test.scala @@ -19,6 +19,7 @@ import com.wavesplatform.lang.v1.traits.Environment import com.wavesplatform.settings.{Constants, FunctionalitySettings, TestFunctionalitySettings} import com.wavesplatform.state.* import com.wavesplatform.state.diffs.* +import com.wavesplatform.state.reader.CompositeBlockchain import com.wavesplatform.test.* import com.wavesplatform.transaction.Asset.* import com.wavesplatform.transaction.* @@ -32,7 +33,8 @@ class BalancesV4Test extends PropSpec with WithState { val SetScriptFee: Long = Constants.UnitsInWave / 1000L val SetAssetScriptFee: Long = Constants.UnitsInWave - val rideV4Activated: FunctionalitySettings = TestFunctionalitySettings.Enabled.copy(preActivatedFeatures = Map( + val rideV4Activated: FunctionalitySettings = TestFunctionalitySettings.Enabled.copy(preActivatedFeatures = + Map( BlockchainFeatures.Ride4DApps.id -> 0, BlockchainFeatures.SmartAccounts.id -> 0, BlockchainFeatures.BlockV5.id -> 0 @@ -92,18 +94,18 @@ class BalancesV4Test extends PropSpec with WithState { Seq(TestBlock.create(b)), TestBlock.create(Seq(ci)), rideV4Activated - ) { - case (d, s) => - val apiBalance = com.wavesplatform.api.common.CommonAccountsApi(() => d, db, s).balanceDetails(acc1.toAddress).explicitGet() - val data = d.accountData(dapp.toAddress) - data.data("available") shouldBe IntegerDataEntry("available", apiBalance.available) - apiBalance.available shouldBe 16 * Constants.UnitsInWave - data.data("regular") shouldBe IntegerDataEntry("regular", apiBalance.regular) - 
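Besides the single-key lookups, the stub blockchain in `MatcherBlockchainTest` now has to override the batch variants introduced by this patch (`transactionInfos`, `leaseBalances`, `balances`, `wavesBalances`). The test leaves them as `???`; for a stub that actually needs them, each batch call can simply delegate to its single-key counterpart. A sketch of such a delegation — the signatures are copied from the overrides above, while the delegation itself is just one reasonable choice for a test stub, not how the node implements them:

override def transactionInfos(ids: Seq[BlockId]): Seq[Option[(TxMeta, Transaction)]] =
  ids.map(transactionInfo)

override def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] =
  addresses.map(a => a -> leaseBalance(a)).toMap

override def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), Long] =
  req.map { case (a, asset) => (a, asset) -> balance(a, asset) }.toMap

override def wavesBalances(addresses: Seq[Address]): Map[Address, Long] =
  addresses.map(a => a -> balance(a, Asset.Waves)).toMap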
apiBalance.regular shouldBe 26 * Constants.UnitsInWave - data.data("generating") shouldBe IntegerDataEntry("generating", apiBalance.generating) - apiBalance.generating shouldBe 5 * Constants.UnitsInWave - data.data("effective") shouldBe IntegerDataEntry("effective", apiBalance.effective) - apiBalance.effective shouldBe 17 * Constants.UnitsInWave + ) { case (d, s) => + val apiBalance = + com.wavesplatform.api.common.CommonAccountsApi(() => CompositeBlockchain(s, d), rdb, s).balanceDetails(acc1.toAddress).explicitGet() + val data = d.accountData(dapp.toAddress) + data("available") shouldBe IntegerDataEntry("available", apiBalance.available) + apiBalance.available shouldBe 16 * Constants.UnitsInWave + data("regular") shouldBe IntegerDataEntry("regular", apiBalance.regular) + apiBalance.regular shouldBe 26 * Constants.UnitsInWave + data("generating") shouldBe IntegerDataEntry("generating", apiBalance.generating) + apiBalance.generating shouldBe 5 * Constants.UnitsInWave + data("effective") shouldBe IntegerDataEntry("effective", apiBalance.effective) + apiBalance.effective shouldBe 17 * Constants.UnitsInWave } } @@ -120,11 +122,11 @@ class BalancesV4Test extends PropSpec with WithState { val script = s""" - | {-# STDLIB_VERSION 4 #-} - | {-# CONTENT_TYPE EXPRESSION #-} - | {-# SCRIPT_TYPE ASSET #-} - | - | assetBalance(Address(base58'$acc'), this.id) == $a && assetBalance(Alias("alias"), this.id) == $a + | {-# STDLIB_VERSION 4 #-} + | {-# CONTENT_TYPE EXPRESSION #-} + | {-# SCRIPT_TYPE ASSET #-} + | + | assetBalance(Address(base58'$acc'), this.id) == $a && assetBalance(Alias("alias"), this.id) == $a """.stripMargin val parsedScript = Parser.parseExpr(script).get.value ExprScript(V4, ExpressionCompiler(ctx.compilerContext, parsedScript).explicitGet()._1) @@ -182,11 +184,11 @@ class BalancesV4Test extends PropSpec with WithState { val script = s""" - | {-# STDLIB_VERSION 4 #-} - | {-# CONTENT_TYPE EXPRESSION #-} - | {-# SCRIPT_TYPE ASSET #-} - | - | wavesBalance(Address(base58'$acc')).regular == $w + | {-# STDLIB_VERSION 4 #-} + | {-# CONTENT_TYPE EXPRESSION #-} + | {-# SCRIPT_TYPE ASSET #-} + | + | wavesBalance(Address(base58'$acc')).regular == $w """.stripMargin val parsedScript = Parser.parseExpr(script).get.value ExprScript(V4, ExpressionCompiler(ctx.compilerContext, parsedScript).explicitGet()._1) @@ -222,12 +224,11 @@ class BalancesV4Test extends PropSpec with WithState { val setScript = TxHelpers.setScript(acc1, dappScript(ByteStr(acc2.toAddress.bytes), issue.id()), SetScriptFee) val invoke = TxHelpers.invoke(acc1.toAddress, func = Some("bar"), invoker = acc2, fee = InvokeScriptTxFee) - assertDiffAndState(Seq(TestBlock.create(genesis :+ issue :+ setScript)), TestBlock.create(Seq(invoke)), rideV4Activated) { - case (d, s) => - val error = d.scriptResults(invoke.id()).error - error.get.code shouldBe 3 - error.get.text should include("Transaction is not allowed by script of the asset") - s.wavesPortfolio(acc1.toAddress).balance shouldBe w + assertDiffAndState(Seq(TestBlock.create(genesis :+ issue :+ setScript)), TestBlock.create(Seq(invoke)), rideV4Activated) { case (d, s) => + val error = d.scriptResults(invoke.id()).error + error.get.code shouldBe 3 + error.get.text should include("Transaction is not allowed by script of the asset") + s.wavesPortfolio(acc1.toAddress).balance shouldBe w } } diff --git a/node/src/test/scala/com/wavesplatform/state/utils/package.scala b/node/src/test/scala/com/wavesplatform/state/utils/package.scala index f9394ac64a9..453b10c6c34 100644 --- 
a/node/src/test/scala/com/wavesplatform/state/utils/package.scala +++ b/node/src/test/scala/com/wavesplatform/state/utils/package.scala @@ -3,37 +3,38 @@ package com.wavesplatform.state import com.wavesplatform.account.Address import com.wavesplatform.api.common.AddressTransactions import com.wavesplatform.common.state.ByteStr -import com.wavesplatform.database.{LevelDBWriter, TestStorageFactory} +import com.wavesplatform.database.{RDB, RocksDBWriter, TestStorageFactory} import com.wavesplatform.events.BlockchainUpdateTriggers -import com.wavesplatform.settings.TestSettings._ +import com.wavesplatform.settings.TestSettings.* import com.wavesplatform.settings.{BlockchainSettings, FunctionalitySettings, GenesisSettings, RewardsSettings, TestSettings} -import com.wavesplatform.transaction.{Asset, Transaction} +import com.wavesplatform.transaction.Transaction import com.wavesplatform.utils.SystemTime -import monix.reactive.Observer -import org.iq80.leveldb.DB +import monix.execution.Scheduler package object utils { def addressTransactions( - db: DB, + rdb: RDB, diff: => Option[(Height, Diff)], address: Address, types: Set[Transaction.Type], fromId: Option[ByteStr] - ): Seq[(Height, Transaction)] = - AddressTransactions.allAddressTransactions(db, diff, address, None, types, fromId).map { case (tm, tx) => tm.height -> tx }.toSeq + )(implicit s: Scheduler): Seq[(Height, Transaction)] = + AddressTransactions + .allAddressTransactions(rdb, diff, address, None, types, fromId) + .map { case (tm, tx, _) => tm.height -> tx } + .toListL + .runSyncUnsafe() - object TestLevelDB { + object TestRocksDB { def withFunctionalitySettings( - writableDB: DB, - spendableBalanceChanged: Observer[(Address, Asset)], + rdb: RDB, fs: FunctionalitySettings - ): LevelDBWriter = + ): RocksDBWriter = TestStorageFactory( TestSettings.Default.withFunctionalitySettings(fs), - writableDB, + rdb, SystemTime, - spendableBalanceChanged, BlockchainUpdateTriggers.noop )._2 diff --git a/node/src/test/scala/com/wavesplatform/test/DomainPresets.scala b/node/src/test/scala/com/wavesplatform/test/DomainPresets.scala index 74de7adb52c..bf53e9138b5 100644 --- a/node/src/test/scala/com/wavesplatform/test/DomainPresets.scala +++ b/node/src/test/scala/com/wavesplatform/test/DomainPresets.scala @@ -4,7 +4,6 @@ import com.wavesplatform.features.{BlockchainFeature, BlockchainFeatures} import com.wavesplatform.lang.directives.values.* import com.wavesplatform.settings.{FunctionalitySettings, WavesSettings, loadConfig} - object DomainPresets { implicit class WavesSettingsOps(val ws: WavesSettings) extends AnyVal { def configure(transformF: FunctionalitySettings => FunctionalitySettings): WavesSettings = { @@ -55,8 +54,8 @@ object DomainPresets { .blockchainSettings .functionalitySettings - domainSettingsWithFS(defaultFS.copy(preActivatedFeatures = fs.map { - case (f, h) => f.id -> h + domainSettingsWithFS(defaultFS.copy(preActivatedFeatures = fs.map { case (f, h) => + f.id -> h }.toMap)) } @@ -65,16 +64,18 @@ object DomainPresets { BlockchainFeatures.NG ) - val ScriptsAndSponsorship: WavesSettings = NG.addFeatures( - BlockchainFeatures.SmartAccounts, - BlockchainFeatures.SmartAccountTrading, - BlockchainFeatures.OrderV3, - BlockchainFeatures.FeeSponsorship, - BlockchainFeatures.DataTransaction, - BlockchainFeatures.SmartAssets - ).setFeaturesHeight( - BlockchainFeatures.FeeSponsorship -> -NG.blockchainSettings.functionalitySettings.activationWindowSize(1) - ) + val ScriptsAndSponsorship: WavesSettings = NG + .addFeatures( + 
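One behavioural consequence of the `state.utils` change above: `addressTransactions` now drains a Monix `Observable` with `.toListL.runSyncUnsafe()`, so every caller needs an implicit `Scheduler` in scope — which is why `CreateAliasTransactionDiffTest` earlier in this patch gains `import monix.execution.Scheduler.Implicits.global`. A sketch of a call site under the new signature, reusing the `rdb`, `newState`, `blockDiff` and `senderAcc` fixtures visible in that test:

import monix.execution.Scheduler.Implicits.global

val aliases = addressTransactions(
  rdb,                                            // the shared RDB handle, replacing the raw LevelDB `db`
  Some(Height(newState.height + 1) -> blockDiff), // include the liquid diff on top of persisted state
  senderAcc,
  Set(TransactionType.CreateAlias),
  None
).collect { case (_, cat: CreateAliasTransaction) => cat.alias }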
BlockchainFeatures.SmartAccounts, + BlockchainFeatures.SmartAccountTrading, + BlockchainFeatures.OrderV3, + BlockchainFeatures.FeeSponsorship, + BlockchainFeatures.DataTransaction, + BlockchainFeatures.SmartAssets + ) + .setFeaturesHeight( + BlockchainFeatures.FeeSponsorship -> -NG.blockchainSettings.functionalitySettings.activationWindowSize(1) + ) val RideV3: WavesSettings = ScriptsAndSponsorship.addFeatures( BlockchainFeatures.Ride4DApps @@ -89,9 +90,11 @@ object DomainPresets { val RideV6: WavesSettings = RideV5.addFeatures(BlockchainFeatures.RideV6) - val ContinuationTransaction: WavesSettings = RideV6.addFeatures(BlockchainFeatures.ContinuationTransaction).copy( - featuresSettings = RideV6.featuresSettings.copy(autoShutdownOnUnsupportedFeature = false) - ) + val ContinuationTransaction: WavesSettings = RideV6 + .addFeatures(BlockchainFeatures.ContinuationTransaction) + .copy( + featuresSettings = RideV6.featuresSettings.copy(autoShutdownOnUnsupportedFeature = false) + ) def settingsForRide(version: StdLibVersion): WavesSettings = version match { diff --git a/node/src/test/scala/com/wavesplatform/test/SharedDomain.scala b/node/src/test/scala/com/wavesplatform/test/SharedDomain.scala index 97d82b27535..568730401e0 100644 --- a/node/src/test/scala/com/wavesplatform/test/SharedDomain.scala +++ b/node/src/test/scala/com/wavesplatform/test/SharedDomain.scala @@ -2,39 +2,37 @@ package com.wavesplatform.test import java.nio.file.Files -import com.wavesplatform.{NTPTime, TestHelpers} -import com.wavesplatform.database.{LevelDBFactory, TestStorageFactory} +import com.wavesplatform.database.{RDB, TestStorageFactory} +import com.wavesplatform.db.DBCacheSettings import com.wavesplatform.db.WithState.AddrWithBalance import com.wavesplatform.events.BlockchainUpdateTriggers import com.wavesplatform.history.Domain import com.wavesplatform.settings.WavesSettings import com.wavesplatform.transaction.TxHelpers -import monix.reactive.Observer -import org.iq80.leveldb.{DB, Options} +import com.wavesplatform.{NTPTime, TestHelpers} import org.scalatest.{BeforeAndAfterAll, Suite} -trait SharedDomain extends BeforeAndAfterAll with NTPTime { _: Suite => - private val path = Files.createTempDirectory("lvl-temp").toAbsolutePath - private val db: DB = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true)) - private val (bui, ldb) = TestStorageFactory(settings, db, ntpTime, Observer.stopped, BlockchainUpdateTriggers.noop) +trait SharedDomain extends BeforeAndAfterAll with NTPTime with DBCacheSettings { _: Suite => + private val path = Files.createTempDirectory("rocks-temp").toAbsolutePath + private val rdb = RDB.open(dbSettings.copy(directory = path.toAbsolutePath.toString)) + private val (bui, ldb) = TestStorageFactory(settings, rdb, ntpTime, BlockchainUpdateTriggers.noop) - def settings: WavesSettings = DomainPresets.ScriptsAndSponsorship + def settings: WavesSettings = DomainPresets.ScriptsAndSponsorship def genesisBalances: Seq[AddrWithBalance] = Seq.empty - lazy val domain: Domain = Domain(db, bui, ldb, settings) - + lazy val domain: Domain = Domain(rdb, bui, ldb, settings) override protected def beforeAll(): Unit = { val genesisTransactions = genesisBalances.map(ab => TxHelpers.genesis(ab.address, ab.balance)) if (genesisTransactions.nonEmpty) { - domain.appendBlock(genesisTransactions: _*) + domain.appendBlock(genesisTransactions*) } super.beforeAll() } override protected def afterAll(): Unit = { super.afterAll() - db.close() + rdb.close() bui.shutdown() 
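`SharedDomain` now provisions storage through `RDB.open` on a throw-away directory instead of `LevelDBFactory`, and its teardown releases the native handles before deleting the directory. The lifecycle in isolation, using only calls visible in this hunk (`dbSettings` is provided by the newly mixed-in `DBCacheSettings`):

import java.nio.file.Files

import com.wavesplatform.TestHelpers
import com.wavesplatform.database.RDB

// Open a RocksDB instance in a temporary directory for the duration of the suite...
val path = Files.createTempDirectory("rocks-temp").toAbsolutePath
val rdb  = RDB.open(dbSettings.copy(directory = path.toString))

try {
  // ...build TestStorageFactory / Domain on top of `rdb`, as SharedDomain does...
} finally {
  // ...then close the native handles before removing the files on disk.
  rdb.close()
  TestHelpers.deleteRecursively(path)
}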
TestHelpers.deleteRecursively(path) } diff --git a/node/src/test/scala/com/wavesplatform/transaction/ChainIdSpecification.scala b/node/src/test/scala/com/wavesplatform/transaction/ChainIdSpecification.scala index adf7dbe0a03..6bc34a5045a 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/ChainIdSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/ChainIdSpecification.scala @@ -48,16 +48,30 @@ class ChainIdSpecification extends PropSpec { val recoveredTx = recoveredTxEi.explicitGet().asInstanceOf[ProvenTransaction] recoveredTx shouldBe tx - Verifier.verifyAsEllipticCurveSignature(recoveredTx, checkWeakPk = false).explicitGet() + Verifier.verifyAsEllipticCurveSignature(recoveredTx, isRideV6Activated = false).explicitGet() } property("TransferTransaction validation") { - forAll(addressOrAliasWithVersion(TransferTransaction.supportedVersions)) { - case (addressOrAlias, version, sender, amount, fee, ts) => + forAll(addressOrAliasWithVersion(TransferTransaction.supportedVersions)) { case (addressOrAlias, version, sender, amount, fee, ts) => + TransferTransaction( + version, + sender.publicKey, + addressOrAlias, + Waves, + amount, + Waves, + fee, + ByteStr.empty, + ts, + Proofs.empty, + AddressScheme.current.chainId + ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) + + validateFromOtherNetwork( TransferTransaction( - version, + TxVersion.V3, sender.publicKey, - addressOrAlias, + Alias.createWithChainId("sasha", otherChainId).explicitGet(), Waves, amount, Waves, @@ -65,30 +79,25 @@ class ChainIdSpecification extends PropSpec { ByteStr.empty, ts, Proofs.empty, - AddressScheme.current.chainId - ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) - - validateFromOtherNetwork( - TransferTransaction( - TxVersion.V3, - sender.publicKey, - Alias.createWithChainId("sasha", otherChainId).explicitGet(), - Waves, - amount, - Waves, - fee, - ByteStr.empty, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("PaymentTransaction validation") { - forAll(addressOrAliasWithVersion(PaymentTransaction.supportedVersions)) { - case (_, _, sender, amount, fee, ts) => + forAll(addressOrAliasWithVersion(PaymentTransaction.supportedVersions)) { case (_, _, sender, amount, fee, ts) => + PaymentTransaction( + sender.publicKey, + addressFromOther, + amount, + fee, + ts, + ByteStr.empty, + AddressScheme.current.chainId + ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) + + validateFromOtherNetwork( PaymentTransaction( sender.publicKey, addressFromOther, @@ -96,57 +105,58 @@ class ChainIdSpecification extends PropSpec { fee, ts, ByteStr.empty, - AddressScheme.current.chainId - ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) - - validateFromOtherNetwork( - PaymentTransaction( - sender.publicKey, - addressFromOther, - amount, - fee, - ts, - ByteStr.empty, - otherChainId - ).validatedEither.map(u => u.copy(signature = crypto.sign(sender.privateKey, u.bodyBytes()))).explicitGet() - ) + otherChainId + ).validatedEither.map(u => u.copy(signature = crypto.sign(sender.privateKey, u.bodyBytes()))).explicitGet() + ) } } property("LeaseTransaction validation") { - forAll(addressOrAliasWithVersion(LeaseTransaction.supportedVersions)) { - case (addressOrAlias, version, sender, amount, fee, ts) => + 
forAll(addressOrAliasWithVersion(LeaseTransaction.supportedVersions)) { case (addressOrAlias, version, sender, amount, fee, ts) => + LeaseTransaction( + version, + sender.publicKey, + addressOrAlias, + amount, + fee, + ts, + Proofs.empty, + AddressScheme.current.chainId + ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) + + validateFromOtherNetwork( LeaseTransaction( - version, + TxVersion.V3, sender.publicKey, addressOrAlias, amount, fee, ts, Proofs.empty, - AddressScheme.current.chainId - ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) - - validateFromOtherNetwork( - LeaseTransaction( - TxVersion.V3, - sender.publicKey, - addressOrAlias, - amount, - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("InvokeScriptTransaction validation") { - forAll(addressOrAliasWithVersion(InvokeScriptTransaction.supportedVersions)) { - case (addressOrAlias, version, sender, _, fee, ts) => + forAll(addressOrAliasWithVersion(InvokeScriptTransaction.supportedVersions)) { case (addressOrAlias, version, sender, _, fee, ts) => + InvokeScriptTransaction( + version, + sender.publicKey, + addressOrAlias, + None, + Seq.empty, + fee, + Waves, + ts, + Proofs.empty, + AddressScheme.current.chainId + ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) + + validateFromOtherNetwork( InvokeScriptTransaction( - version, + TxVersion.V2, sender.publicKey, addressOrAlias, None, @@ -155,260 +165,233 @@ class ChainIdSpecification extends PropSpec { Waves, ts, Proofs.empty, - AddressScheme.current.chainId - ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) - - validateFromOtherNetwork( - InvokeScriptTransaction( - TxVersion.V2, - sender.publicKey, - addressOrAlias, - None, - Seq.empty, - fee, - Waves, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("GenesisTransaction validation") { - forAll(addressOrAliasWithVersion(GenesisTransaction.supportedVersions)) { - case (_, _, _, amount, _, ts) => - GenesisTransaction( - addressFromOther, - TxNonNegativeAmount.unsafeFrom(amount.value), - ts, - ByteStr.empty, - AddressScheme.current.chainId - ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) + forAll(addressOrAliasWithVersion(GenesisTransaction.supportedVersions)) { case (_, _, _, amount, _, ts) => + GenesisTransaction( + addressFromOther, + TxNonNegativeAmount.unsafeFrom(amount.value), + ts, + ByteStr.empty, + AddressScheme.current.chainId + ).validatedEither shouldBe Left(GenericError("Address or alias from other network")) } } property("BurnTransaction validation") { - forAll(addressOrAliasWithVersion(BurnTransaction.supportedVersions)) { - case (_, _, sender, amount, fee, ts) => - validateFromOtherNetwork( - BurnTransaction( - TxVersion.V3, - sender.publicKey, - IssuedAsset(ByteStr(bytes32gen.sample.get)), - TxNonNegativeAmount.unsafeFrom(amount.value), - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(BurnTransaction.supportedVersions)) { case (_, _, sender, amount, fee, ts) => + validateFromOtherNetwork( + BurnTransaction( + TxVersion.V3, + sender.publicKey, + 
IssuedAsset(ByteStr(bytes32gen.sample.get)), + TxNonNegativeAmount.unsafeFrom(amount.value), + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("CreateAliasTransaction validation") { - forAll(addressOrAliasWithVersion(CreateAliasTransaction.supportedVersions)) { - case (_, _, sender, _, fee, ts) => - validateFromOtherNetwork( - CreateAliasTransaction( - TxVersion.V3, - sender.publicKey, - "alias", - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(CreateAliasTransaction.supportedVersions)) { case (_, _, sender, _, fee, ts) => + validateFromOtherNetwork( + CreateAliasTransaction( + TxVersion.V3, + sender.publicKey, + "alias", + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("DataTransaction validation") { - forAll(addressOrAliasWithVersion(DataTransaction.supportedVersions)) { - case (_, _, sender, _, fee, ts) => - validateFromOtherNetwork( - DataTransaction( - TxVersion.V2, - sender.publicKey, - Seq(StringDataEntry("key", "value")), - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(DataTransaction.supportedVersions)) { case (_, _, sender, _, fee, ts) => + validateFromOtherNetwork( + DataTransaction( + TxVersion.V2, + sender.publicKey, + Seq(StringDataEntry("key", "value")), + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("ExchangeTransaction validation") { - forAll(addressOrAliasWithVersion(ExchangeTransaction.supportedVersions)) { - case (_, _, sender, amount, fee, ts) => - val pair = AssetPair(Waves, IssuedAsset(ByteStr(bytes32gen.sample.get))) - validateFromOtherNetwork( - ExchangeTransaction( - TxVersion.V3, - Order.sell(Order.V3, sender, sender.publicKey, pair, amount.value, amount.value, ts, ts + ts, fee.value).explicitGet(), - Order.buy(Order.V3, sender, sender.publicKey, pair, amount.value, amount.value, ts, ts + ts, fee.value).explicitGet(), - TxExchangeAmount.unsafeFrom(amount.value), - TxExchangePrice.unsafeFrom(amount.value), - fee.value, - fee.value, - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(ExchangeTransaction.supportedVersions)) { case (_, _, sender, amount, fee, ts) => + val pair = AssetPair(Waves, IssuedAsset(ByteStr(bytes32gen.sample.get))) + validateFromOtherNetwork( + ExchangeTransaction( + TxVersion.V3, + Order.sell(Order.V3, sender, sender.publicKey, pair, amount.value, amount.value, ts, ts + ts, fee.value).explicitGet(), + Order.buy(Order.V3, sender, sender.publicKey, pair, amount.value, amount.value, ts, ts + ts, fee.value).explicitGet(), + TxExchangeAmount.unsafeFrom(amount.value), + TxExchangePrice.unsafeFrom(amount.value), + fee.value, + fee.value, + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("IssueTransaction validation") { - forAll(addressOrAliasWithVersion(IssueTransaction.supportedVersions)) { - case (_, _, sender, quantity, fee, ts) => - validateFromOtherNetwork( - IssueTransaction( - TxVersion.V3, - sender.publicKey, - ByteString.copyFromUtf8("name"), - ByteString.copyFromUtf8("description"), - quantity, - TxDecimals.unsafeFrom(8: Byte), - true, - None, - fee, - ts, - 
Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(IssueTransaction.supportedVersions)) { case (_, _, sender, quantity, fee, ts) => + validateFromOtherNetwork( + IssueTransaction( + TxVersion.V3, + sender.publicKey, + ByteString.copyFromUtf8("name"), + ByteString.copyFromUtf8("description"), + quantity, + TxDecimals.unsafeFrom(8: Byte), + true, + None, + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("LeaseCancelTransaction validation") { - forAll(addressOrAliasWithVersion(LeaseCancelTransaction.supportedVersions)) { - case (_, _, sender, _, fee, ts) => - validateFromOtherNetwork( - LeaseCancelTransaction( - TxVersion.V3, - sender.publicKey, - ByteStr(bytes32gen.sample.get), - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(LeaseCancelTransaction.supportedVersions)) { case (_, _, sender, _, fee, ts) => + validateFromOtherNetwork( + LeaseCancelTransaction( + TxVersion.V3, + sender.publicKey, + ByteStr(bytes32gen.sample.get), + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("MassTransferTransaction validation") { - forAll(addressOrAliasWithVersion(MassTransferTransaction.supportedVersions)) { - case (addressOrAlias, _, sender, amount, fee, ts) => - validateFromOtherNetwork( - MassTransferTransaction( - TxVersion.V2, - sender.publicKey, - Waves, - Seq(ParsedTransfer(addressOrAlias, TxNonNegativeAmount.unsafeFrom(amount.value))), - fee, - ts, - ByteStr.empty, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(MassTransferTransaction.supportedVersions)) { case (addressOrAlias, _, sender, amount, fee, ts) => + validateFromOtherNetwork( + MassTransferTransaction( + TxVersion.V2, + sender.publicKey, + Waves, + Seq(ParsedTransfer(addressOrAlias, TxNonNegativeAmount.unsafeFrom(amount.value))), + fee, + ts, + ByteStr.empty, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("ReissueTransaction validation") { - forAll(addressOrAliasWithVersion(ReissueTransaction.supportedVersions)) { - case (_, _, sender, quantity, fee, ts) => - validateFromOtherNetwork( - ReissueTransaction( - TxVersion.V3, - sender.publicKey, - IssuedAsset(ByteStr(bytes32gen.sample.get)), - quantity, - true, - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(ReissueTransaction.supportedVersions)) { case (_, _, sender, quantity, fee, ts) => + validateFromOtherNetwork( + ReissueTransaction( + TxVersion.V3, + sender.publicKey, + IssuedAsset(ByteStr(bytes32gen.sample.get)), + quantity, + true, + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("SetAssetScriptTransaction validation") { - forAll(addressOrAliasWithVersion(SetAssetScriptTransaction.supportedVersions)) { - case (_, _, sender, _, fee, ts) => - validateFromOtherNetwork( - SetAssetScriptTransaction( - TxVersion.V2, - sender.publicKey, - IssuedAsset(ByteStr(bytes32gen.sample.get)), - Some(scriptGen.sample.get), - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + 
forAll(addressOrAliasWithVersion(SetAssetScriptTransaction.supportedVersions)) { case (_, _, sender, _, fee, ts) => + validateFromOtherNetwork( + SetAssetScriptTransaction( + TxVersion.V2, + sender.publicKey, + IssuedAsset(ByteStr(bytes32gen.sample.get)), + Some(scriptGen.sample.get), + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("SetScriptTransaction validation") { - forAll(addressOrAliasWithVersion(SetScriptTransaction.supportedVersions)) { - case (_, _, sender, _, fee, ts) => - validateFromOtherNetwork( - SetScriptTransaction( - TxVersion.V2, - sender.publicKey, - Some(scriptGen.sample.get), - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(SetScriptTransaction.supportedVersions)) { case (_, _, sender, _, fee, ts) => + validateFromOtherNetwork( + SetScriptTransaction( + TxVersion.V2, + sender.publicKey, + Some(scriptGen.sample.get), + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("SponsorFeeTransaction validation") { - forAll(addressOrAliasWithVersion(SponsorFeeTransaction.supportedVersions)) { - case (_, _, sender, _, fee, ts) => - validateFromOtherNetwork( - SponsorFeeTransaction( - TxVersion.V2, - sender.publicKey, - IssuedAsset(ByteStr(bytes32gen.sample.get)), - None, - fee, - ts, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(SponsorFeeTransaction.supportedVersions)) { case (_, _, sender, _, fee, ts) => + validateFromOtherNetwork( + SponsorFeeTransaction( + TxVersion.V2, + sender.publicKey, + IssuedAsset(ByteStr(bytes32gen.sample.get)), + None, + fee, + ts, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } property("UpdateAssetInfoTransaction validation") { - forAll(addressOrAliasWithVersion(UpdateAssetInfoTransaction.supportedVersions)) { - case (_, version, sender, _, fee, ts) => - validateFromOtherNetwork( - UpdateAssetInfoTransaction( - version, - sender.publicKey, - IssuedAsset(ByteStr(bytes32gen.sample.get)), - "name", - "description", - ts, - fee, - Waves, - Proofs.empty, - otherChainId - ).signWith(sender.privateKey).validatedEither.explicitGet() - ) + forAll(addressOrAliasWithVersion(UpdateAssetInfoTransaction.supportedVersions)) { case (_, version, sender, _, fee, ts) => + validateFromOtherNetwork( + UpdateAssetInfoTransaction( + version, + sender.publicKey, + IssuedAsset(ByteStr(bytes32gen.sample.get)), + "name", + "description", + ts, + fee, + Waves, + Proofs.empty, + otherChainId + ).signWith(sender.privateKey).validatedEither.explicitGet() + ) } } } diff --git a/node/src/test/scala/com/wavesplatform/transaction/InvokeScriptTransactionSpecification.scala b/node/src/test/scala/com/wavesplatform/transaction/InvokeScriptTransactionSpecification.scala index b547df08152..4d3610d8de4 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/InvokeScriptTransactionSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/InvokeScriptTransactionSpecification.scala @@ -40,8 +40,8 @@ class InvokeScriptTransactionSpecification extends PropSpec { deser.timestamp shouldEqual transaction.timestamp deser.proofs shouldEqual transaction.proofs bytes shouldEqual deser.bytes() - Verifier.verifyAsEllipticCurveSignature(transaction, checkWeakPk = false) should beRight - 
Verifier.verifyAsEllipticCurveSignature(deser, checkWeakPk = false) should beRight // !!!!!!!!!!!!!!! + Verifier.verifyAsEllipticCurveSignature(transaction, isRideV6Activated = false) should beRight + Verifier.verifyAsEllipticCurveSignature(deser, isRideV6Activated = false) should beRight // !!!!!!!!!!!!!!! } property("protobuf roundtrip") { diff --git a/node/src/test/scala/com/wavesplatform/transaction/IssueTransactionV2Specification.scala b/node/src/test/scala/com/wavesplatform/transaction/IssueTransactionV2Specification.scala index e72944ae961..b65704bf306 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/IssueTransactionV2Specification.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/IssueTransactionV2Specification.scala @@ -18,11 +18,11 @@ import com.wavesplatform.state.HistoryTest import com.wavesplatform.test.PropSpec import com.wavesplatform.transaction.assets.IssueTransaction import com.wavesplatform.transaction.serialization.impl.IssueTxSerializer -import com.wavesplatform.{WithDB, crypto} +import com.wavesplatform.{WithNewDBForEachTest, crypto} import org.scalatest.EitherValues import play.api.libs.json.Json -class IssueTransactionV2Specification extends PropSpec with WithDB with HistoryTest with EitherValues { +class IssueTransactionV2Specification extends PropSpec with WithNewDBForEachTest with HistoryTest with EitherValues { property("IssueV2 serialization roundtrip") { forAll(issueV2TransactionGen()) { tx: IssueTransaction => diff --git a/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/EthOrderSpec.scala b/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/EthOrderSpec.scala index 2cb2e65e6af..3975ef09d64 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/EthOrderSpec.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/EthOrderSpec.scala @@ -127,6 +127,15 @@ class EthOrderSpec val sh = StubHelpers(blockchain) sh.creditBalance(ethBuyOrder.senderAddress, *) sh.creditBalance(ethSellOrder.senderAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns( + Map( + TxHelpers.matcher.toAddress -> Long.MaxValue / 3, + ethBuyOrder.senderAddress -> Long.MaxValue / 3, + ethSellOrder.senderAddress -> Long.MaxValue / 3 + ) + ) sh.issueAsset(ByteStr(EthStubBytes32)) } @@ -141,6 +150,9 @@ class EthOrderSpec val sh = StubHelpers(blockchain) sh.creditBalance(TxHelpers.matcher.toAddress, *) sh.creditBalance(ethSellOrder.senderAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(TxHelpers.matcher.toAddress -> Long.MaxValue / 3, ethSellOrder.senderAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) } @@ -282,6 +294,9 @@ class EthOrderSpec val sh = StubHelpers(blockchain) sh.creditBalance(TxHelpers.matcher.toAddress, *) sh.creditBalance(TestEthOrdersPublicKey.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(TxHelpers.matcher.toAddress -> Long.MaxValue / 3, TestEthOrdersPublicKey.toAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) } @@ -306,6 +321,9 @@ class EthOrderSpec val sh = StubHelpers(blockchain) sh.creditBalance(TxHelpers.matcher.toAddress, *) sh.creditBalance(ethSellOrder.senderAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(TxHelpers.matcher.toAddress -> Long.MaxValue / 3, ethSellOrder.senderAddress -> Long.MaxValue / 3)) // TODO: something more smart ? 
val script = TxHelpers.script(""" @@ -345,6 +363,15 @@ class EthOrderSpec sh.creditBalance(TxHelpers.matcher.toAddress, *) sh.creditBalance(ethBuyOrder.senderAddress, *) sh.creditBalance(ethSellOrder.senderAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns( + Map( + TxHelpers.matcher.toAddress -> Long.MaxValue / 3, + ethBuyOrder.senderAddress -> Long.MaxValue / 3, + ethSellOrder.senderAddress -> Long.MaxValue / 3 + ) + ) sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( diff --git a/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderJsonSpecification.scala b/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderJsonSpecification.scala index ace18185481..c5acb34d6e7 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderJsonSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderJsonSpecification.scala @@ -232,7 +232,7 @@ class OrderJsonSpecification extends PropSpec with JsonMatchers with EthHelpers case JsSuccess(o: Order, _) => o.json() should matchJson(json) - Verifier.verifyAsEllipticCurveSignature(o, checkWeakPk = false).explicitGet() + Verifier.verifyAsEllipticCurveSignature(o, isRideV6Activated = false).explicitGet() } } } diff --git a/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderSpecification.scala b/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderSpecification.scala index 90338c1eb77..e8028be2327 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/assets/exchange/OrderSpecification.scala @@ -155,40 +155,45 @@ class OrderSpecification extends PropSpec with ValidationMatcher with NTPTime { Random.nextBytes(rndAsset) - Verifier.verifyAsEllipticCurveSignature(order, checkWeakPk = true) should beRight + Verifier.verifyAsEllipticCurveSignature(order, isRideV6Activated = true) should beRight - Verifier.verifyAsEllipticCurveSignature(order.copy(matcherPublicKey = pka.publicKey), checkWeakPk = true) should produce(err) + Verifier.verifyAsEllipticCurveSignature(order.copy(matcherPublicKey = pka.publicKey), isRideV6Activated = true) should produce(err) val assetPair = order.assetPair Verifier.verifyAsEllipticCurveSignature( order.copy(assetPair = assetPair.copy(amountAsset = IssuedAsset(ByteStr(rndAsset)))), - checkWeakPk = true + isRideV6Activated = true ) should produce(err) Verifier.verifyAsEllipticCurveSignature( order.copy(assetPair = assetPair.copy(priceAsset = IssuedAsset(ByteStr(rndAsset)))), - checkWeakPk = true + isRideV6Activated = true ) should produce(err) - Verifier.verifyAsEllipticCurveSignature(order.copy(orderType = OrderType.reverse(order.orderType)), checkWeakPk = true) should produce(err) - Verifier.verifyAsEllipticCurveSignature(order.copy(price = TxOrderPrice.unsafeFrom(order.price.value + 1)), checkWeakPk = true) should produce( + Verifier.verifyAsEllipticCurveSignature(order.copy(orderType = OrderType.reverse(order.orderType)), isRideV6Activated = true) should produce( + err + ) + Verifier.verifyAsEllipticCurveSignature( + order.copy(price = TxOrderPrice.unsafeFrom(order.price.value + 1)), + isRideV6Activated = true + ) should produce( err ) Verifier.verifyAsEllipticCurveSignature( order.copy(amount = TxExchangeAmount.unsafeFrom(order.amount.value + 1)), - checkWeakPk = true + isRideV6Activated = true ) should produce(err) - 
Verifier.verifyAsEllipticCurveSignature(order.copy(expiration = order.expiration + 1), checkWeakPk = true) should produce(err) + Verifier.verifyAsEllipticCurveSignature(order.copy(expiration = order.expiration + 1), isRideV6Activated = true) should produce(err) Verifier.verifyAsEllipticCurveSignature( order.copy(matcherFee = TxMatcherFee.unsafeFrom(order.matcherFee.value + 1)), - checkWeakPk = true + isRideV6Activated = true ) should produce(err) val orderAuth = order.orderAuthentication.asInstanceOf[OrderProofs] Verifier.verifyAsEllipticCurveSignature( order.copy(orderAuthentication = orderAuth.copy(key = pka.publicKey)), - checkWeakPk = true + isRideV6Activated = true ) should produce(err) Verifier.verifyAsEllipticCurveSignature( order.copy(orderAuthentication = orderAuth.copy(proofs = Proofs(Seq(ByteStr(pka.publicKey.arr ++ pka.publicKey.arr))))), - checkWeakPk = true + isRideV6Activated = true ) should produce(err) } } diff --git a/node/src/test/scala/com/wavesplatform/transaction/smart/EthereumTransactionSpec.scala b/node/src/test/scala/com/wavesplatform/transaction/smart/EthereumTransactionSpec.scala index 81d7d8949f5..fa1981c41a6 100644 --- a/node/src/test/scala/com/wavesplatform/transaction/smart/EthereumTransactionSpec.scala +++ b/node/src/test/scala/com/wavesplatform/transaction/smart/EthereumTransactionSpec.scala @@ -100,6 +100,7 @@ class EthereumTransactionSpec b.stub.issueAsset(TestAsset.id) b.stub.creditBalance(senderAddress, Waves, Long.MaxValue) b.stub.creditBalance(senderAddress, TestAsset, Long.MaxValue) + (b.wavesBalances _).when(*).returns(Map(senderAddress -> Long.MaxValue)) (b.resolveERC20Address _).when(ERC20Address(TestAsset.id.take(20))).returning(Some(TestAsset)) } val differ = blockchain.stub.transactionDiffer(TestTime(System.currentTimeMillis())).andThen(_.resultE.explicitGet()) @@ -301,6 +302,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( @@ -360,6 +364,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( @@ -397,6 +404,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( @@ -450,6 +460,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) 
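// Context for the stubs added around here: alongside the per-address creditBalance helpers, the specs in
// this patch also stub the batched balance lookups that the RocksDB-backed state introduces on Blockchain
// (wavesBalances, balances, leaseBalances; their signatures are visible in the EmptyBlockchain overrides
// further down in this patch). A minimal, illustrative sketch of that ScalaMock pattern, assuming a
// MockFactory scope and hypothetical fixture addresses alice and bob that are not part of this patch:
val blockchain   = stub[Blockchain]
val (alice, bob) = (TxHelpers.address(1), TxHelpers.address(2))
(blockchain.wavesBalances _).when(*).returns(Map(alice -> Long.MaxValue / 3, bob -> Long.MaxValue / 3))
(blockchain.leaseBalances _).when(*).returns(Map(alice -> LeaseBalance.empty, bob -> LeaseBalance.empty))
(blockchain.balances _).when(*).returns(Map.empty)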
sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( @@ -497,6 +510,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( @@ -534,6 +550,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(ByteStr(EthStubBytes32)) val script = TxHelpers.script( @@ -587,6 +606,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(TestAsset.id) val script = TxHelpers.script( @@ -646,6 +668,9 @@ class EthereumTransactionSpec sh.activateFeatures(BlockchainFeatures.BlockV5, BlockchainFeatures.RideV6) sh.creditBalance(invokerAccount.toWavesAddress, *) sh.creditBalance(dAppAccount.toAddress, *) + (blockchain.wavesBalances _) + .when(*) + .returns(Map(invokerAccount.toWavesAddress -> Long.MaxValue / 3, dAppAccount.toAddress -> Long.MaxValue / 3)) sh.issueAsset(TestAsset.id) val script = TxHelpers.script( diff --git a/node/src/test/scala/com/wavesplatform/utils/DiffMatchers.scala b/node/src/test/scala/com/wavesplatform/utils/DiffMatchers.scala index 967a5f87e1e..a3659c0b992 100644 --- a/node/src/test/scala/com/wavesplatform/utils/DiffMatchers.scala +++ b/node/src/test/scala/com/wavesplatform/utils/DiffMatchers.scala @@ -2,7 +2,7 @@ package com.wavesplatform.utils import com.wavesplatform.common.state.ByteStr import com.wavesplatform.state.Diff -import org.scalatest.matchers.{Matcher, MatchResult} +import org.scalatest.matchers.{MatchResult, Matcher} trait DiffMatchers { def containAppliedTx(transactionId: ByteStr) = new DiffAppliedTxMatcher(transactionId, true) diff --git a/node/src/test/scala/com/wavesplatform/utils/EmptyBlockchain.scala b/node/src/test/scala/com/wavesplatform/utils/EmptyBlockchain.scala index 5e4961a9f7a..3f6a4048a8f 100644 --- a/node/src/test/scala/com/wavesplatform/utils/EmptyBlockchain.scala +++ b/node/src/test/scala/com/wavesplatform/utils/EmptyBlockchain.scala @@ -6,7 +6,7 @@ import com.wavesplatform.block.SignedBlockHeader import com.wavesplatform.common.state.ByteStr import com.wavesplatform.lang.ValidationError import com.wavesplatform.settings.BlockchainSettings -import com.wavesplatform.state._ +import com.wavesplatform.state.* import com.wavesplatform.state.reader.LeaseDetails import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves} import com.wavesplatform.transaction.TxValidationError.GenericError @@ -46,6 +46,8 @@ trait EmptyBlockchain extends Blockchain { override def transactionInfo(id: ByteStr): Option[(TxMeta, Transaction)] = None + override def transactionInfos(ids: Seq[ByteStr]): Seq[Option[(TxMeta, Transaction)]] = Seq.empty + override def transactionMeta(id: 
ByteStr): Option[TxMeta] = None override def containsTransaction(tx: Transaction): Boolean = false @@ -68,14 +70,20 @@ trait EmptyBlockchain extends Blockchain { override def assetScript(asset: IssuedAsset): Option[AssetScriptInfo] = None - override def accountData(acc: Address, key: String): Option[DataEntry[_]] = None + override def accountData(acc: Address, key: String): Option[DataEntry[?]] = None override def hasData(acc: Address): Boolean = false override def balance(address: Address, mayBeAssetId: Asset): Long = 0 + override def balances(req: Seq[(Address, Asset)]): Map[(Address, Asset), Long] = Map.empty + + override def wavesBalances(addresses: Seq[Address]): Map[Address, Long] = Map.empty + override def leaseBalance(address: Address): LeaseBalance = LeaseBalance.empty + override def leaseBalances(addresses: Seq[Address]): Map[Address, LeaseBalance] = Map.empty + override def resolveERC20Address(address: ERC20Address): Option[IssuedAsset] = None } diff --git a/node/src/test/scala/com/wavesplatform/utils/ObservedLoadingCacheSpecification.scala b/node/src/test/scala/com/wavesplatform/utils/ObservedLoadingCacheSpecification.scala index f928d9f0664..ac355cad702 100644 --- a/node/src/test/scala/com/wavesplatform/utils/ObservedLoadingCacheSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/utils/ObservedLoadingCacheSpecification.scala @@ -1,10 +1,10 @@ package com.wavesplatform.utils -import java.util.concurrent.TimeUnit -import java.util.concurrent.atomic.AtomicLong - import com.google.common.base.Ticker import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} + +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicLong import com.wavesplatform.test.FreeSpec import com.wavesplatform.utils.ObservedLoadingCacheSpecification.FakeTicker import monix.execution.Ack @@ -13,7 +13,7 @@ import org.scalamock.scalatest.MockFactory import scala.concurrent.Future import scala.concurrent.duration.DurationInt -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* class ObservedLoadingCacheSpecification extends FreeSpec with MockFactory { private val ExpiringTime = 10.minutes @@ -68,7 +68,7 @@ class ObservedLoadingCacheSpecification extends FreeSpec with MockFactory { .newBuilder() .expireAfterWrite(ExpiringTime.toMillis, TimeUnit.MILLISECONDS) .ticker(ticker) - .build(new CacheLoader[String, Integer] { + .build[String, Integer](new CacheLoader[String, Integer] { override def load(key: String): Integer = key.length }) diff --git a/node/src/test/scala/com/wavesplatform/utx/UtxPoolSpecification.scala b/node/src/test/scala/com/wavesplatform/utx/UtxPoolSpecification.scala index 200d4d4ec30..d8818e96aec 100644 --- a/node/src/test/scala/com/wavesplatform/utx/UtxPoolSpecification.scala +++ b/node/src/test/scala/com/wavesplatform/utx/UtxPoolSpecification.scala @@ -8,7 +8,7 @@ import com.wavesplatform.block.{Block, SignedBlockHeader} import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.consensus.TransactionsOrdering -import com.wavesplatform.database.{LevelDBWriter, TestStorageFactory, openDB} +import com.wavesplatform.database.{RDB, RocksDBWriter, TestStorageFactory} import com.wavesplatform.db.WithDomain import com.wavesplatform.db.WithState.AddrWithBalance import com.wavesplatform.events.UtxEvent @@ -26,7 +26,7 @@ import com.wavesplatform.mining.* import com.wavesplatform.settings.* import com.wavesplatform.state.* import com.wavesplatform.state.diffs.{invoke as 
_, *} -import com.wavesplatform.state.utils.TestLevelDB +import com.wavesplatform.state.utils.TestRocksDB import com.wavesplatform.test.* import com.wavesplatform.transaction.Asset.Waves import com.wavesplatform.transaction.TxHelpers.* @@ -36,32 +36,28 @@ import com.wavesplatform.transaction.smart.script.ScriptCompiler import com.wavesplatform.transaction.transfer.* import com.wavesplatform.transaction.transfer.MassTransferTransaction.ParsedTransfer import com.wavesplatform.transaction.utils.Signed -import com.wavesplatform.transaction.{Asset, Transaction, *} +import com.wavesplatform.transaction.{Transaction, *} import com.wavesplatform.utils.Time import com.wavesplatform.utx.UtxPool.PackStrategy -import monix.reactive.subjects.PublishSubject -import org.iq80.leveldb.DB import org.scalacheck.Gen.* import org.scalacheck.{Arbitrary, Gen} import org.scalamock.scalatest.MockFactory -import org.scalatest.EitherValues +import org.scalatest.{Assertion, EitherValues} import org.scalatest.concurrent.Eventually import java.nio.file.{Files, Path} import scala.collection.mutable.ListBuffer import scala.concurrent.duration.* -import scala.util.Random +import scala.util.{Random, Using} private object UtxPoolSpecification { - private val ignoreSpendableBalanceChanged = PublishSubject[(Address, Asset)]() + final case class TempDB(fs: FunctionalitySettings, dbSettings: DBSettings) extends AutoCloseable { + val path: Path = Files.createTempDirectory("rocksdb-test") + val rdb = RDB.open(dbSettings.copy(directory = path.toAbsolutePath.toString)) + val writer: RocksDBWriter = TestRocksDB.withFunctionalitySettings(rdb, fs) - final case class TempDB(fs: FunctionalitySettings, dbSettings: DBSettings) { - val path: Path = Files.createTempDirectory("leveldb-test") - val db: DB = openDB(path.toAbsolutePath.toString) - val writer: LevelDBWriter = TestLevelDB.withFunctionalitySettings(db, ignoreSpendableBalanceChanged, fs) - - sys.addShutdownHook { - db.close() + override def close(): Unit = { + rdb.close() TestHelpers.deleteRecursively(path) } } @@ -75,7 +71,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact import FunctionalitySettings.TESTNET.maxTransactionTimeBackOffset as maxAge import UtxPoolSpecification.* - private def mkBlockchain(genAccounts: Map[Address, Long]) = { + private def withBlockchain[A](genAccounts: Map[Address, Long])(test: BlockchainUpdaterImpl => A): A = { val genesisSettings = TestHelpers.genesisSettings(genAccounts) val origSettings = WavesSettings.default() val settings = origSettings.copy( @@ -94,10 +90,11 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact featuresSettings = origSettings.featuresSettings.copy(autoShutdownOnUnsupportedFeature = false) ) - val dbContext = TempDB(settings.blockchainSettings.functionalitySettings, settings.dbSettings) - val (bcu, _) = TestStorageFactory(settings, dbContext.db, new TestTime, ignoreSpendableBalanceChanged, ignoreBlockchainUpdateTriggers) - bcu.processBlock(Block.genesis(genesisSettings, bcu.isFeatureActivated(BlockchainFeatures.RideV6)).explicitGet()) should beRight - bcu + Using.resource(TempDB(settings.blockchainSettings.functionalitySettings, settings.dbSettings)) { dbContext => + val (bcu, _) = TestStorageFactory(settings, dbContext.rdb, new TestTime, ignoreBlockchainUpdateTriggers) + bcu.processBlock(Block.genesis(genesisSettings, bcu.isFeatureActivated(BlockchainFeatures.RideV6)).explicitGet()) should beRight + test(bcu) + } } private def transfer(sender: KeyPair, 
maxAmount: Long, time: Time) = @@ -110,26 +107,6 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact .explicitGet()) .label("transferTransaction") - private def transferWithRecipient(sender: KeyPair, recipient: PublicKey, maxAmount: Long, time: Time) = - (for { - amount <- chooseNum(1, (maxAmount * 0.9).toLong) - fee <- chooseNum(extraFee, (maxAmount * 0.1).toLong) - } yield TransferTransaction - .selfSigned(1.toByte, sender, recipient.toAddress, Waves, amount, Waves, fee, ByteStr.empty, time.getTimestamp()) - .explicitGet()) - .label("transferWithRecipient") - - private def massTransferWithRecipients(sender: KeyPair, recipients: List[PublicKey], maxAmount: Long, time: Time) = { - val amount = maxAmount / (recipients.size + 1) - val transfers = recipients.map(r => ParsedTransfer(r.toAddress, TxNonNegativeAmount.unsafeFrom(amount))) - val minFee = FeeValidation.FeeConstants(TransactionType.Transfer) + FeeValidation.FeeConstants(TransactionType.MassTransfer) * transfers.size - val txs = - for { fee <- chooseNum(minFee, amount) } yield MassTransferTransaction - .selfSigned(1.toByte, sender, Waves, transfers, fee, time.getTimestamp(), ByteStr.empty) - .explicitGet() - txs.label("transferWithRecipient") - } - private def invokeScript(sender: KeyPair, dApp: Address, time: Time) = Gen.choose(500000L, 600000L).map { fee => Signed.invokeScript(TxVersion.V1, sender, dApp, None, Seq.empty, fee, Waves, time.getTimestamp()) @@ -148,144 +125,92 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact SetScriptTransaction.selfSigned(TxVersion.V1, sender, Some(script), extraFee, time.getTimestamp()).explicitGet() } - private val accountsGen = for { - sender <- accountGen.label("sender") - senderBalance <- positiveLongGen.label("senderBalance") - if senderBalance > 100000L - } yield (sender, senderBalance) - - private val stateGen = for { - (sender, senderBalance) <- accountsGen - } yield { - val bcu = mkBlockchain(Map(sender.toAddress -> senderBalance)) - (sender, senderBalance, bcu) + private def withState[A](test: (KeyPair, Long, BlockchainUpdaterImpl) => A): A = { + val sender = TxHelpers.signer(1) + val senderBalance = ENOUGH_AMT + withBlockchain(Map(sender.toAddress -> senderBalance)) { bcu => + test(sender, senderBalance, bcu) + } } - private val stateWithTransfer = for { - (sender, balance, bcu) <- stateGen - time = new TestTime - transfer <- transfer(sender, balance, time) - } yield (time, bcu, transfer) - - private val stateWithThreeAccounts = for { - (sender1, senderBalance1) <- accountsGen - (sender2, senderBalance2) <- accountsGen - (sender3, senderBalance3) <- accountsGen - } yield { - val bcu = mkBlockchain(Map(sender1.toAddress -> senderBalance1, sender2.toAddress -> senderBalance2, sender3.toAddress -> senderBalance3)) - (((sender1, senderBalance1), (sender2, senderBalance2), (sender3, senderBalance3)), bcu) + private def withStateWithTransfer[A](test: (TestTime, BlockchainUpdaterImpl, TransferTransaction) => A): A = { + withState { case (sender, _, bcu) => + val time = new TestTime + test(time, bcu, transfer(sender, time)) + } } - private val twoOutOfManyValidPayments = (for { - (sender, senderBalance, bcu) <- stateGen - recipient <- accountGen - n <- chooseNum(3, 10) - fee <- chooseNum(extraFee, (senderBalance * 0.01).toLong) - offset <- chooseNum(1000L, 2000L) - } yield { - val time = TestTime() - val utx = - new UtxPoolImpl( - time, - bcu, + private def withStateWithThreeAccounts[A](test: (((KeyPair, Long), (KeyPair, Long), 
(KeyPair, Long)), BlockchainUpdaterImpl) => A): A = { + val sender1 = TxHelpers.signer(1) + val senderBalance1 = ENOUGH_AMT + val sender2 = TxHelpers.signer(2) + val senderBalance2 = ENOUGH_AMT + val sender3 = TxHelpers.signer(3) + val senderBalance3 = ENOUGH_AMT + + withBlockchain(Map(sender1.toAddress -> senderBalance1, sender2.toAddress -> senderBalance2, sender3.toAddress -> senderBalance3)) { bcu => + test(((sender1, senderBalance1), (sender2, senderBalance2), (sender3, senderBalance3)), bcu) + } + } + + private def withTwoOutOfManyValidPayments[A](test: (UtxPoolImpl, TestTime, IndexedSeq[TransferTransaction], FiniteDuration) => A): A = + withState { case (sender, senderBalance, bcu) => + val recipient = TxHelpers.signer(2) + + val time = TestTime() + val utx = + new UtxPoolImpl( + time, + bcu, + UtxSettings( + 10, + PoolDefaultMaxBytes, + 1000, + Set.empty, + Set.empty, + Set.empty, + allowTransactionsFromSmartAccounts = true, + allowSkipChecks = false, + forceValidateInCleanup = false, + alwaysUnlimitedExecution = false + ), + Int.MaxValue, + isMiningEnabled = true + ) + val amountPart = (senderBalance - extraFee) / 2 - extraFee + val txs = for (_ <- 1 to 10) yield createWavesTransfer(sender, recipient.toAddress, amountPart, extraFee, time.getTimestamp()).explicitGet() + test(utx, time, txs, 2000.millis) + } + + private def withBlacklisted[A](test: (KeyPair, UtxPoolImpl, Seq[TransferTransaction]) => A): A = + withState { case (sender, _, bcu) => + val recipient = TxHelpers.signer(2) + val time = TestTime() + val txs = (1 to 10).map(_ => transferWithRecipient(sender, recipient.publicKey, time)) + val settings = UtxSettings( 10, PoolDefaultMaxBytes, 1000, - Set.empty, + Set(sender.toAddress.toString), Set.empty, Set.empty, allowTransactionsFromSmartAccounts = true, allowSkipChecks = false, forceValidateInCleanup = false, alwaysUnlimitedExecution = false - ), - Int.MaxValue, - isMiningEnabled = true - ) - val amountPart = (senderBalance - fee) / 2 - fee - val txs = for (_ <- 1 to n) yield createWavesTransfer(sender, recipient.toAddress, amountPart, fee, time.getTimestamp()).explicitGet() - (utx, time, txs, (offset + 1000).millis) - }).label("twoOutOfManyValidPayments") - - private val withBlacklisted = (for { - (sender, senderBalance, bcu) <- stateGen - recipient <- accountGen - time = TestTime() - txs <- Gen.nonEmptyListOf(transferWithRecipient(sender, recipient.publicKey, senderBalance / 10, time)) // @TODO: Random transactions - } yield { - val settings = - UtxSettings( - 10, - PoolDefaultMaxBytes, - 1000, - Set(sender.toAddress.toString), - Set.empty, - Set.empty, - allowTransactionsFromSmartAccounts = true, - allowSkipChecks = false, - forceValidateInCleanup = false, - alwaysUnlimitedExecution = false - ) - val utxPool = new UtxPoolImpl(time, bcu, settings, Int.MaxValue, isMiningEnabled = true) - (sender, utxPool, txs) - }).label("withBlacklisted") - - private val withBlacklistedAndAllowedByRule = (for { - (sender, senderBalance, bcu) <- stateGen - recipient <- accountGen - time = TestTime() - txs <- Gen.nonEmptyListOf(transferWithRecipient(sender, recipient.publicKey, senderBalance / 10, time)) // @TODO: Random transactions - } yield { - val settings = - UtxSettings( - txs.length, - PoolDefaultMaxBytes, - 1000, - Set(sender.toAddress.toString), - Set(recipient.toAddress.toString), - Set.empty, - allowTransactionsFromSmartAccounts = true, - allowSkipChecks = false, - forceValidateInCleanup = false, - alwaysUnlimitedExecution = false - ) - val utxPool = new UtxPoolImpl(time, bcu, 
settings, Int.MaxValue, isMiningEnabled = true) - (sender, utxPool, txs) - }).label("withBlacklistedAndAllowedByRule") - - private val withBlacklistedAndWhitelisted = (for { - (sender, senderBalance, bcu) <- stateGen - recipient <- accountGen - time = TestTime() - txs <- Gen.nonEmptyListOf(transferWithRecipient(sender, recipient.publicKey, senderBalance / 10, time)) - } yield { - val settings = - UtxSettings( - txs.length, - PoolDefaultMaxBytes, - 1000, - Set(sender.toAddress.toString), - Set.empty, - Set(sender.toAddress.toString), - allowTransactionsFromSmartAccounts = true, - allowSkipChecks = false, - forceValidateInCleanup = false, - alwaysUnlimitedExecution = false - ) - val utxPool = new UtxPoolImpl(time, bcu, settings, Int.MaxValue, isMiningEnabled = true) - (sender, utxPool, txs) - }).label("withBlacklistedAndWhitelisted") + ) + val utxPool = new UtxPoolImpl(time, bcu, settings, Int.MaxValue, isMiningEnabled = true) + test(sender, utxPool, txs) + } - private def massTransferWithBlacklisted(allowRecipients: Boolean) = - (for { - (sender, senderBalance, bcu) <- stateGen - addressesSize <- Gen.choose(1, MassTransferTransaction.MaxTransferCount) - addressGen = Gen.listOfN(addressesSize, accountGen).filter(list => if (allowRecipients) list.nonEmpty else true) - recipients <- addressGen.map(_.map(_.publicKey)) - time = TestTime() - txs <- Gen.nonEmptyListOf(massTransferWithRecipients(sender, recipients, senderBalance / 10, time)) - } yield { + private def withMassTransferWithBlacklisted[A](allowRecipients: Boolean)(test: (KeyPair, UtxPoolImpl, Seq[MassTransferTransaction]) => A): A = { + withState { case (sender, senderBalance, bcu) => + val recipients = (1 to 10).map(idx => TxHelpers.signer(1 + idx).publicKey) + val time = TestTime() + // @TODO: Random transactions + val txs = (1 to 10).map(_ => massTransferWithRecipients(sender, recipients, senderBalance / 10, time)) ++ + (if (!allowRecipients) Seq(massTransferWithRecipients(sender, Seq.empty, senderBalance / 10, time)) else Seq.empty) val whitelist: Set[String] = if (allowRecipients) recipients.map(_.toAddress.toString).toSet else Set.empty val settings = UtxSettings( @@ -301,28 +226,62 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact alwaysUnlimitedExecution = false ) val utxPool = new UtxPoolImpl(time, bcu, settings, Int.MaxValue, isMiningEnabled = true) - (sender, utxPool, txs) - }).label("massTransferWithBlacklisted") + test(sender, utxPool, txs) + } + } - private def utxTest(utxSettings: UtxSettings, txCount: Int = 10)(f: (Seq[TransferTransaction], UtxPool, TestTime) => Unit): Unit = - forAll(stateGen, chooseNum(2, txCount).label("txCount")) { case ((sender, senderBalance, bcu), count) => - val time = TestTime() + private def withBlacklistedAndAllowedByRule[A](test: (KeyPair, UtxPoolImpl, Seq[TransferTransaction]) => A): A = + withState { case (sender, _, bcu) => + val recipient = TxHelpers.signer(2) + val time = TestTime() + val txs = (1 to 10).map(_ => transferWithRecipient(sender, recipient.publicKey, time)) + val settings = + UtxSettings( + txs.length, + PoolDefaultMaxBytes, + 1000, + Set(sender.toAddress.toString), + Set(recipient.toAddress.toString), + Set.empty, + allowTransactionsFromSmartAccounts = true, + allowSkipChecks = false, + forceValidateInCleanup = false, + alwaysUnlimitedExecution = false + ) + val utxPool = new UtxPoolImpl(time, bcu, settings, Int.MaxValue, isMiningEnabled = true) + test(sender, utxPool, txs) + } - forAll(listOfN(count, transfer(sender, senderBalance / 2, 
time))) { txs => - val utx = new UtxPoolImpl(time, bcu, utxSettings, Int.MaxValue, isMiningEnabled = true) - f(txs, utx, time) - } + private def withBlacklistedAndWhitelisted[A](test: (KeyPair, UtxPoolImpl, Seq[TransferTransaction]) => A): A = { + withState { case (sender, _, bcu) => + val recipient = TxHelpers.signer(2) + val time = TestTime() + val txs = (1 to 10).map(_ => transferWithRecipient(sender, recipient.publicKey, time)) + val settings = + UtxSettings( + txs.length, + PoolDefaultMaxBytes, + 1000, + Set(sender.toAddress.toString), + Set.empty, + Set(sender.toAddress.toString), + allowTransactionsFromSmartAccounts = true, + allowSkipChecks = false, + forceValidateInCleanup = false, + alwaysUnlimitedExecution = false + ) + val utxPool = new UtxPoolImpl(time, bcu, settings, Int.MaxValue, isMiningEnabled = true) + test(sender, utxPool, txs) } + } - private val dualTxGen: Gen[(UtxPool, TestTime, Seq[Transaction], Seq[Transaction])] = - for { - (sender, senderBalance, bcu) <- stateGen - ts = System.currentTimeMillis() - count1 <- chooseNum(5, 10) - tx1 <- listOfN(count1, transfer(sender, senderBalance / 2, TestTime(ts))) - tx2 <- listOfN(count1, transfer(sender, senderBalance / 2, TestTime(ts + maxAge.toMillis + 1000))) - } yield { - val time = TestTime() + private def withDualTxs[A](test: (UtxPool, TestTime, Seq[Transaction], Seq[Transaction]) => A): A = + withState { case (sender, _, bcu) => + val ts = System.currentTimeMillis() + val count = 5 + val txs1 = (1 to count).map(idx => transfer(sender, TestTime(ts + idx))) + val txs2 = (1 to count).map(idx => transfer(sender, TestTime(ts + idx + maxAge.toMillis + 1000))) + val time = TestTime() val utx = new UtxPoolImpl( time, bcu, @@ -341,29 +300,13 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact Int.MaxValue, isMiningEnabled = true ) - (utx, time, tx1, tx2) + test(utx, time, txs1, txs2) } - private def preconditionBlocks(lastBlockId: ByteStr, master: KeyPair, time: Time): Seq[Block] = { - val ts = time.getTimestamp() - val script = TestCompiler(V3).compileExpression( - """ - |let x = 1 - |let y = 2 - |true - """.stripMargin - ) - val setScript = SetScriptTransaction.selfSigned(1.toByte, master, Some(script), 100000L, ts + 1).explicitGet() - Seq(TestBlock.create(ts + 1, lastBlockId, Seq(setScript))) - } - - private def withScriptedAccount(scEnabled: Boolean): Gen[(KeyPair, Long, UtxPoolImpl, Long)] = - for { - (sender, senderBalance, bcu) <- stateGen - time = TestTime() - preconditions = preconditionBlocks(bcu.lastBlockId.get, sender, time) - } yield { - // val smartAccountsFs = TestFunctionalitySettings.Enabled.copy(preActivatedFeatures = Map(BlockchainFeatures.SmartAccounts.id -> 0)) + private def withScriptedAccount[A](scEnabled: Boolean)(test: (KeyPair, Long, UtxPoolImpl, Long) => A): A = + withState { case (sender, senderBalance, bcu) => + val time = TestTime() + val preconditions = preconditionBlocks(bcu.lastBlockId.get, sender, time) preconditions.foreach(b => bcu.processBlock(b) should beRight) val utx = new UtxPoolImpl( time, @@ -384,15 +327,55 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact isMiningEnabled = true ) - (sender, senderBalance, utx, bcu.lastBlockTimestamp.getOrElse(0L)) + test(sender, senderBalance, utx, bcu.lastBlockTimestamp.getOrElse(0L)) } - private def transactionV1Gen(sender: KeyPair, ts: Long, feeAmount: Long): Gen[TransferTransaction] = accountGen.map { recipient => - TransferTransaction.selfSigned(1.toByte, sender, recipient.toAddress, 
Waves, waves(1), Waves, feeAmount, ByteStr.empty, ts).explicitGet() + private def transfer(sender: KeyPair, time: Time) = + TransferTransaction + .selfSigned(1.toByte, sender, TxHelpers.address(2), Waves, 1, Waves, extraFee, ByteStr.empty, time.getTimestamp()) + .explicitGet() + + private def transferWithRecipient(sender: KeyPair, recipient: PublicKey, time: Time) = + TransferTransaction + .selfSigned(1.toByte, sender, recipient.toAddress, Waves, 1, Waves, extraFee, ByteStr.empty, time.getTimestamp()) + .explicitGet() + + private def massTransferWithRecipients(sender: KeyPair, recipients: Seq[PublicKey], maxAmount: Long, time: Time) = { + val amount = maxAmount / (recipients.size + 1) + val transfers = recipients.map(r => ParsedTransfer(r.toAddress, TxNonNegativeAmount.unsafeFrom(amount))) + val minFee = FeeValidation.FeeConstants(TransactionType.Transfer) + FeeValidation.FeeConstants(TransactionType.MassTransfer) * transfers.size + MassTransferTransaction + .selfSigned(1.toByte, sender, Waves, transfers, minFee, time.getTimestamp(), ByteStr.empty) + .explicitGet() } - private def transactionV2Gen(sender: KeyPair, ts: Long, feeAmount: Long): Gen[TransferTransaction] = accountGen.map { recipient => - TransferTransaction.selfSigned(2.toByte, sender, recipient.toAddress, Waves, waves(1), Waves, feeAmount, ByteStr.empty, ts).explicitGet() + private def transactionV1(sender: KeyPair, ts: Long, feeAmount: Long): TransferTransaction = + TransferTransaction.selfSigned(1.toByte, sender, TxHelpers.address(2), Waves, waves(1), Waves, feeAmount, ByteStr.empty, ts).explicitGet() + + private def transactionV2(sender: KeyPair, ts: Long, feeAmount: Long): TransferTransaction = + TransferTransaction.selfSigned(2.toByte, sender, TxHelpers.address(2), Waves, waves(1), Waves, feeAmount, ByteStr.empty, ts).explicitGet() + + private def utxTest(utxSettings: UtxSettings, txCount: Int = 10)(f: (Seq[TransferTransaction], UtxPool, TestTime) => Unit): Unit = { + withState { case (sender, _, bcu) => + val time = TestTime() + val txs = (1 to txCount).map(_ => transfer(sender, time)) + + val utx = new UtxPoolImpl(time, bcu, utxSettings, Int.MaxValue, isMiningEnabled = true) + f(txs, utx, time) + } + } + + private def preconditionBlocks(lastBlockId: ByteStr, master: KeyPair, time: Time): Seq[Block] = { + val ts = time.getTimestamp() + val script = TestCompiler(V3).compileExpression( + """ + |let x = 1 + |let y = 2 + |true + """.stripMargin + ) + val setScript = SetScriptTransaction.selfSigned(1.toByte, master, Some(script), 100000L, ts + 1).explicitGet() + Seq(TestBlock.create(ts + 1, lastBlockId, Seq(setScript))) } "UTX Pool" - { @@ -433,7 +416,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact } "adds new transactions when skip checks is allowed" in { - forAll(stateGen) { case (sender, senderBalance, bcu) => + withState { case (sender, senderBalance, bcu) => val time = TestTime() val gen = for { @@ -468,7 +451,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact } "adds new transactions when transaction is whitelisted" in { - forAll(stateWithThreeAccounts) { case (((sender1, senderBalance1), (sender2, senderBalance2), (sender3, _)), bcu) => + withStateWithThreeAccounts { case (((sender1, senderBalance1), (sender2, senderBalance2), (sender3, _)), bcu) => val time = TestTime() val precondition = TestBlock.create( @@ -529,7 +512,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact utx.putIfNew(txs.head).resultE should 
matchPattern { case Right(false) => } } - "packUnconfirmed result is limited by constraint" in forAll(dualTxGen) { case (utx, _, txs, _) => + "packUnconfirmed result is limited by constraint" in withDualTxs { case (utx, _, txs, _) => txs.foreach(tx => utx.putIfNew(tx).resultE should beRight) utx.all.size shouldEqual txs.size @@ -541,7 +524,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact if (maxNumber <= utx.all.size) restUpdated.isFull shouldBe true } - "packUnconfirmed takes whitelisted first of all" in forAll(stateWithThreeAccounts) { + "packUnconfirmed takes whitelisted first of all" in withStateWithThreeAccounts { case (((sender1, senderBalance1), (sender2, senderBalance2), (sender3, _)), bcu) => val time = TestTime() @@ -586,7 +569,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact } } - "evicts expired transactions when packUnconfirmed is called" in forAll(dualTxGen) { case (utx, time, txs, _) => + "evicts expired transactions when packUnconfirmed is called" in withDualTxs { case (utx, time, txs, _) => txs.foreach(tx => utx.putIfNew(tx).resultE should beRight) utx.all.size shouldEqual txs.size @@ -597,16 +580,15 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact utx.all shouldBe empty } - "evicts one of mutually invalid transactions when packUnconfirmed is called" in forAll(twoOutOfManyValidPayments) { - case (utx, time, txs, offset) => - txs.foreach(tx => utx.putIfNew(tx).resultE should beRight) - utx.all.size shouldEqual txs.size + "evicts one of mutually invalid transactions when packUnconfirmed is called" in withTwoOutOfManyValidPayments { case (utx, time, txs, offset) => + txs.foreach(tx => utx.putIfNew(tx).resultE should beRight) + utx.all.size shouldEqual txs.size - time.advance(offset) + time.advance(offset) - val (packed, _) = utx.packUnconfirmed(limitByNumber(100), PackStrategy.Unlimited) - packed.get.size shouldBe 2 - utx.all.size shouldBe 2 + val (packed, _) = utx.packUnconfirmed(limitByNumber(100), PackStrategy.Unlimited) + packed.get.size shouldBe 2 + utx.all.size shouldBe 2 } "correctly process constraints in packUnconfirmed" in { @@ -697,8 +679,7 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact "blacklisting" - { "prevent a transfer transaction from specific addresses" in { - val transferGen = Gen.oneOf(withBlacklisted, massTransferWithBlacklisted(allowRecipients = false)) - forAll(transferGen) { case (_, utxPool, txs) => + def test(utxPool: UtxPoolImpl, txs: Seq[Transaction]): Assertion = { val r = txs.forall { tx => utxPool.putIfNew(tx).resultE match { case Left(SenderIsBlacklisted(_)) => true @@ -709,18 +690,23 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact r shouldBe true utxPool.all.size shouldEqual 0 } + + withBlacklisted { case (_, utxPool, txs) => test(utxPool, txs) } + withMassTransferWithBlacklisted(allowRecipients = false) { case (_, utxPool, txs) => test(utxPool, txs) } } "allow a transfer transaction from blacklisted address to specific addresses" in { - val transferGen = Gen.oneOf(withBlacklistedAndAllowedByRule, massTransferWithBlacklisted(allowRecipients = true)) - forAll(transferGen) { case (_, utxPool, txs) => + def test(utxPool: UtxPoolImpl, txs: Seq[Transaction]): Assertion = { txs.foreach(utxPool.putIfNew(_).resultE should beRight) utxPool.all.size shouldEqual txs.size } + + withBlacklistedAndAllowedByRule { case (_, utxPool, txs) => test(utxPool, txs) } + 
+        withMassTransferWithBlacklisted(allowRecipients = true) { case (_, utxPool, txs) => test(utxPool, txs) }
      }
 
      "allow a transfer transaction from whitelisted address" in {
-        forAll(withBlacklistedAndWhitelisted) { case (_, utxPool, txs) =>
+        withBlacklistedAndWhitelisted { case (_, utxPool, txs) =>
          all(txs.map { t =>
            utxPool.putIfNew(t).resultE
          }) shouldBe Symbol("right")
@@ -731,41 +717,38 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact
 
    "smart accounts" - {
      "signed txs from scripted account is not allowed" in {
-        val enoughFeeTxWithScriptedAccount =
-          for {
-            (sender, senderBalance, utx, ts) <- withScriptedAccount(true)
-            feeAmount                        <- choose(extraFee, senderBalance / 2)
-            tx                               <- transactionV1Gen(sender, ts + 1, feeAmount)
-          } yield (utx, tx)
+        withScriptedAccount(scEnabled = true) { case (sender, _, utx, ts) =>
+          val tx = transactionV1(sender, ts + 1, extraFee)
 
-        val (utx, tx) = enoughFeeTxWithScriptedAccount.sample.getOrElse(throw new IllegalStateException("NO SAMPLE"))
-        utx.putIfNew(tx).resultE should produce("signature from scripted account")
+          utx.putIfNew(tx).resultE should produce("signature from scripted account")
+        }
      }
 
      "any transaction from scripted account is not allowed if smartAccounts disabled in utx pool" - {
-        def enoughFeeTxWithScriptedAccount(version: Int): Gen[(UtxPoolImpl, TransferTransaction)] =
-          for {
-            (sender, senderBalance, utx, ts) <- withScriptedAccount(false)
-            feeAmount                        <- choose(extraFee, senderBalance / 2)
-            tx <- version match {
-              case 1 => transactionV1Gen(sender, ts + 1, feeAmount)
-              case 2 => transactionV2Gen(sender, ts + 1, feeAmount)
+        def withVersion[A](version: Int)(test: (UtxPoolImpl, TransferTransaction) => A): A =
+          withScriptedAccount(scEnabled = false) { case (sender, _, utx, ts) =>
+            val tx = version match {
+              case 1 => transactionV1(sender, ts + 1, extraFee)
+              case 2 => transactionV2(sender, ts + 1, extraFee)
            }
-          } yield (utx, tx)
+            test(utx, tx)
+          }
 
        "v1" in {
-          val (utx1, tx1) = enoughFeeTxWithScriptedAccount(1).sample.getOrElse(throw new IllegalStateException("NO SAMPLE"))
-          utx1.putIfNew(tx1).resultE.left.value
+          withVersion(1) { case (utx, tx) =>
+            utx.putIfNew(tx).resultE.left.value
+          }
        }
 
        "v2" in {
-          val (utx2, tx2) = enoughFeeTxWithScriptedAccount(2).sample.getOrElse(throw new IllegalStateException("NO SAMPLE"))
-          utx2.putIfNew(tx2).resultE should produce("denied from UTX pool")
+          withVersion(2) { case (utx, tx) =>
+            utx.putIfNew(tx).resultE should produce("denied from UTX pool")
+          }
        }
      }
 
    "when pack time limit is exceeded" - {
-      "always packs the first transaction" in forAll(stateWithTransfer) { case (time, bcu, transfer) =>
+      "always packs the first transaction" in withStateWithTransfer { case (time, bcu, transfer) =>
        var timeSourceIsRunning = false
 
        def nanoTimeSource(): Long =
          if (timeSourceIsRunning) 100000L
@@ -1033,9 +1016,9 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact
        acc1 <- accountGen
        tx1  <- transfer(acc, ENOUGH_AMT / 3, ntpTime)
        txs  <- Gen.nonEmptyListOf(transfer(acc1, 10000000L, ntpTime).suchThat(_.fee.value < tx1.fee.value))
-      } yield (tx1, txs)
+      } yield (acc, acc1, tx1, txs)
 
-      forAll(gen) { case (tx1, rest) =>
+      forAll(gen) { case (acc, acc1, tx1, rest) =>
        val blockchain = stub[Blockchain]
        (() => blockchain.settings).when().returning(WavesSettings.default().blockchainSettings)
        (() => blockchain.height).when().returning(1)
@@ -1053,6 +1036,8 @@ class UtxPoolSpecification extends FreeSpec with MockFactory with BlocksTransact
        (blockchain.balance _).when(*, *).returning(ENOUGH_AMT)
+        (blockchain.wavesBalances _).when(*).returning(Map(acc.toAddress -> ENOUGH_AMT, acc1.toAddress -> ENOUGH_AMT))
+        (blockchain.leaseBalance _).when(*).returning(LeaseBalance(0, 0))
 
        (blockchain.accountScript _).when(*).onCall { _: Address =>
          utx.removeAll(rest)
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 4a38891c366..d05275c5c87 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -14,31 +14,25 @@ object Dependencies {
  private def kamonModule(module: String) = "io.kamon" %% s"kamon-$module" % "2.5.12"
 
-  private def jacksonModule(group: String, module: String) = s"com.fasterxml.jackson.$group" % s"jackson-$module" % "2.14.1"
-
-  private def catsModule(module: String, version: String = "2.6.1") = Def.setting("org.typelevel" %%% s"cats-$module" % version)
-
  private def web3jModule(module: String) = "org.web3j" % module % "4.9.5"
 
  def monixModule(module: String): Def.Initialize[ModuleID] = Def.setting("io.monix" %%% s"monix-$module" % "3.4.1")
 
  val kindProjector = compilerPlugin("org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full)
 
-  val akkaHttp           = akkaHttpModule("akka-http")
-  val jacksonModuleScala = jacksonModule("module", "module-scala").withCrossVersion(CrossVersion.Binary())
-  val googleGuava        = "com.google.guava" % "guava" % "31.1-jre"
-  val kamonCore          = kamonModule("core")
-  val machinist          = "org.typelevel" %% "machinist" % "0.6.8"
-  val logback            = "ch.qos.logback" % "logback-classic" % "1.3.5" // 1.4.x and later is built for Java 11
-  val janino             = "org.codehaus.janino" % "janino" % "3.1.9"
-  val asyncHttpClient    = "org.asynchttpclient" % "async-http-client" % "2.12.3"
-  val curve25519         = "com.wavesplatform" % "curve25519-java" % "0.6.4"
-  val nettyHandler       = "io.netty" % "netty-handler" % "4.1.85.Final"
-
-  val catsCore = catsModule("core", "2.9.0")
+  val akkaHttp        = akkaHttpModule("akka-http")
+  val googleGuava     = "com.google.guava" % "guava" % "31.1-jre"
+  val kamonCore       = kamonModule("core")
+  val machinist       = "org.typelevel" %% "machinist" % "0.6.8"
+  val logback         = "ch.qos.logback" % "logback-classic" % "1.3.5" // 1.4.x and later is built for Java 11
+  val janino          = "org.codehaus.janino" % "janino" % "3.1.9"
+  val asyncHttpClient = "org.asynchttpclient" % "async-http-client" % "2.12.3"
+  val curve25519      = "com.wavesplatform" % "curve25519-java" % "0.6.4"
+  val nettyHandler    = "io.netty" % "netty-handler" % "4.1.85.Final"
+
  val shapeless = Def.setting("com.chuusai" %%% "shapeless" % "2.3.10")
 
-  val scalaTest   = "org.scalatest" %% "scalatest" % "3.2.14" % Test
+  val scalaTest   = "org.scalatest" %% "scalatest" % "3.2.15" % Test
  val scalaJsTest = Def.setting("com.lihaoyi" %%% "utest" % "0.8.1" % Test)
 
  val sttp3 = "com.softwaremill.sttp.client3" % "core_2.13" % "3.5.2" // 3.6.x and later is built for Java 11
 
@@ -59,8 +53,8 @@ object Dependencies {
      // defined here because %%% can only be used within a task or setting macro
      // explicit dependency can likely be removed when monix 3 is released
      monixModule("eval").value,
-      catsCore.value,
-      "com.lihaoyi" %%% "fastparse" % "2.3.3",
+      "org.typelevel" %%% "cats-core" % "2.9.0",
+      "com.lihaoyi"   %%% "fastparse" % "2.3.3",
      shapeless.value,
      "org.typelevel" %%% "cats-mtl" % "1.3.0",
      "ch.obermuhlner" % "big-math" % "2.3.2",
@@ -74,8 +68,8 @@ object Dependencies {
 
  lazy val it = scalaTest +: Seq(
    logback,
-    "com.spotify" % "docker-client" % "8.16.0",
-    jacksonModule("dataformat", "dataformat-properties"),
+    "com.spotify"                      % "docker-client"                 % "8.16.0",
"com.fasterxml.jackson.dataformat" % "jackson-dataformat-properties" % "2.14.2", asyncHttpClient ).map(_ % Test) @@ -93,16 +87,10 @@ object Dependencies { akkaModule("slf4j") % Runtime ) - private def leveldbJava(module: String = "") = "org.iq80.leveldb" % s"leveldb${if (module.nonEmpty) "-" else ""}$module" % "0.12" - - private[this] val levelDBJNA = { - val levelDbVersion = "1.23.1" + private[this] val dbDeps = Seq( - "com.wavesplatform.leveldb-jna" % "leveldb-jna-core" % levelDbVersion, - "com.wavesplatform.leveldb-jna" % "leveldb-jna-native" % levelDbVersion, - leveldbJava("api") + "org.rocksdb" % "rocksdbjni" % "8.0.0" ) - } lazy val node = Def.setting( Seq( @@ -110,7 +98,7 @@ object Dependencies { "commons-net" % "commons-net" % "3.8.0", "org.apache.commons" % "commons-lang3" % "3.12.0", "com.iheart" %% "ficus" % "1.5.2", - "net.logstash.logback" % "logstash-logback-encoder" % "7.2" % Runtime, + "net.logstash.logback" % "logstash-logback-encoder" % "7.3" % Runtime, kamonCore, kamonModule("system-metrics"), kamonModule("influxdb"), @@ -119,7 +107,7 @@ object Dependencies { "org.influxdb" % "influxdb-java" % "2.23", googleGuava, "com.google.code.findbugs" % "jsr305" % "3.0.2" % Compile, // javax.annotation stubs - "com.typesafe.play" %% "play-json" % "2.9.3", + "com.typesafe.play" %% "play-json" % "2.9.4", akkaModule("actor"), akkaModule("stream"), akkaHttp, @@ -128,13 +116,13 @@ object Dependencies { monixModule("reactive").value, nettyHandler, "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5", - "eu.timepit" %% "refined" % "0.10.1", - "com.esaulpaugh" % "headlong" % "9.0.0", + "eu.timepit" %% "refined" % "0.10.2" exclude ("org.scala-lang.modules", "scala-xml_2.13"), + "eu.timepit" %% "refined-cats" % "0.10.2" exclude ("org.scala-lang.modules", "scala-xml_2.13"), + "com.esaulpaugh" % "headlong" % "9.2.0", web3jModule("abi"), - akkaModule("testkit") % Test, - akkaHttpModule("akka-http-testkit") % Test, - leveldbJava().exclude("com.google.guava", "guava") % Test - ) ++ test ++ console ++ logDeps ++ levelDBJNA ++ protobuf.value ++ langCompilerPlugins.value + akkaModule("testkit") % Test, + akkaHttpModule("akka-http-testkit") % Test + ) ++ test ++ console ++ logDeps ++ dbDeps ++ protobuf.value ++ langCompilerPlugins.value ) lazy val scalapbRuntime = Def.setting { @@ -165,5 +153,5 @@ object Dependencies { } lazy val kanela = - Seq("io.kamon" % "kanela-agent" % "1.0.14") + Seq("io.kamon" % "kanela-agent" % "1.0.17") } diff --git a/project/plugins.sbt b/project/plugins.sbt index 177881fec39..00e8ac22695 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ resolvers ++= Seq( // Should go before Scala.js addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.6") -libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.12" +libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.13" Seq( "com.eed3si9n" % "sbt-assembly" % "1.2.0", @@ -21,10 +21,10 @@ Seq( ).map(addSbtPlugin) libraryDependencies ++= Seq( - "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.13.3", + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.14.2", "org.hjson" % "hjson" % "3.0.0", "org.vafer" % "jdeb" % "1.10" artifacts Artifact("jdeb", "jar", "jar"), - "org.slf4j" % "jcl-over-slf4j" % "2.0.0", + "org.slf4j" % "jcl-over-slf4j" % "2.0.5", ("com.spotify" % "docker-client" % "8.16.0") .exclude("commons-logging", "commons-logging") )