From c35fa76de3f7d7422fe93f54febaff8a58417baf Mon Sep 17 00:00:00 2001
From: tamirms
Date: Fri, 28 Jun 2024 11:27:33 +0100
Subject: [PATCH] Remove dump-ledger-state script

---
 .github/workflows/horizon-master.yml          |  28 --
 exp/tools/dump-ledger-state/Dockerfile        |  45 ---
 exp/tools/dump-ledger-state/README.md         |  14 -
 exp/tools/dump-ledger-state/diff_test.sh      |  36 --
 .../dump-ledger-state/docker-entrypoint.sh    |  39 --
 exp/tools/dump-ledger-state/dump_core_db.sh   |  27 --
 exp/tools/dump-ledger-state/main.go           | 366 ------
 exp/tools/dump-ledger-state/run_test.sh       |  39 --
 .../stellar-core-testnet.cfg                  |  39 --
 exp/tools/dump-ledger-state/stellar-core.cfg  | 201 ----------
 10 files changed, 834 deletions(-)
 delete mode 100644 .github/workflows/horizon-master.yml
 delete mode 100644 exp/tools/dump-ledger-state/Dockerfile
 delete mode 100644 exp/tools/dump-ledger-state/README.md
 delete mode 100755 exp/tools/dump-ledger-state/diff_test.sh
 delete mode 100755 exp/tools/dump-ledger-state/docker-entrypoint.sh
 delete mode 100755 exp/tools/dump-ledger-state/dump_core_db.sh
 delete mode 100644 exp/tools/dump-ledger-state/main.go
 delete mode 100755 exp/tools/dump-ledger-state/run_test.sh
 delete mode 100644 exp/tools/dump-ledger-state/stellar-core-testnet.cfg
 delete mode 100644 exp/tools/dump-ledger-state/stellar-core.cfg

diff --git a/.github/workflows/horizon-master.yml b/.github/workflows/horizon-master.yml
deleted file mode 100644
index e2487a0d64..0000000000
--- a/.github/workflows/horizon-master.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: Horizon master
-
-on:
-  push:
-    branches: [master]
-
-jobs:
-
-  push-state-diff-image:
-    name: Push stellar/ledger-state-diff:{sha,latest} to DockerHub
-    runs-on: ubuntu-22.04
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Login to DockerHub
-        uses: docker/login-action@bb984efc561711aaa26e433c32c3521176eae55b
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Build and push to DockerHub
-        uses: docker/build-push-action@7f9d37fa544684fb73bfe4835ed7214c255ce02b
-        with:
-          push: true
-          tags: stellar/ledger-state-diff:${{ github.sha }},stellar/ledger-state-diff:latest
-          file: exp/tools/dump-ledger-state/Dockerfile
-          build-args: GITCOMMIT=${{ github.sha }}
-          no-cache: true
diff --git a/exp/tools/dump-ledger-state/Dockerfile b/exp/tools/dump-ledger-state/Dockerfile
deleted file mode 100644
index 5ffcb9c0a2..0000000000
--- a/exp/tools/dump-ledger-state/Dockerfile
+++ /dev/null
@@ -1,45 +0,0 @@
-FROM ubuntu:22.04
-
-ENV STELLAR_CORE_VERSION=21.0.0-1872.c6f474133.focal
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl wget gnupg apt-utils
-RUN wget -qO - https://apt.stellar.org/SDF.asc | APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=true apt-key add -
-RUN echo "deb https://apt.stellar.org focal stable" >/etc/apt/sources.list.d/SDF.list
-# RUN echo "deb https://apt.stellar.org focal unstable" >/etc/apt/sources.list.d/SDF-unstable.list
-RUN apt-get update -y
-
-RUN apt-get install -y stellar-core=${STELLAR_CORE_VERSION} jq
-RUN apt-get clean
-RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ $(env -i bash -c '. /etc/os-release; echo $VERSION_CODENAME')-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list && \
-  wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
-  apt-get update && \
-  DEBIAN_FRONTEND="noninteractive" apt-get install -y postgresql-9.6 postgresql-contrib-9.6 postgresql-client-9.6
-
-# Create a PostgreSQL role named `circleci` and then create a database `core` owned by the `circleci` role.
-RUN su - postgres -c "/etc/init.d/postgresql start && psql --command \"CREATE USER circleci WITH SUPERUSER;\" && createdb -O circleci core"
-
-# Adjust PostgreSQL configuration so that remote connections to the
-# database are possible.
-RUN echo "host all all all trust" > /etc/postgresql/9.6/main/pg_hba.conf
-
-# And add `listen_addresses` to `/etc/postgresql/9.6/main/postgresql.conf`
-RUN echo "listen_addresses='*'" >> /etc/postgresql/9.6/main/postgresql.conf
-
-COPY --from=golang:1.22-bullseye /usr/local/go/ /usr/local/go/
-RUN ln -s /usr/local/go/bin/go /usr/local/bin/go
-WORKDIR /go/src/github.com/stellar/go
-COPY go.mod go.sum ./
-RUN go mod download
-COPY . ./
-
-ENV PGPORT=5432
-ENV PGUSER=circleci
-ENV PGHOST=localhost
-
-WORKDIR /go/src/github.com/stellar/go/exp/tools/dump-ledger-state
-
-ARG GITCOMMIT
-ENV GITCOMMIT=${GITCOMMIT}
-
-ENTRYPOINT ["./docker-entrypoint.sh"]
diff --git a/exp/tools/dump-ledger-state/README.md b/exp/tools/dump-ledger-state/README.md
deleted file mode 100644
index 17376bd17d..0000000000
--- a/exp/tools/dump-ledger-state/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# dump-ledger-state
-
-This tool dumps the state from history archive buckets to 4 separate files:
-* accounts.csv
-* accountdata.csv
-* offers.csv
-* trustlines.csv
-* claimablebalances.csv
-
-It's primary use is to test `SingleLedgerStateReader`. To run the test (`run_test.sh`) it:
-1. Runs `dump-ledger-state`.
-2. Syncs stellar-core to the same checkpoint: `stellar-core catchup [ledger]/1`.
-3. Dumps stellar-core DB by using `dump_core_db.sh` script.
-4. Diffs results by using `diff_test.sh` script.
diff --git a/exp/tools/dump-ledger-state/diff_test.sh b/exp/tools/dump-ledger-state/diff_test.sh
deleted file mode 100755
index 69295b2a82..0000000000
--- a/exp/tools/dump-ledger-state/diff_test.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-ENTRIES=(accounts accountdata offers trustlines claimablebalances pools)
-
-echo "Sorting dump-ledger-state output files..."
-for i in "${ENTRIES[@]}"
-do
-  if test -f "${i}_sorted.csv"; then
-    echo "Skipping, ${i}_sorted.csv exists (remove if out of date to sort again)"
-    continue
-  fi
-  wc -l ${i}.csv
-  sort -S 500M -o ${i}_sorted.csv ${i}.csv
-done
-
-echo "Sorting stellar-core output files..."
-for i in "${ENTRIES[@]}"
-do
-  if test -f "${i}_core_sorted.csv"; then
-    echo "Skipping, ${i}_core_sorted.csv exists (remove if out of date to sort again)"
-    continue
-  fi
-  wc -l ${i}_core.csv
-  sort -S 500M -o ${i}_core_sorted.csv ${i}_core.csv
-done
-
-echo "Checking diffs..."
-for type in "${ENTRIES[@]}"
-do
-  diff -q ${type}_core_sorted.csv ${type}_sorted.csv
-  if [ "$?" -ne "0" ]
-  then
-    echo "ERROR: $type does NOT match";
-    exit -1
-  else
-    echo "$type OK";
-  fi
-done
diff --git a/exp/tools/dump-ledger-state/docker-entrypoint.sh b/exp/tools/dump-ledger-state/docker-entrypoint.sh
deleted file mode 100755
index f1451c2ad5..0000000000
--- a/exp/tools/dump-ledger-state/docker-entrypoint.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /bin/bash
-set -e
-
-/etc/init.d/postgresql start
-
-while ! psql -U circleci -d core -h localhost -p 5432 -c 'select 1' >/dev/null 2>&1; do
-  echo "Waiting for postgres to be available..."
-  sleep 1
-done
-
-echo "using version $(stellar-core version)"
-
-if [ -z ${TESTNET+x} ]; then
-  stellar-core --conf ./stellar-core.cfg new-db
-else
-  stellar-core --conf ./stellar-core-testnet.cfg new-db
-fi
-
-if [ -z ${LATEST_LEDGER+x} ]; then
-  # Get latest ledger
-  echo "Getting latest checkpoint ledger..."
-  if [ -z ${TESTNET+x} ]; then
-    export LATEST_LEDGER=`curl -s http://history.stellar.org/prd/core-live/core_live_001/.well-known/stellar-history.json | jq -r '.currentLedger'`
-  else
-    export LATEST_LEDGER=`curl -s http://history.stellar.org/prd/core-testnet/core_testnet_001/.well-known/stellar-history.json | jq -r '.currentLedger'`
-  fi
-fi
-
-if [[ -z "${LATEST_LEDGER}" ]]; then
-  echo "could not obtain latest ledger"
-  exit 1
-fi
-
-echo "Latest ledger: $LATEST_LEDGER"
-
-if ! ./run_test.sh; then
-  echo "ingestion dump (git commit \`$GITCOMMIT\`) of ledger \`$LATEST_LEDGER\` does not match stellar core db."
-  exit 1
-fi
\ No newline at end of file
diff --git a/exp/tools/dump-ledger-state/dump_core_db.sh b/exp/tools/dump-ledger-state/dump_core_db.sh
deleted file mode 100755
index ebd8871a47..0000000000
--- a/exp/tools/dump-ledger-state/dump_core_db.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-# Get state from stellar-core DB, colums match CSV printer
-# FETCH_COUNT is there for circleci to use cursor-based method of getting rows (less RAM usage):
-# https://dba.stackexchange.com/a/101510
-
-echo "Fetching accounts from stellar-core DB..."
-psql -d core -t -A -F"," --variable="FETCH_COUNT=10000" -c "select accountid, balance, seqnum, numsubentries, inflationdest, homedomain, thresholds, flags, COALESCE(extension, 'AAAAAA=='), signers, ledgerext from accounts" > accounts_core.csv
-rm accounts_core_sorted.csv || true # Remove if exist in case original files are rebuilt
-
-echo "Fetching accountdata from stellar-core DB..."
-psql -d core -t -A -F"," --variable="FETCH_COUNT=10000" -c "select accountid, dataname, datavalue, COALESCE(extension, 'AAAAAA=='), ledgerext from accountdata" > accountdata_core.csv
-rm accountdata_core_sorted.csv || true # Remove if exist in case original files are rebuilt
-
-echo "Fetching offers from stellar-core DB..."
-psql -d core -t -A -F"," --variable="FETCH_COUNT=10000" -c "select sellerid, offerid, sellingasset, buyingasset, amount, pricen, priced, flags, COALESCE(extension, 'AAAAAA=='), ledgerext from offers" > offers_core.csv
-rm offers_core_sorted.csv || true # Remove if exist in case original files are rebuilt
-
-echo "Fetching trustlines from stellar-core DB..."
-psql -d core -t -A -F"," --variable="FETCH_COUNT=10000" -c "select ledgerentry from trustlines" > trustlines_core.csv
-rm trustlines_core_sorted.csv || true # Remove if exist in case original files are rebuilt
-
-echo "Fetching claimable balances from stellar-core DB..."
-psql -d core -t -A -F"," --variable="FETCH_COUNT=10000" -c "select balanceid, ledgerentry from claimablebalance" > claimablebalances_core.csv
-rm claimablebalances_core_sorted.csv || true # Remove if exist in case original files are rebuilt
-
-echo "Fetching liquidity pools from stellar-core DB..."
-psql -d core -t -A -F"," --variable="FETCH_COUNT=10000" -c "select ledgerentry from liquiditypool" > pools_core.csv
-rm pools_core_sorted.csv || true # Remove if exist in case original files are rebuilt
\ No newline at end of file
diff --git a/exp/tools/dump-ledger-state/main.go b/exp/tools/dump-ledger-state/main.go
deleted file mode 100644
index 26f59348a7..0000000000
--- a/exp/tools/dump-ledger-state/main.go
+++ /dev/null
@@ -1,366 +0,0 @@
-package main
-
-import (
-    "context"
-    "encoding/base64"
-    "encoding/csv"
-    "flag"
-    "io"
-    "os"
-    "runtime"
-    "strconv"
-    "time"
-
-    "github.com/stellar/go/historyarchive"
-    "github.com/stellar/go/ingest"
-    "github.com/stellar/go/support/errors"
-    "github.com/stellar/go/support/log"
-    "github.com/stellar/go/support/storage"
-    "github.com/stellar/go/xdr"
-)
-
-// csvMap maintains a mapping from ledger entry type to csv file
-type csvMap struct {
-    files   map[xdr.LedgerEntryType]*os.File
-    writers map[xdr.LedgerEntryType]*csv.Writer
-}
-
-// newCSVMap constructs an empty csvMap instance
-func newCSVMap() csvMap {
-    return csvMap{
-        files:   map[xdr.LedgerEntryType]*os.File{},
-        writers: map[xdr.LedgerEntryType]*csv.Writer{},
-    }
-}
-
-// put creates a new file with the given file name and links that file to the
-// given ledger entry type
-func (c csvMap) put(entryType xdr.LedgerEntryType, fileName string) error {
-    if _, ok := c.files[entryType]; ok {
-        return errors.Errorf("entry type %s is already present in the file set", fileName)
-    }
-
-    file, err := os.Create(fileName)
-    if err != nil {
-        return errors.Wrapf(err, "could not open file %s", fileName)
-    }
-
-    c.files[entryType] = file
-    c.writers[entryType] = csv.NewWriter(file)
-
-    return nil
-}
-
-// get returns a csv writer for the given ledger entry type if it exists in the mapping
-func (c csvMap) get(entryType xdr.LedgerEntryType) (*csv.Writer, bool) {
-    writer, ok := c.writers[entryType]
-    return writer, ok
-}
-
-// close will close all files contained in the mapping
-func (c csvMap) close() {
-    for entryType, file := range c.files {
-        if err := file.Close(); err != nil {
-            log.WithField("type", entryType.String()).Warn("could not close csv file")
-        }
-        delete(c.files, entryType)
-        delete(c.writers, entryType)
-    }
-}
-
-type csvProcessor struct {
-    files       csvMap
-    changeStats *ingest.StatsChangeProcessor
-}
-
-func (processor csvProcessor) ProcessChange(change ingest.Change) error {
-    csvWriter, ok := processor.files.get(change.Type)
-    if !ok {
-        return nil
-    }
-    if err := processor.changeStats.ProcessChange(context.Background(), change); err != nil {
-        return err
-    }
-
-    legerExt, err := xdr.MarshalBase64(change.Post.Ext)
-    if err != nil {
-        return err
-    }
-
-    switch change.Type {
-    case xdr.LedgerEntryTypeAccount:
-        account := change.Post.Data.MustAccount()
-
-        inflationDest := ""
-        if account.InflationDest != nil {
-            inflationDest = account.InflationDest.Address()
-        }
-
-        var signers string
-        if len(account.Signers) > 0 {
-            var err error
-            signers, err = xdr.MarshalBase64(account.Signers)
-            if err != nil {
-                return err
-            }
-        }
-
-        accountExt, err := xdr.MarshalBase64(account.Ext)
-        if err != nil {
-            return err
-        }
-
-        csvWriter.Write([]string{
-            account.AccountId.Address(),
-            strconv.FormatInt(int64(account.Balance), 10),
-            strconv.FormatInt(int64(account.SeqNum), 10),
-            strconv.FormatInt(int64(account.NumSubEntries), 10),
-            inflationDest,
-            base64.StdEncoding.EncodeToString([]byte(account.HomeDomain)),
-            base64.StdEncoding.EncodeToString(account.Thresholds[:]),
-            strconv.FormatInt(int64(account.Flags), 10),
-            accountExt,
-            signers,
-            legerExt,
-        })
-    case xdr.LedgerEntryTypeTrustline:
-        ledgerEntry, err := xdr.MarshalBase64(change.Post)
-        if err != nil {
-            return err
-        }
-        csvWriter.Write([]string{
-            ledgerEntry,
-        })
-    case xdr.LedgerEntryTypeOffer:
-        offer := change.Post.Data.MustOffer()
-
-        selling, err := xdr.MarshalBase64(offer.Selling)
-        if err != nil {
-            return err
-        }
-
-        buying, err := xdr.MarshalBase64(offer.Buying)
-        if err != nil {
-            return err
-        }
-
-        offerExt, err := xdr.MarshalBase64(offer.Ext)
-        if err != nil {
-            return err
-        }
-
-        csvWriter.Write([]string{
-            offer.SellerId.Address(),
-            strconv.FormatInt(int64(offer.OfferId), 10),
-            selling,
-            buying,
-            strconv.FormatInt(int64(offer.Amount), 10),
-            strconv.FormatInt(int64(offer.Price.N), 10),
-            strconv.FormatInt(int64(offer.Price.D), 10),
-            strconv.FormatInt(int64(offer.Flags), 10),
-            offerExt,
-            legerExt,
-        })
-    case xdr.LedgerEntryTypeData:
-        accountData := change.Post.Data.MustData()
-        accountDataExt, err := xdr.MarshalBase64(accountData.Ext)
-        if err != nil {
-            return err
-        }
-
-        csvWriter.Write([]string{
-            accountData.AccountId.Address(),
-            base64.StdEncoding.EncodeToString([]byte(accountData.DataName)),
-            base64.StdEncoding.EncodeToString(accountData.DataValue),
-            accountDataExt,
-            legerExt,
-        })
-    case xdr.LedgerEntryTypeClaimableBalance:
-        claimableBalance := change.Post.Data.MustClaimableBalance()
-
-        ledgerEntry, err := xdr.MarshalBase64(change.Post)
-        if err != nil {
-            return err
-        }
-
-        balanceID, err := xdr.MarshalBase64(claimableBalance.BalanceId)
-        if err != nil {
-            return err
-        }
-
-        csvWriter.Write([]string{
-            balanceID,
-            ledgerEntry,
-        })
-    case xdr.LedgerEntryTypeLiquidityPool:
-        ledgerEntry, err := xdr.MarshalBase64(change.Post)
-        if err != nil {
-            return err
-        }
-        csvWriter.Write([]string{
-            ledgerEntry,
-        })
-    default:
-        return errors.Errorf("Invalid LedgerEntryType: %d", change.Type)
-    }
-
-    if err := csvWriter.Error(); err != nil {
-        return errors.Wrap(err, "Error during csv.Writer.Write")
-    }
-
-    csvWriter.Flush()
-
-    if err := csvWriter.Error(); err != nil {
-        return errors.Wrap(err, "Error during csv.Writer.Flush")
-    }
-    return nil
-}
-
-func main() {
-    testnet := flag.Bool("testnet", false, "connect to the Stellar test network")
-    flag.Parse()
-
-    archive, err := archive(*testnet)
-    if err != nil {
-        panic(err)
-    }
-    log.SetLevel(log.InfoLevel)
-
-    files := newCSVMap()
-    defer files.close()
-
-    for entryType, fileName := range map[xdr.LedgerEntryType]string{
-        xdr.LedgerEntryTypeAccount:          "./accounts.csv",
-        xdr.LedgerEntryTypeData:             "./accountdata.csv",
-        xdr.LedgerEntryTypeOffer:            "./offers.csv",
-        xdr.LedgerEntryTypeTrustline:        "./trustlines.csv",
-        xdr.LedgerEntryTypeClaimableBalance: "./claimablebalances.csv",
-        xdr.LedgerEntryTypeLiquidityPool:    "./pools.csv",
-    } {
-        if err = files.put(entryType, fileName); err != nil {
-            log.WithField("err", err).
-                WithField("file", fileName).
-                Fatal("cannot create csv file")
-        }
-    }
-
-    ledgerSequenceString := os.Getenv("LATEST_LEDGER")
-    ledgerSequence, err := strconv.Atoi(ledgerSequenceString)
-    if err != nil {
-        log.WithField("ledger", ledgerSequenceString).
-            WithField("err", err).
-            Fatal("cannot parse latest ledger")
-    }
-    log.WithField("ledger", ledgerSequence).
-        Info("Processing entries from History Archive Snapshot")
-
-    changeReader, err := ingest.NewCheckpointChangeReader(
-        context.Background(),
-        archive,
-        uint32(ledgerSequence),
-    )
-    if err != nil {
-        log.WithField("err", err).Fatal("cannot construct change reader")
-    }
-    defer changeReader.Close()
-
-    changeStats := &ingest.StatsChangeProcessor{}
-    doneStats := printPipelineStats(changeStats)
-    changeProcessor := csvProcessor{files: files, changeStats: changeStats}
-    logFatalError := func(err error) {
-        log.WithField("err", err).Fatal("could not process all changes from HAS")
-    }
-    for {
-        change, err := changeReader.Read()
-        if err == io.EOF {
-            break
-        }
-        if err != nil {
-            logFatalError(errors.Wrap(err, "could not read transaction"))
-        }
-
-        if err = changeProcessor.ProcessChange(change); err != nil {
-            logFatalError(errors.Wrap(err, "could not process change"))
-        }
-    }
-
-    // Remove sorted files
-    sortedFiles := []string{
-        "./accounts_sorted.csv",
-        "./accountdata_sorted.csv",
-        "./offers_sorted.csv",
-        "./trustlines_sorted.csv",
-        "./claimablebalances_sort.csv",
-    }
-    for _, file := range sortedFiles {
-        err := os.Remove(file)
-        // Ignore not exist errors
-        if err != nil && !os.IsNotExist(err) {
-            panic(err)
-        }
-    }
-
-    doneStats <- true
-}
-
-func archive(testnet bool) (*historyarchive.Archive, error) {
-    if testnet {
-        return historyarchive.Connect(
-            "https://history.stellar.org/prd/core-testnet/core_testnet_001",
-            historyarchive.ArchiveOptions{
-                ConnectOptions: storage.ConnectOptions{
-                    UserAgent: "dump-ledger-state",
-                },
-            },
-        )
-    }
-
-    return historyarchive.Connect(
-        "https://history.stellar.org/prd/core-live/core_live_001/",
-        historyarchive.ArchiveOptions{
-            ConnectOptions: storage.ConnectOptions{
-                UserAgent: "dump-ledger-state",
-            },
-        },
-    )
-}
-
-func printPipelineStats(reporter *ingest.StatsChangeProcessor) chan<- bool {
-    startTime := time.Now()
-    done := make(chan bool)
-    ticker := time.NewTicker(10 * time.Second)
-
-    go func() {
-        defer ticker.Stop()
-
-        for {
-            var m runtime.MemStats
-            runtime.ReadMemStats(&m)
-            results := reporter.GetResults()
-            stats := log.F(results.Map())
-            stats["Alloc"] = bToMb(m.Alloc)
-            stats["HeapAlloc"] = bToMb(m.HeapAlloc)
-            stats["Sys"] = bToMb(m.Sys)
-            stats["NumGC"] = m.NumGC
-            stats["Goroutines"] = runtime.NumGoroutine()
-            stats["NumCPU"] = runtime.NumCPU()
-            stats["Duration"] = time.Since(startTime)
-
-            log.WithFields(stats).Info("Current Job Status")
-
-            select {
-            case <-ticker.C:
-                continue
-            case <-done:
-                // Pipeline done
-                return
-            }
-        }
-    }()
-
-    return done
-}
-
-func bToMb(b uint64) uint64 {
-    return b / 1024 / 1024
-}
diff --git a/exp/tools/dump-ledger-state/run_test.sh b/exp/tools/dump-ledger-state/run_test.sh
deleted file mode 100755
index ef2b56356c..0000000000
--- a/exp/tools/dump-ledger-state/run_test.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /bin/bash
-set -e
-
-if [ -z ${LATEST_LEDGER+x} ]; then
-  # Get latest ledger
-  echo "Getting latest checkpoint ledger..."
-  if [ -z ${TESTNET+x} ]; then
-    export LATEST_LEDGER=`curl -s http://history.stellar.org/prd/core-live/core_live_001/.well-known/stellar-history.json | jq -r '.currentLedger'`
-  else
-    export LATEST_LEDGER=`curl -s http://history.stellar.org/prd/core-testnet/core_testnet_001/.well-known/stellar-history.json | jq -r '.currentLedger'`
-  fi
-  echo "Latest ledger: $LATEST_LEDGER"
-fi
-
-# Dump state using Golang
-if [ -z ${TESTNET+x} ]; then
-  echo "Dumping pubnet state using ingest..."
-  go run ./main.go
-else
-  echo "Dumping testnet state using ingest..."
-  go run ./main.go --testnet
-fi
-echo "State dumped..."
-
-# Catchup core
-if [ -z ${TESTNET+x} ]; then
-  echo "Catch up from pubnet"
-  stellar-core --conf ./stellar-core.cfg catchup $LATEST_LEDGER/1
-else
-  echo "Catch up from testnet"
-  stellar-core --conf ./stellar-core-testnet.cfg catchup $LATEST_LEDGER/1
-fi
-
-echo "Dumping state from stellar-core..."
-./dump_core_db.sh
-echo "State dumped..."
-
-echo "Comparing state dumps..."
-./diff_test.sh
diff --git a/exp/tools/dump-ledger-state/stellar-core-testnet.cfg b/exp/tools/dump-ledger-state/stellar-core-testnet.cfg
deleted file mode 100644
index a02221e795..0000000000
--- a/exp/tools/dump-ledger-state/stellar-core-testnet.cfg
+++ /dev/null
@@ -1,39 +0,0 @@
-HTTP_PORT=11626
-PUBLIC_HTTP_PORT=true
-LOG_FILE_PATH=""
-
-DATABASE="postgresql://dbname=core host=localhost user=circleci"
-NETWORK_PASSPHRASE="Test SDF Network ; September 2015"
-UNSAFE_QUORUM=true
-FAILURE_SAFETY=1
-CATCHUP_RECENT=8640
-
-EXPERIMENTAL_BUCKETLIST_DB=true
-
-[HISTORY.cache]
-get="cp /opt/stellar/history-cache/{0} {1}"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="testnet.stellar.org"
-QUALITY="HIGH"
-
-[[VALIDATORS]]
-NAME="sdf_testnet_1"
-HOME_DOMAIN="testnet.stellar.org"
-PUBLIC_KEY="GDKXE2OZMJIPOSLNA6N6F2BVCI3O777I2OOC4BV7VOYUEHYX7RTRYA7Y"
-ADDRESS="core-testnet1.stellar.org"
-HISTORY="curl -sf http://history.stellar.org/prd/core-testnet/core_testnet_001/{0} -o {1}"
-
-[[VALIDATORS]]
-NAME="sdf_testnet_2"
-HOME_DOMAIN="testnet.stellar.org"
-PUBLIC_KEY="GCUCJTIYXSOXKBSNFGNFWW5MUQ54HKRPGJUTQFJ5RQXZXNOLNXYDHRAP"
-ADDRESS="core-testnet2.stellar.org"
-HISTORY="curl -sf http://history.stellar.org/prd/core-testnet/core_testnet_002/{0} -o {1}"
-
-[[VALIDATORS]]
-NAME="sdf_testnet_3"
-HOME_DOMAIN="testnet.stellar.org"
-PUBLIC_KEY="GC2V2EFSXN6SQTWVYA5EPJPBWWIMSD2XQNKUOHGEKB535AQE2I6IXV2Z"
-ADDRESS="core-testnet3.stellar.org"
-HISTORY="curl -sf http://history.stellar.org/prd/core-testnet/core_testnet_003/{0} -o {1}"
\ No newline at end of file
diff --git a/exp/tools/dump-ledger-state/stellar-core.cfg b/exp/tools/dump-ledger-state/stellar-core.cfg
deleted file mode 100644
index 0d97346ce6..0000000000
--- a/exp/tools/dump-ledger-state/stellar-core.cfg
+++ /dev/null
@@ -1,201 +0,0 @@
-HTTP_PORT=11626
-PUBLIC_HTTP_PORT=true
-LOG_FILE_PATH=""
-
-DATABASE="postgresql://dbname=core host=localhost user=circleci"
-NETWORK_PASSPHRASE="Public Global Stellar Network ; September 2015"
-CATCHUP_RECENT=1
-
-EXPERIMENTAL_BUCKETLIST_DB=true
-
-[HISTORY.cache]
-get="cp /opt/stellar/history-cache/{0} {1}"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="publicnode.org"
-QUALITY="HIGH"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="lobstr.co"
-QUALITY="HIGH"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="www.franklintempleton.com"
-QUALITY="HIGH"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="satoshipay.io"
-QUALITY="HIGH"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="whalestack.com"
-QUALITY="HIGH"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="www.stellar.org"
-QUALITY="HIGH"
-
-[[HOME_DOMAINS]]
-HOME_DOMAIN="stellar.blockdaemon.com"
-QUALITY="HIGH"
-
-[[VALIDATORS]]
-NAME="Boötes"
-PUBLIC_KEY="GCVJ4Z6TI6Z2SOGENSPXDQ2U4RKH3CNQKYUHNSSPYFPNWTLGS6EBH7I2"
-ADDRESS="bootes.publicnode.org:11625"
-HISTORY="curl -sf https://bootes-history.publicnode.org/{0} -o {1}"
-HOME_DOMAIN="publicnode.org"
-
-[[VALIDATORS]]
-NAME="Lyra by BP Ventures"
-PUBLIC_KEY="GCIXVKNFPKWVMKJKVK2V4NK7D4TC6W3BUMXSIJ365QUAXWBRPPJXIR2Z"
-ADDRESS="lyra.publicnode.org:11625"
-HISTORY="curl -sf https://lyra-history.publicnode.org/{0} -o {1}"
-HOME_DOMAIN="publicnode.org"
-
-[[VALIDATORS]]
-NAME="Hercules by OG Technologies"
-PUBLIC_KEY="GBLJNN3AVZZPG2FYAYTYQKECNWTQYYUUY2KVFN2OUKZKBULXIXBZ4FCT"
-ADDRESS="hercules.publicnode.org:11625"
-HISTORY="curl -sf https://hercules-history.publicnode.org/{0} -o {1}"
-HOME_DOMAIN="publicnode.org"
-
-[[VALIDATORS]]
-NAME="LOBSTR 3 (North America)"
-PUBLIC_KEY="GD5QWEVV4GZZTQP46BRXV5CUMMMLP4JTGFD7FWYJJWRL54CELY6JGQ63"
-ADDRESS="v3.stellar.lobstr.co:11625"
-HISTORY="curl -sf https://archive.v3.stellar.lobstr.co/{0} -o {1}"
-HOME_DOMAIN="lobstr.co"
-
-[[VALIDATORS]]
-NAME="LOBSTR 1 (Europe)"
-PUBLIC_KEY="GCFONE23AB7Y6C5YZOMKUKGETPIAJA4QOYLS5VNS4JHBGKRZCPYHDLW7"
-ADDRESS="v1.stellar.lobstr.co:11625"
-HISTORY="curl -sf https://archive.v1.stellar.lobstr.co/{0} -o {1}"
-HOME_DOMAIN="lobstr.co"
-
-[[VALIDATORS]]
-NAME="LOBSTR 2 (Europe)"
-PUBLIC_KEY="GCB2VSADESRV2DDTIVTFLBDI562K6KE3KMKILBHUHUWFXCUBHGQDI7VL"
-ADDRESS="v2.stellar.lobstr.co:11625"
-HISTORY="curl -sf https://archive.v2.stellar.lobstr.co/{0} -o {1}"
-HOME_DOMAIN="lobstr.co"
-
-[[VALIDATORS]]
-NAME="LOBSTR 4 (Asia)"
-PUBLIC_KEY="GA7TEPCBDQKI7JQLQ34ZURRMK44DVYCIGVXQQWNSWAEQR6KB4FMCBT7J"
-ADDRESS="v4.stellar.lobstr.co:11625"
-HISTORY="curl -sf https://archive.v4.stellar.lobstr.co/{0} -o {1}"
-HOME_DOMAIN="lobstr.co"
-
-[[VALIDATORS]]
-NAME="LOBSTR 5 (India)"
-PUBLIC_KEY="GA5STBMV6QDXFDGD62MEHLLHZTPDI77U3PFOD2SELU5RJDHQWBR5NNK7"
-ADDRESS="v5.stellar.lobstr.co:11625"
-HISTORY="curl -sf https://archive.v5.stellar.lobstr.co/{0} -o {1}"
-HOME_DOMAIN="lobstr.co"
-
-[[VALIDATORS]]
-NAME="FT SCV 2"
-PUBLIC_KEY="GCMSM2VFZGRPTZKPH5OABHGH4F3AVS6XTNJXDGCZ3MKCOSUBH3FL6DOB"
-ADDRESS="stellar2.franklintempleton.com:11625"
-HISTORY="curl -sf https://stellar-history-usc.franklintempleton.com/azuscshf401/{0} -o {1}"
-HOME_DOMAIN="www.franklintempleton.com"
-
-[[VALIDATORS]]
-NAME="FT SCV 3"
-PUBLIC_KEY="GA7DV63PBUUWNUFAF4GAZVXU2OZMYRATDLKTC7VTCG7AU4XUPN5VRX4A"
-ADDRESS="stellar3.franklintempleton.com:11625"
-HISTORY="curl -sf https://stellar-history-ins.franklintempleton.com/azinsshf401/{0} -o {1}"
-HOME_DOMAIN="www.franklintempleton.com"
-
-[[VALIDATORS]]
-NAME="FT SCV 1"
-PUBLIC_KEY="GARYGQ5F2IJEBCZJCBNPWNWVDOFK7IBOHLJKKSG2TMHDQKEEC6P4PE4V"
-ADDRESS="stellar1.franklintempleton.com:11625"
-HISTORY="curl -sf https://stellar-history-usw.franklintempleton.com/azuswshf401/{0} -o {1}"
-HOME_DOMAIN="www.franklintempleton.com"
-
-[[VALIDATORS]]
-NAME="SatoshiPay Frankfurt"
-PUBLIC_KEY="GC5SXLNAM3C4NMGK2PXK4R34B5GNZ47FYQ24ZIBFDFOCU6D4KBN4POAE"
-ADDRESS="stellar-de-fra.satoshipay.io:11625"
-HISTORY="curl -sf https://stellar-history-de-fra.satoshipay.io/{0} -o {1}"
-HOME_DOMAIN="satoshipay.io"
-
-[[VALIDATORS]]
-NAME="SatoshiPay Singapore"
-PUBLIC_KEY="GBJQUIXUO4XSNPAUT6ODLZUJRV2NPXYASKUBY4G5MYP3M47PCVI55MNT"
-ADDRESS="stellar-sg-sin.satoshipay.io:11625"
-HISTORY="curl -sf https://stellar-history-sg-sin.satoshipay.io/{0} -o {1}"
-HOME_DOMAIN="satoshipay.io"
-
-[[VALIDATORS]]
-NAME="SatoshiPay Iowa"
-PUBLIC_KEY="GAK6Z5UVGUVSEK6PEOCAYJISTT5EJBB34PN3NOLEQG2SUKXRVV2F6HZY"
-ADDRESS="stellar-us-iowa.satoshipay.io:11625"
-HISTORY="curl -sf https://stellar-history-us-iowa.satoshipay.io/{0} -o {1}"
-HOME_DOMAIN="satoshipay.io"
-
-[[VALIDATORS]]
-NAME="Whalestack (Germany)"
-PUBLIC_KEY="GD6SZQV3WEJUH352NTVLKEV2JM2RH266VPEM7EH5QLLI7ZZAALMLNUVN"
-ADDRESS="germany.stellar.whalestack.com:11625"
-HISTORY="curl -sf https://germany.stellar.whalestack.com/history/{0} -o {1}"
-HOME_DOMAIN="whalestack.com"
-
-[[VALIDATORS]]
-NAME="Whalestack (Hong Kong)"
-PUBLIC_KEY="GAZ437J46SCFPZEDLVGDMKZPLFO77XJ4QVAURSJVRZK2T5S7XUFHXI2Z"
-ADDRESS="hongkong.stellar.whalestack.com:11625"
-HISTORY="curl -sf https://hongkong.stellar.whalestack.com/history/{0} -o {1}"
-HOME_DOMAIN="whalestack.com"
-
-[[VALIDATORS]]
-NAME="Whalestack (Finland)"
-PUBLIC_KEY="GADLA6BJK6VK33EM2IDQM37L5KGVCY5MSHSHVJA4SCNGNUIEOTCR6J5T"
-ADDRESS="finland.stellar.whalestack.com:11625"
-HISTORY="curl -sf https://finland.stellar.whalestack.com/history/{0} -o {1}"
-HOME_DOMAIN="whalestack.com"
-
-[[VALIDATORS]]
-NAME="SDF 2"
-PUBLIC_KEY="GCM6QMP3DLRPTAZW2UZPCPX2LF3SXWXKPMP3GKFZBDSF3QZGV2G5QSTK"
-ADDRESS="core-live-b.stellar.org:11625"
-HISTORY="curl -sf http://history.stellar.org/prd/core-live/core_live_002/{0} -o {1}"
-HOME_DOMAIN="www.stellar.org"
-
-[[VALIDATORS]]
-NAME="SDF 1"
-PUBLIC_KEY="GCGB2S2KGYARPVIA37HYZXVRM2YZUEXA6S33ZU5BUDC6THSB62LZSTYH"
-ADDRESS="core-live-a.stellar.org:11625"
-HISTORY="curl -sf http://history.stellar.org/prd/core-live/core_live_001/{0} -o {1}"
-HOME_DOMAIN="www.stellar.org"
-
-[[VALIDATORS]]
-NAME="SDF 3"
-PUBLIC_KEY="GABMKJM6I25XI4K7U6XWMULOUQIQ27BCTMLS6BYYSOWKTBUXVRJSXHYQ"
-ADDRESS="core-live-c.stellar.org:11625"
-HISTORY="curl -sf http://history.stellar.org/prd/core-live/core_live_003/{0} -o {1}"
-HOME_DOMAIN="www.stellar.org"
-
-[[VALIDATORS]]
-NAME="Blockdaemon Validator 3"
-PUBLIC_KEY="GAYXZ4PZ7P6QOX7EBHPIZXNWY4KCOBYWJCA4WKWRKC7XIUS3UJPT6EZ4"
-ADDRESS="stellar-full-validator3.bdnodes.net:11625"
-HISTORY="curl -sf https://stellar-full-history3.bdnodes.net/{0} -o {1}"
-HOME_DOMAIN="stellar.blockdaemon.com"
-
-[[VALIDATORS]]
-NAME="Blockdaemon Validator 2"
-PUBLIC_KEY="GAVXB7SBJRYHSG6KSQHY74N7JAFRL4PFVZCNWW2ARI6ZEKNBJSMSKW7C"
-ADDRESS="stellar-full-validator2.bdnodes.net:11625"
-HISTORY="curl -sf https://stellar-full-history2.bdnodes.net/{0} -o {1}"
-HOME_DOMAIN="stellar.blockdaemon.com"
-
-[[VALIDATORS]]
-NAME="Blockdaemon Validator 1"
-PUBLIC_KEY="GAAV2GCVFLNN522ORUYFV33E76VPC22E72S75AQ6MBR5V45Z5DWVPWEU"
-ADDRESS="stellar-full-validator1.bdnodes.net:11625"
-HISTORY="curl -sf https://stellar-full-history1.bdnodes.net/{0} -o {1}"
-HOME_DOMAIN="stellar.blockdaemon.com"
\ No newline at end of file