From cc7604202e2239cdc1faafd2c269f052621b9c69 Mon Sep 17 00:00:00 2001 From: dreacot Date: Thu, 21 Jul 2022 23:56:15 +0100 Subject: [PATCH] add dcr wallet under it's own package --- dcr.go | 58 + dexclient.go | 2 +- go.mod | 8 + multiwallet.go | 173 ++- multiwallet_utils.go | 9 +- politeia_sync.go | 9 +- rescan.go | 53 +- sync.go | 493 -------- syncnotification.go | 1096 ++++++++--------- txandblocknotifications.go | 159 --- types.go | 8 +- utils.go | 33 +- vsp.go | 192 --- wallets.go | 46 +- .../dcr/account_mixer.go | 74 +- accounts.go => wallets/dcr/accounts.go | 24 +- address.go => wallets/dcr/address.go | 17 +- consensus.go => wallets/dcr/consensus.go | 6 +- decodetx.go => wallets/dcr/decodetx.go | 2 +- wallets/dcr/errors.go | 61 + wallets/dcr/log.go | 176 +++ message.go => wallets/dcr/message.go | 16 +- wallets/dcr/multiwallet_config.go | 152 +++ wallets/dcr/sync.go | 489 ++++++++ wallets/dcr/syncnotification.go | 682 ++++++++++ ticket.go => wallets/dcr/ticket.go | 334 ++--- .../dcr/transactions.go | 82 +- wallets/dcr/txandblocknotifications.go | 159 +++ txauthor.go => wallets/dcr/txauthor.go | 50 +- txindex.go => wallets/dcr/txindex.go | 20 +- txparser.go => wallets/dcr/txparser.go | 6 +- wallets/dcr/types.go | 530 ++++++++ wallets/dcr/utils.go | 502 ++++++++ utxo.go => wallets/dcr/utxo.go | 4 +- wallets/dcr/vsp.go | 192 +++ wallet.go => wallets/dcr/wallet.go | 88 +- .../dcr/wallet_config.go | 2 +- {walletdata => wallets/dcr/walletdata}/db.go | 0 .../dcr/walletdata}/filter.go | 0 .../dcr/walletdata}/read.go | 0 .../dcr/walletdata}/save.go | 0 wordlist.go => wallets/dcr/wordlist.go | 2 +- 42 files changed, 4075 insertions(+), 1934 deletions(-) create mode 100644 dcr.go delete mode 100644 sync.go delete mode 100644 txandblocknotifications.go delete mode 100644 vsp.go rename account_mixer.go => wallets/dcr/account_mixer.go (79%) rename accounts.go => wallets/dcr/accounts.go (90%) rename address.go => wallets/dcr/address.go (86%) rename consensus.go => wallets/dcr/consensus.go (98%) rename decodetx.go => wallets/dcr/decodetx.go (99%) create mode 100644 wallets/dcr/errors.go create mode 100644 wallets/dcr/log.go rename message.go => wallets/dcr/message.go (73%) create mode 100644 wallets/dcr/multiwallet_config.go create mode 100644 wallets/dcr/sync.go create mode 100644 wallets/dcr/syncnotification.go rename ticket.go => wallets/dcr/ticket.go (70%) rename transactions.go => wallets/dcr/transactions.go (81%) create mode 100644 wallets/dcr/txandblocknotifications.go rename txauthor.go => wallets/dcr/txauthor.go (93%) rename txindex.go => wallets/dcr/txindex.go (79%) rename txparser.go => wallets/dcr/txparser.go (94%) create mode 100644 wallets/dcr/types.go create mode 100644 wallets/dcr/utils.go rename utxo.go => wallets/dcr/utxo.go (98%) create mode 100644 wallets/dcr/vsp.go rename wallet.go => wallets/dcr/wallet.go (76%) rename wallet_config.go => wallets/dcr/wallet_config.go (99%) rename {walletdata => wallets/dcr/walletdata}/db.go (100%) rename {walletdata => wallets/dcr/walletdata}/filter.go (100%) rename {walletdata => wallets/dcr/walletdata}/read.go (100%) rename {walletdata => wallets/dcr/walletdata}/save.go (100%) rename wordlist.go => wallets/dcr/wordlist.go (99%) diff --git a/dcr.go b/dcr.go new file mode 100644 index 000000000..3ea1b5ba1 --- /dev/null +++ b/dcr.go @@ -0,0 +1,58 @@ +package dcrlibwallet + +import ( + // "context" + // "fmt" + "os" + "path/filepath" + + "decred.org/dcrwallet/v2/errors" + + "github.com/asdine/storm" + // "github.com/asdine/storm/q" + + bolt 
"go.etcd.io/bbolt" + + "github.com/planetdecred/dcrlibwallet/wallets/dcr" +) + +func initializeDCRWallet(rootDir, dbDriver, netType string) (*storm.DB, string, error) { + var mwDB *storm.DB + + rootDir = filepath.Join(rootDir, netType, "dcr") + err := os.MkdirAll(rootDir, os.ModePerm) + if err != nil { + return mwDB, "", errors.Errorf("failed to create dcr rootDir: %v", err) + } + + err = initLogRotator(filepath.Join(rootDir, logFileName)) + if err != nil { + return mwDB, "", errors.Errorf("failed to init dcr logRotator: %v", err.Error()) + } + + mwDB, err = storm.Open(filepath.Join(rootDir, walletsDbName)) + if err != nil { + log.Errorf("Error opening dcr wallets database: %s", err.Error()) + if err == bolt.ErrTimeout { + // timeout error occurs if storm fails to acquire a lock on the database file + return mwDB, "", errors.E(ErrWalletDatabaseInUse) + } + return mwDB, "", errors.Errorf("error opening dcr wallets database: %s", err.Error()) + } + + // init database for saving/reading wallet objects + err = mwDB.Init(&dcr.Wallet{}) + if err != nil { + log.Errorf("Error initializing wallets database: %s", err.Error()) + return mwDB, "", err + } + + // init database for saving/reading proposal objects + err = mwDB.Init(&dcr.Proposal{}) + if err != nil { + log.Errorf("Error initializing wallets database: %s", err.Error()) + return mwDB, "", err + } + + return mwDB, rootDir, nil +} diff --git a/dexclient.go b/dexclient.go index 087679138..5d9079e90 100644 --- a/dexclient.go +++ b/dexclient.go @@ -105,7 +105,7 @@ func (mw *MultiWallet) prepareDexSupportForDcrWalletLibrary() error { return nil, fmt.Errorf("account error: %v", err) } - walletDesc := fmt.Sprintf("%q in %s", wallet.Name, wallet.dataDir) + walletDesc := fmt.Sprintf("%q in %s", wallet.Name, wallet.DataDir) return dexdcr.NewSpvWallet(wallet.Internal(), walletDesc, chainParams, logger.SubLogger("DLWL")), nil } diff --git a/go.mod b/go.mod index 5ee2f89f9..2afb2a507 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,12 @@ require ( decred.org/dcrwallet/v2 v2.0.2-0.20220505152146-ece5da349895 github.com/DataDog/zstd v1.4.8 // indirect github.com/asdine/storm v0.0.0-20190216191021-fe89819f6282 + github.com/btcsuite/btcd v0.22.0-beta.0.20211026140004-31791ba4dc6e // indirect + github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect + github.com/btcsuite/btcutil v1.0.3-0.20210527170813-e2ba6805a890 // indirect + github.com/btcsuite/btcwallet v0.12.0 // indirect + github.com/btcsuite/btcwallet/walletdb v1.4.0 // indirect + github.com/btcsuite/btcwallet/wtxmgr v1.3.0 // indirect github.com/companyzero/sntrup4591761 v0.0.0-20220309191932-9e0f3af2f07a // indirect github.com/dchest/siphash v1.2.3 // indirect github.com/decred/base58 v1.0.4 // indirect @@ -16,6 +22,7 @@ require ( github.com/decred/dcrd/dcrutil/v4 v4.0.0 github.com/decred/dcrd/gcs/v3 v3.0.0 github.com/decred/dcrd/hdkeychain/v3 v3.1.0 + github.com/decred/dcrd/rpc/jsonrpc/types/v3 v3.0.0 github.com/decred/dcrd/txscript/v4 v4.0.0 github.com/decred/dcrd/wire v1.5.0 github.com/decred/dcrdata/v7 v7.0.0-20211216152310-365c9dc820eb @@ -26,6 +33,7 @@ require ( github.com/jessevdk/go-flags v1.5.0 // indirect github.com/jrick/logrotate v1.0.0 github.com/kevinburke/nacl v0.0.0-20190829012316-f3ed23dbd7f8 + github.com/lightninglabs/neutrino v0.13.1-0.20211214231330-53b628ce1756 // indirect github.com/onsi/ginkgo v1.14.0 github.com/onsi/gomega v1.10.1 github.com/planetdecred/dcrlibwallet/dexdcr v0.0.0-20220223161805-c736f970653d diff --git a/multiwallet.go b/multiwallet.go index 
efbaa26fd..f9695bd5f 100644 --- a/multiwallet.go +++ b/multiwallet.go @@ -17,8 +17,10 @@ import ( "github.com/asdine/storm/q" "github.com/decred/dcrd/chaincfg/v3" "github.com/planetdecred/dcrlibwallet/utils" - "github.com/planetdecred/dcrlibwallet/walletdata" - bolt "go.etcd.io/bbolt" + "github.com/planetdecred/dcrlibwallet/wallets/dcr/walletdata" + + "github.com/planetdecred/dcrlibwallet/wallets/dcr" + "golang.org/x/crypto/bcrypt" ) @@ -28,15 +30,16 @@ type MultiWallet struct { db *storm.DB chainParams *chaincfg.Params - wallets map[int]*Wallet - badWallets map[int]*Wallet - syncData *syncData + wallets map[int]*dcr.Wallet + badWallets map[int]*dcr.Wallet - notificationListenersMu sync.RWMutex + // syncData *dcr.SyncData + + // notificationListenersMu sync.RWMutex txAndBlockNotificationListeners map[string]TxAndBlockNotificationListener blocksRescanProgressListener BlocksRescanProgressListener - accountMixerNotificationListener map[string]AccountMixerNotificationListener + // accountMixerNotificationListener map[string]AccountMixerNotificationListener shuttingDown chan bool cancelFuncs []context.CancelFunc @@ -56,55 +59,29 @@ func NewMultiWallet(rootDir, dbDriver, netType, politeiaHost string) (*MultiWall return nil, err } - rootDir = filepath.Join(rootDir, netType) - err = os.MkdirAll(rootDir, os.ModePerm) + dcrDB, dcrRootDir, err := initializeDCRWallet(rootDir, dbDriver, netType) if err != nil { - return nil, errors.Errorf("failed to create rootDir: %v", err) - } - - err = initLogRotator(filepath.Join(rootDir, logFileName)) - if err != nil { - return nil, errors.Errorf("failed to init logRotator: %v", err.Error()) - } - - mwDB, err := storm.Open(filepath.Join(rootDir, walletsDbName)) - if err != nil { - log.Errorf("Error opening wallets database: %s", err.Error()) - if err == bolt.ErrTimeout { - // timeout error occurs if storm fails to acquire a lock on the database file - return nil, errors.E(ErrWalletDatabaseInUse) - } - return nil, errors.Errorf("error opening wallets database: %s", err.Error()) - } - - // init database for saving/reading wallet objects - err = mwDB.Init(&Wallet{}) - if err != nil { - log.Errorf("Error initializing wallets database: %s", err.Error()) - return nil, err - } - - // init database for saving/reading proposal objects - err = mwDB.Init(&Proposal{}) - if err != nil { - log.Errorf("Error initializing wallets database: %s", err.Error()) - return nil, err + log.Errorf("error initializing DCRWallet: %s", err.Error()) + return nil, errors.Errorf("error initializing DCRWallet: %s", err.Error()) } mw := &MultiWallet{ dbDriver: dbDriver, - rootDir: rootDir, - db: mwDB, + rootDir: dcrRootDir, + db: dcrDB, chainParams: chainParams, - wallets: make(map[int]*Wallet), - badWallets: make(map[int]*Wallet), - syncData: &syncData{ - syncProgressListeners: make(map[string]SyncProgressListener), - }, + wallets: make(map[int]*dcr.Wallet), + badWallets: make(map[int]*dcr.Wallet), + // syncData: &dcr.SyncData{ + // SyncProgressListeners: make(map[string]dcr.SyncProgressListener), + // }, txAndBlockNotificationListeners: make(map[string]TxAndBlockNotificationListener), - accountMixerNotificationListener: make(map[string]AccountMixerNotificationListener), } + // syncData: &dcr.SyncData{ + // SyncProgressListeners: make(map[string]dcr.SyncProgressListener), + // }, + mw.Politeia, err = newPoliteia(mw, politeiaHost) if err != nil { return nil, err @@ -112,7 +89,7 @@ func NewMultiWallet(rootDir, dbDriver, netType, politeiaHost string) (*MultiWall // read saved wallets info from db 
and initialize wallets query := mw.db.Select(q.True()).OrderBy("ID") - var wallets []*Wallet + var wallets []*dcr.Wallet err = query.Find(&wallets) if err != nil && err != storm.ErrNotFound { return nil, err @@ -120,8 +97,8 @@ func NewMultiWallet(rootDir, dbDriver, netType, politeiaHost string) (*MultiWall // prepare the wallets loaded from db for use for _, wallet := range wallets { - err = wallet.prepare(rootDir, chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) - if err == nil && !WalletExistsAt(wallet.dataDir) { + err = wallet.Prepare(rootDir, chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) + if err == nil && !WalletExistsAt(wallet.DataDir) { err = fmt.Errorf("missing wallet database file") } if err != nil { @@ -153,7 +130,9 @@ func (mw *MultiWallet) Shutdown() { mw.shuttingDown <- true mw.CancelRescan() - mw.CancelSync() + for _, wallet := range mw.wallets { + wallet.CancelSync(mw.wallets) + } for _, wallet := range mw.wallets { wallet.Shutdown() @@ -266,9 +245,9 @@ func (mw *MultiWallet) StartupSecurityType() int32 { } func (mw *MultiWallet) OpenWallets(startupPassphrase []byte) error { - if mw.IsSyncing() { - return errors.New(ErrSyncAlreadyInProgress) - } + // if mw.IsSyncing() { + // return errors.New(ErrSyncAlreadyInProgress) + // } err := mw.VerifyStartupPassphrase(startupPassphrase) if err != nil { @@ -276,7 +255,7 @@ func (mw *MultiWallet) OpenWallets(startupPassphrase []byte) error { } for _, wallet := range mw.wallets { - err = wallet.openWallet() + err = wallet.OpenWallet() if err != nil { return err } @@ -299,24 +278,24 @@ func (mw *MultiWallet) AllWalletsAreWatchOnly() (bool, error) { return true, nil } -func (mw *MultiWallet) CreateWatchOnlyWallet(walletName, extendedPublicKey string) (*Wallet, error) { - wallet := &Wallet{ +func (mw *MultiWallet) CreateWatchOnlyWallet(walletName, extendedPublicKey string) (*dcr.Wallet, error) { + wallet := &dcr.Wallet{ Name: walletName, IsRestored: true, HasDiscoveredAccounts: true, } return mw.saveNewWallet(wallet, func() error { - err := wallet.prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) + err := wallet.Prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) if err != nil { return err } - return wallet.createWatchingOnlyWallet(extendedPublicKey) + return wallet.CreateWatchingOnlyWallet(extendedPublicKey) }) } -func (mw *MultiWallet) CreateNewWallet(walletName, privatePassphrase string, privatePassphraseType int32) (*Wallet, error) { +func (mw *MultiWallet) CreateNewWallet(walletName, privatePassphrase string, privatePassphraseType int32) (*dcr.Wallet, error) { seed, err := GenerateSeed() if err != nil { return nil, err @@ -326,7 +305,7 @@ func (mw *MultiWallet) CreateNewWallet(walletName, privatePassphrase string, pri if err != nil { return nil, err } - wallet := &Wallet{ + wallet := &dcr.Wallet{ Name: walletName, CreatedAt: time.Now(), EncryptedSeed: encryptedSeed, @@ -335,18 +314,18 @@ func (mw *MultiWallet) CreateNewWallet(walletName, privatePassphrase string, pri } return mw.saveNewWallet(wallet, func() error { - err := wallet.prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) + err := wallet.Prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) if err != nil { return err } - return wallet.createWallet(privatePassphrase, seed) + return 
wallet.CreateWallet(privatePassphrase, seed) }) } -func (mw *MultiWallet) RestoreWallet(walletName, seedMnemonic, privatePassphrase string, privatePassphraseType int32) (*Wallet, error) { +func (mw *MultiWallet) RestoreWallet(walletName, seedMnemonic, privatePassphrase string, privatePassphraseType int32) (*dcr.Wallet, error) { - wallet := &Wallet{ + wallet := &dcr.Wallet{ Name: walletName, PrivatePassphraseType: privatePassphraseType, IsRestored: true, @@ -354,16 +333,16 @@ func (mw *MultiWallet) RestoreWallet(walletName, seedMnemonic, privatePassphrase } return mw.saveNewWallet(wallet, func() error { - err := wallet.prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) + err := wallet.Prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) if err != nil { return err } - return wallet.createWallet(privatePassphrase, seedMnemonic) + return wallet.CreateWallet(privatePassphrase, seedMnemonic) }) } -func (mw *MultiWallet) LinkExistingWallet(walletName, walletDataDir, originalPubPass string, privatePassphraseType int32) (*Wallet, error) { +func (mw *MultiWallet) LinkExistingWallet(walletName, walletDataDir, originalPubPass string, privatePassphraseType int32) (*dcr.Wallet, error) { // check if `walletDataDir` contains wallet.db if !WalletExistsAt(walletDataDir) { return nil, errors.New(ErrNotExist) @@ -376,7 +355,7 @@ func (mw *MultiWallet) LinkExistingWallet(walletName, walletDataDir, originalPub return nil, err } - wallet := &Wallet{ + wallet := &dcr.Wallet{ Name: walletName, PrivatePassphraseType: privatePassphraseType, IsRestored: true, @@ -386,36 +365,36 @@ func (mw *MultiWallet) LinkExistingWallet(walletName, walletDataDir, originalPub return mw.saveNewWallet(wallet, func() error { // move wallet.db and tx.db files to newly created dir for the wallet currentWalletDbFilePath := filepath.Join(walletDataDir, walletDbName) - newWalletDbFilePath := filepath.Join(wallet.dataDir, walletDbName) + newWalletDbFilePath := filepath.Join(wallet.DataDir, walletDbName) if err := moveFile(currentWalletDbFilePath, newWalletDbFilePath); err != nil { return err } currentTxDbFilePath := filepath.Join(walletDataDir, walletdata.OldDbName) - newTxDbFilePath := filepath.Join(wallet.dataDir, walletdata.DbName) + newTxDbFilePath := filepath.Join(wallet.DataDir, walletdata.DbName) if err := moveFile(currentTxDbFilePath, newTxDbFilePath); err != nil { return err } // prepare the wallet for use and open it err := (func() error { - err := wallet.prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) + err := wallet.Prepare(mw.rootDir, mw.chainParams, mw.walletConfigSetFn(wallet.ID), mw.walletConfigReadFn(wallet.ID)) if err != nil { return err } if originalPubPass == "" || originalPubPass == w.InsecurePubPassphrase { - return wallet.openWallet() + return wallet.OpenWallet() } - err = mw.loadWalletTemporarily(ctx, wallet.dataDir, originalPubPass, func(tempWallet *w.Wallet) error { + err = mw.loadWalletTemporarily(ctx, wallet.DataDir, originalPubPass, func(tempWallet *w.Wallet) error { return tempWallet.ChangePublicPassphrase(ctx, []byte(originalPubPass), []byte(w.InsecurePubPassphrase)) }) if err != nil { return err } - return wallet.openWallet() + return wallet.OpenWallet() })() // restore db files to their original location if there was an error @@ -441,7 +420,7 @@ func (mw *MultiWallet) LinkExistingWallet(walletName, walletDataDir, originalPub // // IFF all the above 
operations succeed, the wallet info will be persisted to db // and the wallet will be added to `mw.wallets`. -func (mw *MultiWallet) saveNewWallet(wallet *Wallet, setupWallet func() error) (*Wallet, error) { +func (mw *MultiWallet) saveNewWallet(wallet *dcr.Wallet, setupWallet func() error) (*dcr.Wallet, error) { exists, err := mw.WalletNameExists(wallet.Name) if err != nil { return nil, err @@ -449,10 +428,10 @@ func (mw *MultiWallet) saveNewWallet(wallet *Wallet, setupWallet func() error) ( return nil, errors.New(ErrExist) } - if mw.IsConnectedToDecredNetwork() { - mw.CancelSync() - defer mw.SpvSync() - } + // if mw.IsConnectedToDecredNetwork() { + // mw.CancelSync() + // defer mw.SpvSync() + // } // Perform database save operations in batch transaction // for automatic rollback if error occurs at any point. err = mw.batchDbTransaction(func(db storm.Node) error { @@ -481,7 +460,7 @@ func (mw *MultiWallet) saveNewWallet(wallet *Wallet, setupWallet func() error) ( if wallet.Name == "" { wallet.Name = "wallet-" + strconv.Itoa(wallet.ID) // wallet-# } - wallet.dataDir = walletDataDir + wallet.DataDir = walletDataDir wallet.DbDriver = mw.dbDriver err = db.Save(wallet) // update database with complete wallet information @@ -528,16 +507,16 @@ func (mw *MultiWallet) DeleteWallet(walletID int, privPass []byte) error { return errors.New(ErrNotExist) } - if mw.IsConnectedToDecredNetwork() { - mw.CancelSync() - defer func() { - if mw.OpenedWalletsCount() > 0 { - mw.SpvSync() - } - }() - } + // if mw.IsConnectedToDecredNetwork() { + // mw.CancelSync() + // defer func() { + // if mw.OpenedWalletsCount() > 0 { + // mw.SpvSync() + // } + // }() + // } - err := wallet.deleteWallet(privPass) + err := wallet.DeleteWallet(privPass) if err != nil { return translateError(err) } @@ -552,7 +531,7 @@ func (mw *MultiWallet) DeleteWallet(walletID int, privPass []byte) error { return nil } -func (mw *MultiWallet) BadWallets() map[int]*Wallet { +func (mw *MultiWallet) BadWallets() map[int]*dcr.Wallet { return mw.badWallets } @@ -569,13 +548,13 @@ func (mw *MultiWallet) DeleteBadWallet(walletID int) error { return translateError(err) } - os.RemoveAll(wallet.dataDir) + os.RemoveAll(wallet.DataDir) delete(mw.badWallets, walletID) return nil } -func (mw *MultiWallet) WalletWithID(walletID int) *Wallet { +func (mw *MultiWallet) WalletWithID(walletID int) *dcr.Wallet { if wallet, ok := mw.wallets[walletID]; ok { return wallet } @@ -640,7 +619,7 @@ func (mw *MultiWallet) OpenedWalletsCount() int32 { func (mw *MultiWallet) SyncedWalletsCount() int32 { var syncedWallets int32 for _, wallet := range mw.wallets { - if wallet.WalletOpened() && wallet.synced { + if wallet.WalletOpened() && wallet.Synced { syncedWallets++ } } @@ -653,7 +632,7 @@ func (mw *MultiWallet) WalletNameExists(walletName string) (bool, error) { return false, errors.E(ErrReservedWalletName) } - err := mw.db.One("Name", walletName, &Wallet{}) + err := mw.db.One("Name", walletName, &dcr.Wallet{}) if err == nil { return true, nil } else if err != storm.ErrNotFound { @@ -696,7 +675,7 @@ func (mw *MultiWallet) ChangePrivatePassphraseForWallet(walletID int, oldPrivate } } - err := wallet.changePrivatePassphrase(oldPrivatePassphrase, newPrivatePassphrase) + err := wallet.ChangePrivatePassphrase(oldPrivatePassphrase, newPrivatePassphrase) if err != nil { return translateError(err) } @@ -707,7 +686,7 @@ func (mw *MultiWallet) ChangePrivatePassphraseForWallet(walletID int, oldPrivate if err != nil { log.Errorf("error saving wallet-[%d] to database after passphrase 
change: %v", wallet.ID, err) - err2 := wallet.changePrivatePassphrase(newPrivatePassphrase, oldPrivatePassphrase) + err2 := wallet.ChangePrivatePassphrase(newPrivatePassphrase, oldPrivatePassphrase) if err2 != nil { log.Errorf("error undoing wallet passphrase change: %v", err2) log.Errorf("error wallet passphrase was changed but passphrase type and newly encrypted seed could not be saved: %v", err) diff --git a/multiwallet_utils.go b/multiwallet_utils.go index 9208c2be4..788f281e0 100644 --- a/multiwallet_utils.go +++ b/multiwallet_utils.go @@ -16,6 +16,9 @@ import ( "github.com/kevinburke/nacl" "github.com/kevinburke/nacl/secretbox" "golang.org/x/crypto/scrypt" + + "github.com/planetdecred/dcrlibwallet/wallets/dcr" + ) const ( @@ -143,7 +146,7 @@ func (mw *MultiWallet) WalletWithXPub(xpub string) (int, error) { return -1, err } for _, account := range accounts.Accounts { - if account.AccountNumber == ImportedAccountNumber { + if account.AccountNumber == dcr.ImportedAccountNumber { continue } acctXPub, err := w.Internal().AccountXpub(ctx, account.AccountNumber) @@ -166,7 +169,7 @@ func (mw *MultiWallet) WalletWithSeed(seedMnemonic string) (int, error) { return -1, errors.New(ErrEmptySeed) } - newSeedLegacyXPUb, newSeedSLIP0044XPUb, err := deriveBIP44AccountXPubs(seedMnemonic, DefaultAccountNum, mw.chainParams) + newSeedLegacyXPUb, newSeedSLIP0044XPUb, err := deriveBIP44AccountXPubs(seedMnemonic, dcr.DefaultAccountNum, mw.chainParams) if err != nil { return -1, err } @@ -180,7 +183,7 @@ func (mw *MultiWallet) WalletWithSeed(seedMnemonic string) (int, error) { // incorrect result from the check below. But this would return true // if the watch-only wallet was created using the xpub of the default // account of the provided seed. - usesSameSeed, err := wallet.AccountXPubMatches(DefaultAccountNum, newSeedLegacyXPUb, newSeedSLIP0044XPUb) + usesSameSeed, err := wallet.AccountXPubMatches(dcr.DefaultAccountNum, newSeedLegacyXPUb, newSeedSLIP0044XPUb) if err != nil { return -1, err } diff --git a/politeia_sync.go b/politeia_sync.go index 8832111c1..eeefd20f8 100644 --- a/politeia_sync.go +++ b/politeia_sync.go @@ -12,6 +12,9 @@ import ( "github.com/asdine/storm" tkv1 "github.com/decred/politeia/politeiawww/api/ticketvote/v1" www "github.com/decred/politeia/politeiawww/api/www/v1" + + "github.com/planetdecred/dcrlibwallet/wallets/dcr" + ) const ( @@ -411,7 +414,7 @@ func (p *Politeia) ProposalVoteDetailsRaw(walletID int, token string) (*Proposal return nil, err } - ticketHashes, addresses, err := wal.Internal().CommittedTickets(wal.shutdownContext(), hashes) + ticketHashes, addresses, err := wal.Internal().CommittedTickets(wal.ShutdownContext(), hashes) if err != nil { return nil, err } @@ -437,7 +440,7 @@ func (p *Politeia) ProposalVoteDetailsRaw(walletID int, token string) (*Proposal } // filter out tickets controlled by imported accounts - if ainfo.AccountNumber == ImportedAccountNumber { + if ainfo.AccountNumber == dcr.ImportedAccountNumber { continue } @@ -522,7 +525,7 @@ func (p *Politeia) CastVotes(walletID int, eligibleTickets []*ProposalVote, toke msg := token + ticket.Hash + voteBitHex - signature, err := wal.signMessage(ticket.Address, msg) + signature, err := wal.SignMessageDirect(ticket.Address, msg) if err != nil { return err } diff --git a/rescan.go b/rescan.go index d6babc19f..8989d7127 100644 --- a/rescan.go +++ b/rescan.go @@ -25,24 +25,25 @@ func (mw *MultiWallet) RescanBlocksFromHeight(walletID int, startHeight int32) e return errors.E(ErrNotConnected) } - if mw.IsRescanning() || 
!mw.IsSynced() { - return errors.E(ErrInvalid) - } + // if mw.IsRescanning() || !mw.IsSynced() { + // return errors.E(ErrInvalid) + // } go func() { defer func() { - mw.syncData.mu.Lock() - mw.syncData.rescanning = false - mw.syncData.cancelRescan = nil - mw.syncData.mu.Unlock() + // mw.syncData.mu.Lock() + // mw.syncData.rescanning = false + // mw.syncData.cancelRescan = nil + // mw.syncData.mu.Unlock() }() - ctx, cancel := wallet.shutdownContextWithCancel() + ctx, _ := wallet.ShutdownContextWithCancel() + // ctx, cancel := wallet.ShutdownContextWithCancel() //undo this lateer - mw.syncData.mu.Lock() - mw.syncData.rescanning = true - mw.syncData.cancelRescan = cancel - mw.syncData.mu.Unlock() + // mw.syncData.mu.Lock() + // mw.syncData.rescanning = true + // mw.syncData.cancelRescan = cancel + // mw.syncData.mu.Unlock() if mw.blocksRescanProgressListener != nil { mw.blocksRescanProgressListener.OnBlocksRescanStarted(walletID) @@ -104,9 +105,9 @@ func (mw *MultiWallet) RescanBlocksFromHeight(walletID int, startHeight int32) e var err error if startHeight == 0 { - err = wallet.reindexTransactions() + err = wallet.ReindexTransactions() } else { - err = wallet.walletDataDB.SaveLastIndexPoint(startHeight) + err = wallet.WalletDataDB.SaveLastIndexPoint(startHeight) if err != nil { if mw.blocksRescanProgressListener != nil { mw.blocksRescanProgressListener.OnBlocksRescanEnded(walletID, err) @@ -125,20 +126,22 @@ func (mw *MultiWallet) RescanBlocksFromHeight(walletID int, startHeight int32) e } func (mw *MultiWallet) CancelRescan() { - mw.syncData.mu.Lock() - defer mw.syncData.mu.Unlock() - if mw.syncData.cancelRescan != nil { - mw.syncData.cancelRescan() - mw.syncData.cancelRescan = nil - - log.Info("Rescan canceled.") - } + // mw.syncData.mu.Lock() + // defer mw.syncData.mu.Unlock() + // if mw.syncData.cancelRescan != nil { + // mw.syncData.cancelRescan() + // mw.syncData.cancelRescan = nil + + // log.Info("Rescan canceled.") + // } } func (mw *MultiWallet) IsRescanning() bool { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - return mw.syncData.rescanning + // mw.syncData.mu.RLock() + // defer mw.syncData.mu.RUnlock() + // return mw.syncData.rescanning + + return true } func (mw *MultiWallet) SetBlocksRescanProgressListener(blocksRescanProgressListener BlocksRescanProgressListener) { diff --git a/sync.go b/sync.go deleted file mode 100644 index 18cfcd09a..000000000 --- a/sync.go +++ /dev/null @@ -1,493 +0,0 @@ -package dcrlibwallet - -import ( - "context" - "encoding/json" - "fmt" - "net" - "sort" - "strings" - "sync" - - "decred.org/dcrwallet/v2/errors" - "decred.org/dcrwallet/v2/p2p" - w "decred.org/dcrwallet/v2/wallet" - "github.com/decred/dcrd/addrmgr/v2" - "github.com/planetdecred/dcrlibwallet/spv" -) - -// reading/writing of properties of this struct are protected by mutex.x -type syncData struct { - mu sync.RWMutex - - syncProgressListeners map[string]SyncProgressListener - showLogs bool - - synced bool - syncing bool - cancelSync context.CancelFunc - cancelRescan context.CancelFunc - syncCanceled chan struct{} - - // Flag to notify syncCanceled callback if the sync was canceled so as to be restarted. - restartSyncRequested bool - - rescanning bool - connectedPeers int32 - - *activeSyncData -} - -// reading/writing of properties of this struct are protected by syncData.mu. 
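(The sync state removed here is re-created under wallets/dcr/sync.go per the rename/create list above. As a minimal usage sketch of the API this state backs — method names are taken from this file, while `listener` and the identifier string are illustrative placeholders only:)

	// Register a progress listener, then start SPV sync; both methods are
	// defined in this (relocated) file and guard syncData with its mutex.
	if err := mw.AddSyncProgressListener(listener, "example-listener"); err != nil {
		log.Error(err)
	}
	if err := mw.SpvSync(); err != nil {
		log.Error(err)
	}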
-type activeSyncData struct { - syncer *spv.Syncer - - syncStage int32 - - cfiltersFetchProgress CFiltersFetchProgressReport - headersFetchProgress HeadersFetchProgressReport - addressDiscoveryProgress AddressDiscoveryProgressReport - headersRescanProgress HeadersRescanProgressReport - - addressDiscoveryCompletedOrCanceled chan bool - - rescanStartTime int64 - - totalInactiveSeconds int64 -} - -const ( - InvalidSyncStage = -1 - CFiltersFetchSyncStage = 0 - HeadersFetchSyncStage = 1 - AddressDiscoverySyncStage = 2 - HeadersRescanSyncStage = 3 -) - -func (mw *MultiWallet) initActiveSyncData() { - - cfiltersFetchProgress := CFiltersFetchProgressReport{ - GeneralSyncProgress: &GeneralSyncProgress{}, - beginFetchCFiltersTimeStamp: 0, - startCFiltersHeight: -1, - cfiltersFetchTimeSpent: 0, - totalFetchedCFiltersCount: 0, - } - - headersFetchProgress := HeadersFetchProgressReport{ - GeneralSyncProgress: &GeneralSyncProgress{}, - beginFetchTimeStamp: -1, - headersFetchTimeSpent: -1, - totalFetchedHeadersCount: 0, - } - - addressDiscoveryProgress := AddressDiscoveryProgressReport{ - GeneralSyncProgress: &GeneralSyncProgress{}, - addressDiscoveryStartTime: -1, - totalDiscoveryTimeSpent: -1, - } - - headersRescanProgress := HeadersRescanProgressReport{} - headersRescanProgress.GeneralSyncProgress = &GeneralSyncProgress{} - - mw.syncData.mu.Lock() - mw.syncData.activeSyncData = &activeSyncData{ - syncStage: InvalidSyncStage, - - cfiltersFetchProgress: cfiltersFetchProgress, - headersFetchProgress: headersFetchProgress, - addressDiscoveryProgress: addressDiscoveryProgress, - headersRescanProgress: headersRescanProgress, - } - mw.syncData.mu.Unlock() -} - -func (mw *MultiWallet) IsSyncProgressListenerRegisteredFor(uniqueIdentifier string) bool { - mw.syncData.mu.RLock() - _, exists := mw.syncData.syncProgressListeners[uniqueIdentifier] - mw.syncData.mu.RUnlock() - return exists -} - -func (mw *MultiWallet) AddSyncProgressListener(syncProgressListener SyncProgressListener, uniqueIdentifier string) error { - if mw.IsSyncProgressListenerRegisteredFor(uniqueIdentifier) { - return errors.New(ErrListenerAlreadyExist) - } - - mw.syncData.mu.Lock() - mw.syncData.syncProgressListeners[uniqueIdentifier] = syncProgressListener - mw.syncData.mu.Unlock() - - // If sync is already on, notify this newly added listener of the current progress report. 
- return mw.PublishLastSyncProgress(uniqueIdentifier) -} - -func (mw *MultiWallet) RemoveSyncProgressListener(uniqueIdentifier string) { - mw.syncData.mu.Lock() - delete(mw.syncData.syncProgressListeners, uniqueIdentifier) - mw.syncData.mu.Unlock() -} - -func (mw *MultiWallet) syncProgressListeners() []SyncProgressListener { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - - listeners := make([]SyncProgressListener, 0, len(mw.syncData.syncProgressListeners)) - for _, listener := range mw.syncData.syncProgressListeners { - listeners = append(listeners, listener) - } - - return listeners -} - -func (mw *MultiWallet) PublishLastSyncProgress(uniqueIdentifier string) error { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - - syncProgressListener, exists := mw.syncData.syncProgressListeners[uniqueIdentifier] - if !exists { - return errors.New(ErrInvalid) - } - - if mw.syncData.syncing && mw.syncData.activeSyncData != nil { - switch mw.syncData.activeSyncData.syncStage { - case HeadersFetchSyncStage: - syncProgressListener.OnHeadersFetchProgress(&mw.syncData.headersFetchProgress) - case AddressDiscoverySyncStage: - syncProgressListener.OnAddressDiscoveryProgress(&mw.syncData.addressDiscoveryProgress) - case HeadersRescanSyncStage: - syncProgressListener.OnHeadersRescanProgress(&mw.syncData.headersRescanProgress) - } - } - - return nil -} - -func (mw *MultiWallet) EnableSyncLogs() { - mw.syncData.mu.Lock() - mw.syncData.showLogs = true - mw.syncData.mu.Unlock() -} - -func (mw *MultiWallet) SyncInactiveForPeriod(totalInactiveSeconds int64) { - mw.syncData.mu.Lock() - defer mw.syncData.mu.Unlock() - - if !mw.syncData.syncing || mw.syncData.activeSyncData == nil { - log.Debug("Not accounting for inactive time, wallet is not syncing.") - return - } - - mw.syncData.totalInactiveSeconds += totalInactiveSeconds - if mw.syncData.connectedPeers == 0 { - // assume it would take another 60 seconds to reconnect to peers - mw.syncData.totalInactiveSeconds += 60 - } -} - -func (mw *MultiWallet) SpvSync() error { - // prevent an attempt to sync when the previous syncing has not been canceled - if mw.IsSyncing() || mw.IsSynced() { - return errors.New(ErrSyncAlreadyInProgress) - } - - addr := &net.TCPAddr{IP: net.ParseIP("::1"), Port: 0} - addrManager := addrmgr.New(mw.rootDir, net.LookupIP) // TODO: be mindful of tor - lp := p2p.NewLocalPeer(mw.chainParams, addr, addrManager) - - var validPeerAddresses []string - peerAddresses := mw.ReadStringConfigValueForKey(SpvPersistentPeerAddressesConfigKey) - if peerAddresses != "" { - addresses := strings.Split(peerAddresses, ";") - for _, address := range addresses { - peerAddress, err := NormalizeAddress(address, mw.chainParams.DefaultPort) - if err != nil { - log.Errorf("SPV peer address(%s) is invalid: %v", peerAddress, err) - } else { - validPeerAddresses = append(validPeerAddresses, peerAddress) - } - } - - if len(validPeerAddresses) == 0 { - return errors.New(ErrInvalidPeers) - } - } - - // init activeSyncData to be used to hold data used - // to calculate sync estimates only during sync - mw.initActiveSyncData() - - wallets := make(map[int]*w.Wallet) - for id, wallet := range mw.wallets { - wallets[id] = wallet.Internal() - wallet.waitingForHeaders = true - wallet.syncing = true - } - - syncer := spv.NewSyncer(wallets, lp) - syncer.SetNotifications(mw.spvSyncNotificationCallbacks()) - if len(validPeerAddresses) > 0 { - syncer.SetPersistentPeers(validPeerAddresses) - } - - ctx, cancel := mw.contextWithShutdownCancel() - - var 
restartSyncRequested bool - - mw.syncData.mu.Lock() - restartSyncRequested = mw.syncData.restartSyncRequested - mw.syncData.restartSyncRequested = false - mw.syncData.syncing = true - mw.syncData.cancelSync = cancel - mw.syncData.syncCanceled = make(chan struct{}) - mw.syncData.syncer = syncer - mw.syncData.mu.Unlock() - - for _, listener := range mw.syncProgressListeners() { - listener.OnSyncStarted(restartSyncRequested) - } - - // syncer.Run uses a wait group to block the thread until the sync context - // expires or is canceled or some other error occurs such as - // losing connection to all persistent peers. - go func() { - syncError := syncer.Run(ctx) - //sync has ended or errored - if syncError != nil { - if syncError == context.DeadlineExceeded { - mw.notifySyncError(errors.Errorf("SPV synchronization deadline exceeded: %v", syncError)) - } else if syncError == context.Canceled { - close(mw.syncData.syncCanceled) - mw.notifySyncCanceled() - } else { - mw.notifySyncError(syncError) - } - } - - //reset sync variables - mw.resetSyncData() - }() - return nil -} - -func (mw *MultiWallet) RestartSpvSync() error { - mw.syncData.mu.Lock() - mw.syncData.restartSyncRequested = true - mw.syncData.mu.Unlock() - - mw.CancelSync() // necessary to unset the network backend. - return mw.SpvSync() -} - -func (mw *MultiWallet) CancelSync() { - mw.syncData.mu.RLock() - cancelSync := mw.syncData.cancelSync - mw.syncData.mu.RUnlock() - - if cancelSync != nil { - log.Info("Canceling sync. May take a while for sync to fully cancel.") - - // Stop running cspp mixers - for _, wallet := range mw.wallets { - if wallet.IsAccountMixerActive() { - log.Infof("[%d] Stopping cspp mixer", wallet.ID) - err := mw.StopAccountMixer(wallet.ID) - if err != nil { - log.Errorf("[%d] Error stopping cspp mixer: %v", wallet.ID, err) - } - } - } - - // Cancel the context used for syncer.Run in spvSync(). - // This may not immediately cause the sync process to terminate, - // but when it eventually terminates, syncer.Run will return `err == context.Canceled`. - cancelSync() - - // When sync terminates and syncer.Run returns `err == context.Canceled`, - // we will get notified on this channel. 
- <-mw.syncData.syncCanceled - - log.Info("Sync fully canceled.") - } -} - -func (wallet *Wallet) IsWaiting() bool { - return wallet.waitingForHeaders -} - -func (wallet *Wallet) IsSynced() bool { - return wallet.synced -} - -func (wallet *Wallet) IsSyncing() bool { - return wallet.syncing -} - -func (mw *MultiWallet) IsConnectedToDecredNetwork() bool { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - return mw.syncData.syncing || mw.syncData.synced -} - -func (mw *MultiWallet) IsSynced() bool { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - return mw.syncData.synced -} - -func (mw *MultiWallet) IsSyncing() bool { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - return mw.syncData.syncing -} - -func (mw *MultiWallet) CurrentSyncStage() int32 { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - - if mw.syncData != nil && mw.syncData.syncing { - return mw.syncData.syncStage - } - return InvalidSyncStage -} - -func (mw *MultiWallet) GeneralSyncProgress() *GeneralSyncProgress { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - - if mw.syncData != nil && mw.syncData.syncing { - switch mw.syncData.syncStage { - case HeadersFetchSyncStage: - return mw.syncData.headersFetchProgress.GeneralSyncProgress - case AddressDiscoverySyncStage: - return mw.syncData.addressDiscoveryProgress.GeneralSyncProgress - case HeadersRescanSyncStage: - return mw.syncData.headersRescanProgress.GeneralSyncProgress - case CFiltersFetchSyncStage: - return mw.syncData.cfiltersFetchProgress.GeneralSyncProgress - } - } - - return nil -} - -func (mw *MultiWallet) ConnectedPeers() int32 { - mw.syncData.mu.RLock() - defer mw.syncData.mu.RUnlock() - return mw.syncData.connectedPeers -} - -func (mw *MultiWallet) PeerInfoRaw() ([]PeerInfo, error) { - if !mw.IsConnectedToDecredNetwork() { - return nil, errors.New(ErrNotConnected) - } - - syncer := mw.syncData.syncer - - infos := make([]PeerInfo, 0, len(syncer.GetRemotePeers())) - for _, rp := range syncer.GetRemotePeers() { - info := PeerInfo{ - ID: int32(rp.ID()), - Addr: rp.RemoteAddr().String(), - AddrLocal: rp.LocalAddr().String(), - Services: fmt.Sprintf("%08d", uint64(rp.Services())), - Version: rp.Pver(), - SubVer: rp.UA(), - StartingHeight: int64(rp.InitialHeight()), - BanScore: int32(rp.BanScore()), - } - - infos = append(infos, info) - } - - sort.Slice(infos, func(i, j int) bool { - return infos[i].ID < infos[j].ID - }) - - return infos, nil -} - -func (mw *MultiWallet) PeerInfo() (string, error) { - infos, err := mw.PeerInfoRaw() - if err != nil { - return "", err - } - - result, _ := json.Marshal(infos) - return string(result), nil -} - -func (mw *MultiWallet) GetBestBlock() *BlockInfo { - var bestBlock int32 = -1 - var blockInfo *BlockInfo - for _, wallet := range mw.wallets { - if !wallet.WalletOpened() { - continue - } - - walletBestBLock := wallet.GetBestBlock() - if walletBestBLock > bestBlock || bestBlock == -1 { - bestBlock = walletBestBLock - blockInfo = &BlockInfo{Height: bestBlock, Timestamp: wallet.GetBestBlockTimeStamp()} - } - } - - return blockInfo -} - -func (mw *MultiWallet) GetLowestBlock() *BlockInfo { - var lowestBlock int32 = -1 - var blockInfo *BlockInfo - for _, wallet := range mw.wallets { - if !wallet.WalletOpened() { - continue - } - walletBestBLock := wallet.GetBestBlock() - if walletBestBLock < lowestBlock || lowestBlock == -1 { - lowestBlock = walletBestBLock - blockInfo = &BlockInfo{Height: lowestBlock, Timestamp: wallet.GetBestBlockTimeStamp()} - } - } - - return blockInfo -} - -func 
(wallet *Wallet) GetBestBlock() int32 { - if wallet.Internal() == nil { - // This method is sometimes called after a wallet is deleted and causes crash. - log.Error("Attempting to read best block height without a loaded wallet.") - return 0 - } - - _, height := wallet.Internal().MainChainTip(wallet.shutdownContext()) - return height -} - -func (wallet *Wallet) GetBestBlockTimeStamp() int64 { - if wallet.Internal() == nil { - // This method is sometimes called after a wallet is deleted and causes crash. - log.Error("Attempting to read best block timestamp without a loaded wallet.") - return 0 - } - - ctx := wallet.shutdownContext() - _, height := wallet.Internal().MainChainTip(ctx) - identifier := w.NewBlockIdentifierFromHeight(height) - info, err := wallet.Internal().BlockInfo(ctx, identifier) - if err != nil { - log.Error(err) - return 0 - } - return info.Timestamp -} - -func (mw *MultiWallet) GetLowestBlockTimestamp() int64 { - var timestamp int64 = -1 - for _, wallet := range mw.wallets { - bestBlockTimestamp := wallet.GetBestBlockTimeStamp() - if bestBlockTimestamp < timestamp || timestamp == -1 { - timestamp = bestBlockTimestamp - } - } - return timestamp -} diff --git a/syncnotification.go b/syncnotification.go index 4e4444376..65edd71ba 100644 --- a/syncnotification.go +++ b/syncnotification.go @@ -5,7 +5,7 @@ import ( "time" "github.com/planetdecred/dcrlibwallet/spv" - "golang.org/x/sync/errgroup" + // "golang.org/x/sync/errgroup" ) func (mw *MultiWallet) spvSyncNotificationCallbacks() *spv.Notifications { @@ -32,550 +32,550 @@ func (mw *MultiWallet) spvSyncNotificationCallbacks() *spv.Notifications { } func (mw *MultiWallet) handlePeerCountUpdate(peerCount int32) { - mw.syncData.mu.Lock() - mw.syncData.connectedPeers = peerCount - shouldLog := mw.syncData.showLogs && mw.syncData.syncing - mw.syncData.mu.Unlock() - - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.OnPeerConnectedOrDisconnected(peerCount) - } - - if shouldLog { - if peerCount == 1 { - log.Infof("Connected to %d peer on %s.", peerCount, mw.chainParams.Name) - } else { - log.Infof("Connected to %d peers on %s.", peerCount, mw.chainParams.Name) - } - } + // mw.syncData.mu.Lock() + // mw.syncData.connectedPeers = peerCount + // shouldLog := mw.syncData.showLogs && mw.syncData.syncing + // mw.syncData.mu.Unlock() + + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnPeerConnectedOrDisconnected(peerCount) + // } + + // if shouldLog { + // if peerCount == 1 { + // log.Infof("Connected to %d peer on %s.", peerCount, mw.chainParams.Name) + // } else { + // log.Infof("Connected to %d peers on %s.", peerCount, mw.chainParams.Name) + // } + // } } // Fetch CFilters Callbacks func (mw *MultiWallet) fetchCFiltersStarted(walletID int) { - mw.syncData.mu.Lock() - mw.syncData.activeSyncData.syncStage = CFiltersFetchSyncStage - mw.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp = time.Now().Unix() - mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount = 0 - showLogs := mw.syncData.showLogs - mw.syncData.mu.Unlock() - - if showLogs { - log.Infof("Step 1 of 3 - fetching %d block headers.") - } + // mw.syncData.mu.Lock() + // mw.syncData.activeSyncData.syncStage = CFiltersFetchSyncStage + // mw.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp = time.Now().Unix() + // mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount = 0 + // showLogs := 
mw.syncData.showLogs + // mw.syncData.mu.Unlock() + + // if showLogs { + // log.Infof("Step 1 of 3 - fetching %d block headers.") + // } } func (mw *MultiWallet) fetchCFiltersProgress(walletID int, startCFiltersHeight, endCFiltersHeight int32) { // lock the mutex before reading and writing to mw.syncData.* - mw.syncData.mu.Lock() - - if mw.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight == -1 { - mw.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight = startCFiltersHeight - } - - wallet := mw.WalletWithID(walletID) - mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount += endCFiltersHeight - startCFiltersHeight - - totalCFiltersToFetch := wallet.GetBestBlock() - mw.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight - // cfiltersLeftToFetch := totalCFiltersToFetch - mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount - - cfiltersFetchProgress := float64(mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount) / float64(totalCFiltersToFetch) - - // If there was some period of inactivity, - // assume that this process started at some point in the future, - // thereby accounting for the total reported time of inactivity. - mw.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp += mw.syncData.activeSyncData.totalInactiveSeconds - mw.syncData.activeSyncData.totalInactiveSeconds = 0 - - timeTakenSoFar := time.Now().Unix() - mw.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp - if timeTakenSoFar < 1 { - timeTakenSoFar = 1 - } - estimatedTotalCFiltersFetchTime := float64(timeTakenSoFar) / cfiltersFetchProgress - - // Use CFilters fetch rate to estimate headers fetch time. - cfiltersFetchRate := float64(mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount) / float64(timeTakenSoFar) - estimatedHeadersLeftToFetch := mw.estimateBlockHeadersCountAfter(wallet.GetBestBlockTimeStamp()) - estimatedTotalHeadersFetchTime := float64(estimatedHeadersLeftToFetch) / cfiltersFetchRate - // increase estimated value by FetchPercentage - estimatedTotalHeadersFetchTime /= FetchPercentage - - estimatedDiscoveryTime := estimatedTotalHeadersFetchTime * DiscoveryPercentage - estimatedRescanTime := estimatedTotalHeadersFetchTime * RescanPercentage - estimatedTotalSyncTime := estimatedTotalCFiltersFetchTime + estimatedTotalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime - - totalSyncProgress := float64(timeTakenSoFar) / estimatedTotalSyncTime - totalTimeRemainingSeconds := int64(math.Round(estimatedTotalSyncTime)) - timeTakenSoFar - - // update headers fetching progress report including total progress percentage and total time remaining - mw.syncData.activeSyncData.cfiltersFetchProgress.TotalCFiltersToFetch = totalCFiltersToFetch - mw.syncData.activeSyncData.cfiltersFetchProgress.CurrentCFilterHeight = startCFiltersHeight - mw.syncData.activeSyncData.cfiltersFetchProgress.CFiltersFetchProgress = roundUp(cfiltersFetchProgress * 100.0) - mw.syncData.activeSyncData.cfiltersFetchProgress.TotalSyncProgress = roundUp(totalSyncProgress * 100.0) - mw.syncData.activeSyncData.cfiltersFetchProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds - - mw.syncData.mu.Unlock() - - // notify progress listener of estimated progress report - mw.publishFetchCFiltersProgress() - - cfiltersFetchTimeRemaining := estimatedTotalCFiltersFetchTime - float64(timeTakenSoFar) - debugInfo := &DebugInfo{ - timeTakenSoFar, - totalTimeRemainingSeconds, - 
timeTakenSoFar, - int64(math.Round(cfiltersFetchTimeRemaining)), - } - mw.publishDebugInfo(debugInfo) + // mw.syncData.mu.Lock() + + // if mw.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight == -1 { + // mw.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight = startCFiltersHeight + // } + + // wallet := mw.WalletWithID(walletID) + // mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount += endCFiltersHeight - startCFiltersHeight + + // totalCFiltersToFetch := wallet.GetBestBlock() - mw.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight + // // cfiltersLeftToFetch := totalCFiltersToFetch - mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount + + // cfiltersFetchProgress := float64(mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount) / float64(totalCFiltersToFetch) + + // // If there was some period of inactivity, + // // assume that this process started at some point in the future, + // // thereby accounting for the total reported time of inactivity. + // mw.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp += mw.syncData.activeSyncData.totalInactiveSeconds + // mw.syncData.activeSyncData.totalInactiveSeconds = 0 + + // timeTakenSoFar := time.Now().Unix() - mw.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp + // if timeTakenSoFar < 1 { + // timeTakenSoFar = 1 + // } + // estimatedTotalCFiltersFetchTime := float64(timeTakenSoFar) / cfiltersFetchProgress + + // // Use CFilters fetch rate to estimate headers fetch time. + // cfiltersFetchRate := float64(mw.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount) / float64(timeTakenSoFar) + // estimatedHeadersLeftToFetch := mw.estimateBlockHeadersCountAfter(wallet.GetBestBlockTimeStamp()) + // estimatedTotalHeadersFetchTime := float64(estimatedHeadersLeftToFetch) / cfiltersFetchRate + // // increase estimated value by FetchPercentage + // estimatedTotalHeadersFetchTime /= FetchPercentage + + // estimatedDiscoveryTime := estimatedTotalHeadersFetchTime * DiscoveryPercentage + // estimatedRescanTime := estimatedTotalHeadersFetchTime * RescanPercentage + // estimatedTotalSyncTime := estimatedTotalCFiltersFetchTime + estimatedTotalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime + + // totalSyncProgress := float64(timeTakenSoFar) / estimatedTotalSyncTime + // totalTimeRemainingSeconds := int64(math.Round(estimatedTotalSyncTime)) - timeTakenSoFar + + // // update headers fetching progress report including total progress percentage and total time remaining + // mw.syncData.activeSyncData.cfiltersFetchProgress.TotalCFiltersToFetch = totalCFiltersToFetch + // mw.syncData.activeSyncData.cfiltersFetchProgress.CurrentCFilterHeight = startCFiltersHeight + // mw.syncData.activeSyncData.cfiltersFetchProgress.CFiltersFetchProgress = roundUp(cfiltersFetchProgress * 100.0) + // mw.syncData.activeSyncData.cfiltersFetchProgress.TotalSyncProgress = roundUp(totalSyncProgress * 100.0) + // mw.syncData.activeSyncData.cfiltersFetchProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + + // mw.syncData.mu.Unlock() + + // // notify progress listener of estimated progress report + // mw.publishFetchCFiltersProgress() + + // cfiltersFetchTimeRemaining := estimatedTotalCFiltersFetchTime - float64(timeTakenSoFar) + // debugInfo := &DebugInfo{ + // timeTakenSoFar, + // totalTimeRemainingSeconds, + // timeTakenSoFar, + // int64(math.Round(cfiltersFetchTimeRemaining)), + // } + 
// mw.publishDebugInfo(debugInfo) } func (mw *MultiWallet) publishFetchCFiltersProgress() { - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.OnCFiltersFetchProgress(&mw.syncData.cfiltersFetchProgress) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnCFiltersFetchProgress(&mw.syncData.cfiltersFetchProgress) + // } } func (mw *MultiWallet) fetchCFiltersEnded(walletID int) { - mw.syncData.mu.Lock() - defer mw.syncData.mu.Unlock() + // mw.syncData.mu.Lock() + // defer mw.syncData.mu.Unlock() - mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent = time.Now().Unix() - mw.syncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp + // mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent = time.Now().Unix() - mw.syncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp - // If there is some period of inactivity reported at this stage, - // subtract it from the total stage time. - mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent -= mw.syncData.totalInactiveSeconds - mw.syncData.activeSyncData.totalInactiveSeconds = 0 + // // If there is some period of inactivity reported at this stage, + // // subtract it from the total stage time. + // mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent -= mw.syncData.totalInactiveSeconds + // mw.syncData.activeSyncData.totalInactiveSeconds = 0 } // Fetch Headers Callbacks func (mw *MultiWallet) fetchHeadersStarted(peerInitialHeight int32) { - if !mw.IsSyncing() { - return - } - - mw.syncData.mu.RLock() - headersFetchingStarted := mw.syncData.headersFetchProgress.beginFetchTimeStamp != -1 - showLogs := mw.syncData.showLogs - mw.syncData.mu.RUnlock() - - if headersFetchingStarted { - // This function gets called for each newly connected peer so - // ignore if headers fetching was already started. - return - } - - for _, wallet := range mw.wallets { - wallet.waitingForHeaders = true - } - - lowestBlockHeight := mw.GetLowestBlock().Height - - mw.syncData.mu.Lock() - mw.syncData.activeSyncData.syncStage = HeadersFetchSyncStage - mw.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp = time.Now().Unix() - mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight = lowestBlockHeight - mw.syncData.headersFetchProgress.totalFetchedHeadersCount = 0 - mw.syncData.activeSyncData.totalInactiveSeconds = 0 - mw.syncData.mu.Unlock() - - if showLogs { - log.Infof("Step 1 of 3 - fetching %d block headers.", peerInitialHeight-lowestBlockHeight) - } + // if !mw.IsSyncing() { + // return + // } + + // mw.syncData.mu.RLock() + // headersFetchingStarted := mw.syncData.headersFetchProgress.beginFetchTimeStamp != -1 + // showLogs := mw.syncData.showLogs + // mw.syncData.mu.RUnlock() + + // if headersFetchingStarted { + // // This function gets called for each newly connected peer so + // // ignore if headers fetching was already started. 
+ // return + // } + + // for _, wallet := range mw.wallets { + // wallet.WaitingForHeaders = true + // } + + // lowestBlockHeight := mw.GetLowestBlock().Height + + // mw.syncData.mu.Lock() + // mw.syncData.activeSyncData.syncStage = HeadersFetchSyncStage + // mw.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp = time.Now().Unix() + // mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight = lowestBlockHeight + // mw.syncData.headersFetchProgress.totalFetchedHeadersCount = 0 + // mw.syncData.activeSyncData.totalInactiveSeconds = 0 + // mw.syncData.mu.Unlock() + + // if showLogs { + // log.Infof("Step 1 of 3 - fetching %d block headers.", peerInitialHeight-lowestBlockHeight) + // } } func (mw *MultiWallet) fetchHeadersProgress(lastFetchedHeaderHeight int32, lastFetchedHeaderTime int64) { - if !mw.IsSyncing() { - return - } - - mw.syncData.mu.RLock() - headersFetchingCompleted := mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent != -1 - mw.syncData.mu.RUnlock() - - if headersFetchingCompleted { - // This function gets called for each newly connected peer so ignore - // this call if the headers fetching phase was previously completed. - return - } - - for _, wallet := range mw.wallets { - if wallet.waitingForHeaders { - wallet.waitingForHeaders = wallet.GetBestBlock() > lastFetchedHeaderHeight - } - } - - // lock the mutex before reading and writing to mw.syncData.* - mw.syncData.mu.Lock() - - if lastFetchedHeaderHeight > mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight { - mw.syncData.activeSyncData.headersFetchProgress.totalFetchedHeadersCount = lastFetchedHeaderHeight - mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight - } - - headersLeftToFetch := mw.estimateBlockHeadersCountAfter(lastFetchedHeaderTime) - totalHeadersToFetch := lastFetchedHeaderHeight + headersLeftToFetch - headersFetchProgress := float64(mw.syncData.activeSyncData.headersFetchProgress.totalFetchedHeadersCount) / float64(totalHeadersToFetch) - - // If there was some period of inactivity, - // assume that this process started at some point in the future, - // thereby accounting for the total reported time of inactivity. - mw.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp += mw.syncData.activeSyncData.totalInactiveSeconds - mw.syncData.activeSyncData.totalInactiveSeconds = 0 - - fetchTimeTakenSoFar := time.Now().Unix() - mw.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp - if fetchTimeTakenSoFar < 1 { - fetchTimeTakenSoFar = 1 - } - estimatedTotalHeadersFetchTime := float64(fetchTimeTakenSoFar) / headersFetchProgress - - // For some reason, the actual total headers fetch time is more than the predicted/estimated time. - // Account for this difference by multiplying the estimatedTotalHeadersFetchTime by an incrementing factor. - // The incrementing factor is inversely proportional to the headers fetch progress, - // ranging from 0.5 to 0 as headers fetching progress increases from 0 to 1. - // todo, the above noted (mal)calculation may explain this difference. - // TODO: is this adjustment still needed since the calculation has been corrected. 
- adjustmentFactor := 0.5 * (1 - headersFetchProgress) - estimatedTotalHeadersFetchTime += estimatedTotalHeadersFetchTime * adjustmentFactor - - estimatedDiscoveryTime := estimatedTotalHeadersFetchTime * DiscoveryPercentage - estimatedRescanTime := estimatedTotalHeadersFetchTime * RescanPercentage - estimatedTotalSyncTime := float64(mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent) + - estimatedTotalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime - - totalSyncProgress := float64(fetchTimeTakenSoFar) / estimatedTotalSyncTime - totalTimeRemainingSeconds := int64(math.Round(estimatedTotalSyncTime)) - fetchTimeTakenSoFar - - // update headers fetching progress report including total progress percentage and total time remaining - mw.syncData.activeSyncData.headersFetchProgress.TotalHeadersToFetch = totalHeadersToFetch - mw.syncData.activeSyncData.headersFetchProgress.CurrentHeaderHeight = lastFetchedHeaderHeight - mw.syncData.activeSyncData.headersFetchProgress.CurrentHeaderTimestamp = lastFetchedHeaderTime - mw.syncData.activeSyncData.headersFetchProgress.HeadersFetchProgress = roundUp(headersFetchProgress * 100.0) - mw.syncData.activeSyncData.headersFetchProgress.TotalSyncProgress = roundUp(totalSyncProgress * 100.0) - mw.syncData.activeSyncData.headersFetchProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds - - // unlock the mutex before issuing notification callbacks to prevent potential deadlock - // if any invoked callback takes a considerable amount of time to execute. - mw.syncData.mu.Unlock() - - // notify progress listener of estimated progress report - mw.publishFetchHeadersProgress() - - // todo: also log report if showLog == true - timeTakenSoFar := mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + fetchTimeTakenSoFar - headersFetchTimeRemaining := estimatedTotalHeadersFetchTime - float64(fetchTimeTakenSoFar) - debugInfo := &DebugInfo{ - timeTakenSoFar, - totalTimeRemainingSeconds, - fetchTimeTakenSoFar, - int64(math.Round(headersFetchTimeRemaining)), - } - mw.publishDebugInfo(debugInfo) + // if !mw.IsSyncing() { + // return + // } + + // mw.syncData.mu.RLock() + // headersFetchingCompleted := mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent != -1 + // mw.syncData.mu.RUnlock() + + // if headersFetchingCompleted { + // // This function gets called for each newly connected peer so ignore + // // this call if the headers fetching phase was previously completed. 
+ // return + // } + + // for _, wallet := range mw.wallets { + // if wallet.WaitingForHeaders { + // wallet.WaitingForHeaders = wallet.GetBestBlock() > lastFetchedHeaderHeight + // } + // } + + // // lock the mutex before reading and writing to mw.syncData.* + // mw.syncData.mu.Lock() + + // if lastFetchedHeaderHeight > mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight { + // mw.syncData.activeSyncData.headersFetchProgress.totalFetchedHeadersCount = lastFetchedHeaderHeight - mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight + // } + + // headersLeftToFetch := mw.estimateBlockHeadersCountAfter(lastFetchedHeaderTime) + // totalHeadersToFetch := lastFetchedHeaderHeight + headersLeftToFetch + // headersFetchProgress := float64(mw.syncData.activeSyncData.headersFetchProgress.totalFetchedHeadersCount) / float64(totalHeadersToFetch) + + // // If there was some period of inactivity, + // // assume that this process started at some point in the future, + // // thereby accounting for the total reported time of inactivity. + // mw.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp += mw.syncData.activeSyncData.totalInactiveSeconds + // mw.syncData.activeSyncData.totalInactiveSeconds = 0 + + // fetchTimeTakenSoFar := time.Now().Unix() - mw.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp + // if fetchTimeTakenSoFar < 1 { + // fetchTimeTakenSoFar = 1 + // } + // estimatedTotalHeadersFetchTime := float64(fetchTimeTakenSoFar) / headersFetchProgress + + // // For some reason, the actual total headers fetch time is more than the predicted/estimated time. + // // Account for this difference by multiplying the estimatedTotalHeadersFetchTime by an incrementing factor. + // // The incrementing factor is inversely proportional to the headers fetch progress, + // // ranging from 0.5 to 0 as headers fetching progress increases from 0 to 1. + // // todo, the above noted (mal)calculation may explain this difference. + // // TODO: is this adjustment still needed since the calculation has been corrected. 
+ // adjustmentFactor := 0.5 * (1 - headersFetchProgress) + // estimatedTotalHeadersFetchTime += estimatedTotalHeadersFetchTime * adjustmentFactor + + // estimatedDiscoveryTime := estimatedTotalHeadersFetchTime * DiscoveryPercentage + // estimatedRescanTime := estimatedTotalHeadersFetchTime * RescanPercentage + // estimatedTotalSyncTime := float64(mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent) + + // estimatedTotalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime + + // totalSyncProgress := float64(fetchTimeTakenSoFar) / estimatedTotalSyncTime + // totalTimeRemainingSeconds := int64(math.Round(estimatedTotalSyncTime)) - fetchTimeTakenSoFar + + // // update headers fetching progress report including total progress percentage and total time remaining + // mw.syncData.activeSyncData.headersFetchProgress.TotalHeadersToFetch = totalHeadersToFetch + // mw.syncData.activeSyncData.headersFetchProgress.CurrentHeaderHeight = lastFetchedHeaderHeight + // mw.syncData.activeSyncData.headersFetchProgress.CurrentHeaderTimestamp = lastFetchedHeaderTime + // mw.syncData.activeSyncData.headersFetchProgress.HeadersFetchProgress = roundUp(headersFetchProgress * 100.0) + // mw.syncData.activeSyncData.headersFetchProgress.TotalSyncProgress = roundUp(totalSyncProgress * 100.0) + // mw.syncData.activeSyncData.headersFetchProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + + // // unlock the mutex before issuing notification callbacks to prevent potential deadlock + // // if any invoked callback takes a considerable amount of time to execute. + // mw.syncData.mu.Unlock() + + // // notify progress listener of estimated progress report + // mw.publishFetchHeadersProgress() + + // // todo: also log report if showLog == true + // timeTakenSoFar := mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + fetchTimeTakenSoFar + // headersFetchTimeRemaining := estimatedTotalHeadersFetchTime - float64(fetchTimeTakenSoFar) + // debugInfo := &DebugInfo{ + // timeTakenSoFar, + // totalTimeRemainingSeconds, + // fetchTimeTakenSoFar, + // int64(math.Round(headersFetchTimeRemaining)), + // } + // mw.publishDebugInfo(debugInfo) } func (mw *MultiWallet) publishFetchHeadersProgress() { - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.OnHeadersFetchProgress(&mw.syncData.headersFetchProgress) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnHeadersFetchProgress(&mw.syncData.headersFetchProgress) + // } } func (mw *MultiWallet) fetchHeadersFinished() { - mw.syncData.mu.Lock() - defer mw.syncData.mu.Unlock() - - if !mw.syncData.syncing { - // ignore if sync is not in progress - return - } - - mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight = -1 - mw.syncData.headersFetchProgress.totalFetchedHeadersCount = 0 - mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent = time.Now().Unix() - mw.syncData.headersFetchProgress.beginFetchTimeStamp - - // If there is some period of inactivity reported at this stage, - // subtract it from the total stage time. - mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent -= mw.syncData.totalInactiveSeconds - mw.syncData.activeSyncData.totalInactiveSeconds = 0 - - if mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent < 150 { - // This ensures that minimum ETA used for stage 2 (address discovery) is 120 seconds (80% of 150 seconds). 
- mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent = 150 - } - - if mw.syncData.showLogs && mw.syncData.syncing { - log.Info("Fetch headers completed.") - } + // mw.syncData.mu.Lock() + // defer mw.syncData.mu.Unlock() + + // if !mw.syncData.syncing { + // // ignore if sync is not in progress + // return + // } + + // mw.syncData.activeSyncData.headersFetchProgress.startHeaderHeight = -1 + // mw.syncData.headersFetchProgress.totalFetchedHeadersCount = 0 + // mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent = time.Now().Unix() - mw.syncData.headersFetchProgress.beginFetchTimeStamp + + // // If there is some period of inactivity reported at this stage, + // // subtract it from the total stage time. + // mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent -= mw.syncData.totalInactiveSeconds + // mw.syncData.activeSyncData.totalInactiveSeconds = 0 + + // if mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent < 150 { + // // This ensures that minimum ETA used for stage 2 (address discovery) is 120 seconds (80% of 150 seconds). + // mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent = 150 + // } + + // if mw.syncData.showLogs && mw.syncData.syncing { + // log.Info("Fetch headers completed.") + // } } // Address/Account Discovery Callbacks func (mw *MultiWallet) discoverAddressesStarted(walletID int) { - if !mw.IsSyncing() { - return - } - - mw.syncData.mu.RLock() - addressDiscoveryAlreadyStarted := mw.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime != -1 - totalHeadersFetchTime := float64(mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent) - mw.syncData.mu.RUnlock() - - if addressDiscoveryAlreadyStarted { - return - } - - mw.syncData.mu.Lock() - mw.syncData.activeSyncData.syncStage = AddressDiscoverySyncStage - mw.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime = time.Now().Unix() - mw.syncData.activeSyncData.addressDiscoveryProgress.WalletID = walletID - mw.syncData.addressDiscoveryCompletedOrCanceled = make(chan bool) - mw.syncData.mu.Unlock() - - go mw.updateAddressDiscoveryProgress(totalHeadersFetchTime) - - if mw.syncData.showLogs { - log.Info("Step 2 of 3 - discovering used addresses.") - } + // if !mw.IsSyncing() { + // return + // } + + // mw.syncData.mu.RLock() + // addressDiscoveryAlreadyStarted := mw.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime != -1 + // totalHeadersFetchTime := float64(mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent) + // mw.syncData.mu.RUnlock() + + // if addressDiscoveryAlreadyStarted { + // return + // } + + // mw.syncData.mu.Lock() + // mw.syncData.activeSyncData.syncStage = AddressDiscoverySyncStage + // mw.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime = time.Now().Unix() + // mw.syncData.activeSyncData.addressDiscoveryProgress.WalletID = walletID + // mw.syncData.addressDiscoveryCompletedOrCanceled = make(chan bool) + // mw.syncData.mu.Unlock() + + // go mw.updateAddressDiscoveryProgress(totalHeadersFetchTime) + + // if mw.syncData.showLogs { + // log.Info("Step 2 of 3 - discovering used addresses.") + // } } func (mw *MultiWallet) updateAddressDiscoveryProgress(totalHeadersFetchTime float64) { // use ticker to calculate and broadcast address discovery progress every second - everySecondTicker := time.NewTicker(1 * time.Second) - - // these values will be used every second to calculate the total sync progress - 
estimatedDiscoveryTime := totalHeadersFetchTime * DiscoveryPercentage - estimatedRescanTime := totalHeadersFetchTime * RescanPercentage - - // track last logged time remaining and total percent to avoid re-logging same message - var lastTimeRemaining int64 - var lastTotalPercent int32 = -1 - - for { - if !mw.IsSyncing() { - return - } - - // If there was some period of inactivity, - // assume that this process started at some point in the future, - // thereby accounting for the total reported time of inactivity. - mw.syncData.mu.Lock() - mw.syncData.addressDiscoveryProgress.addressDiscoveryStartTime += mw.syncData.totalInactiveSeconds - mw.syncData.totalInactiveSeconds = 0 - addressDiscoveryStartTime := mw.syncData.addressDiscoveryProgress.addressDiscoveryStartTime - totalCfiltersFetchTime := float64(mw.syncData.cfiltersFetchProgress.cfiltersFetchTimeSpent) - showLogs := mw.syncData.showLogs - mw.syncData.mu.Unlock() - - select { - case <-mw.syncData.addressDiscoveryCompletedOrCanceled: - // stop calculating and broadcasting address discovery progress - everySecondTicker.Stop() - if showLogs { - log.Info("Address discovery complete.") - } - return - - case <-everySecondTicker.C: - // calculate address discovery progress - elapsedDiscoveryTime := float64(time.Now().Unix() - addressDiscoveryStartTime) - discoveryProgress := (elapsedDiscoveryTime / estimatedDiscoveryTime) * 100 - - var totalSyncTime float64 - if elapsedDiscoveryTime > estimatedDiscoveryTime { - totalSyncTime = totalCfiltersFetchTime + totalHeadersFetchTime + elapsedDiscoveryTime + estimatedRescanTime - } else { - totalSyncTime = totalCfiltersFetchTime + totalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime - } - - totalElapsedTime := totalCfiltersFetchTime + totalHeadersFetchTime + elapsedDiscoveryTime - totalProgress := (totalElapsedTime / totalSyncTime) * 100 - - remainingAccountDiscoveryTime := math.Round(estimatedDiscoveryTime - elapsedDiscoveryTime) - if remainingAccountDiscoveryTime < 0 { - remainingAccountDiscoveryTime = 0 - } - - totalProgressPercent := int32(math.Round(totalProgress)) - totalTimeRemainingSeconds := int64(math.Round(remainingAccountDiscoveryTime + estimatedRescanTime)) - - // update address discovery progress, total progress and total time remaining - mw.syncData.mu.Lock() - mw.syncData.addressDiscoveryProgress.AddressDiscoveryProgress = int32(math.Round(discoveryProgress)) - mw.syncData.addressDiscoveryProgress.TotalSyncProgress = totalProgressPercent - mw.syncData.addressDiscoveryProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds - mw.syncData.mu.Unlock() - - mw.publishAddressDiscoveryProgress() - - debugInfo := &DebugInfo{ - int64(math.Round(totalElapsedTime)), - totalTimeRemainingSeconds, - int64(math.Round(elapsedDiscoveryTime)), - int64(math.Round(remainingAccountDiscoveryTime)), - } - mw.publishDebugInfo(debugInfo) - - if showLogs { - // avoid logging same message multiple times - if totalProgressPercent != lastTotalPercent || totalTimeRemainingSeconds != lastTimeRemaining { - log.Infof("Syncing %d%%, %s remaining, discovering used addresses.", - totalProgressPercent, CalculateTotalTimeRemaining(totalTimeRemainingSeconds)) - - lastTotalPercent = totalProgressPercent - lastTimeRemaining = totalTimeRemainingSeconds - } - } - } - } + // everySecondTicker := time.NewTicker(1 * time.Second) + + // // these values will be used every second to calculate the total sync progress + // estimatedDiscoveryTime := totalHeadersFetchTime * DiscoveryPercentage + // estimatedRescanTime 
:= totalHeadersFetchTime * RescanPercentage + + // // track last logged time remaining and total percent to avoid re-logging same message + // var lastTimeRemaining int64 + // var lastTotalPercent int32 = -1 + + // for { + // if !mw.IsSyncing() { + // return + // } + + // // If there was some period of inactivity, + // // assume that this process started at some point in the future, + // // thereby accounting for the total reported time of inactivity. + // mw.syncData.mu.Lock() + // mw.syncData.addressDiscoveryProgress.addressDiscoveryStartTime += mw.syncData.totalInactiveSeconds + // mw.syncData.totalInactiveSeconds = 0 + // addressDiscoveryStartTime := mw.syncData.addressDiscoveryProgress.addressDiscoveryStartTime + // totalCfiltersFetchTime := float64(mw.syncData.cfiltersFetchProgress.cfiltersFetchTimeSpent) + // showLogs := mw.syncData.showLogs + // mw.syncData.mu.Unlock() + + // select { + // case <-mw.syncData.addressDiscoveryCompletedOrCanceled: + // // stop calculating and broadcasting address discovery progress + // everySecondTicker.Stop() + // if showLogs { + // log.Info("Address discovery complete.") + // } + // return + + // case <-everySecondTicker.C: + // // calculate address discovery progress + // elapsedDiscoveryTime := float64(time.Now().Unix() - addressDiscoveryStartTime) + // discoveryProgress := (elapsedDiscoveryTime / estimatedDiscoveryTime) * 100 + + // var totalSyncTime float64 + // if elapsedDiscoveryTime > estimatedDiscoveryTime { + // totalSyncTime = totalCfiltersFetchTime + totalHeadersFetchTime + elapsedDiscoveryTime + estimatedRescanTime + // } else { + // totalSyncTime = totalCfiltersFetchTime + totalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime + // } + + // totalElapsedTime := totalCfiltersFetchTime + totalHeadersFetchTime + elapsedDiscoveryTime + // totalProgress := (totalElapsedTime / totalSyncTime) * 100 + + // remainingAccountDiscoveryTime := math.Round(estimatedDiscoveryTime - elapsedDiscoveryTime) + // if remainingAccountDiscoveryTime < 0 { + // remainingAccountDiscoveryTime = 0 + // } + + // totalProgressPercent := int32(math.Round(totalProgress)) + // totalTimeRemainingSeconds := int64(math.Round(remainingAccountDiscoveryTime + estimatedRescanTime)) + + // // update address discovery progress, total progress and total time remaining + // mw.syncData.mu.Lock() + // mw.syncData.addressDiscoveryProgress.AddressDiscoveryProgress = int32(math.Round(discoveryProgress)) + // mw.syncData.addressDiscoveryProgress.TotalSyncProgress = totalProgressPercent + // mw.syncData.addressDiscoveryProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + // mw.syncData.mu.Unlock() + + // mw.publishAddressDiscoveryProgress() + + // debugInfo := &DebugInfo{ + // int64(math.Round(totalElapsedTime)), + // totalTimeRemainingSeconds, + // int64(math.Round(elapsedDiscoveryTime)), + // int64(math.Round(remainingAccountDiscoveryTime)), + // } + // mw.publishDebugInfo(debugInfo) + + // if showLogs { + // // avoid logging same message multiple times + // if totalProgressPercent != lastTotalPercent || totalTimeRemainingSeconds != lastTimeRemaining { + // log.Infof("Syncing %d%%, %s remaining, discovering used addresses.", + // totalProgressPercent, CalculateTotalTimeRemaining(totalTimeRemainingSeconds)) + + // lastTotalPercent = totalProgressPercent + // lastTimeRemaining = totalTimeRemainingSeconds + // } + // } + // } + // } } func (mw *MultiWallet) publishAddressDiscoveryProgress() { - for _, syncProgressListener := range mw.syncProgressListeners() { - 
syncProgressListener.OnAddressDiscoveryProgress(&mw.syncData.activeSyncData.addressDiscoveryProgress) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnAddressDiscoveryProgress(&mw.syncData.activeSyncData.addressDiscoveryProgress) + // } } func (mw *MultiWallet) discoverAddressesFinished(walletID int) { - if !mw.IsSyncing() { - return - } + // if !mw.IsSyncing() { + // return + // } mw.stopUpdatingAddressDiscoveryProgress() } func (mw *MultiWallet) stopUpdatingAddressDiscoveryProgress() { - mw.syncData.mu.Lock() - if mw.syncData.activeSyncData != nil && mw.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled != nil { - close(mw.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled) - mw.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled = nil - mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent = time.Now().Unix() - mw.syncData.addressDiscoveryProgress.addressDiscoveryStartTime - } - mw.syncData.mu.Unlock() + // mw.syncData.mu.Lock() + // if mw.syncData.activeSyncData != nil && mw.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled != nil { + // close(mw.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled) + // mw.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled = nil + // mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent = time.Now().Unix() - mw.syncData.addressDiscoveryProgress.addressDiscoveryStartTime + // } + // mw.syncData.mu.Unlock() } // Blocks Scan Callbacks func (mw *MultiWallet) rescanStarted(walletID int) { - mw.stopUpdatingAddressDiscoveryProgress() + // mw.stopUpdatingAddressDiscoveryProgress() - mw.syncData.mu.Lock() - defer mw.syncData.mu.Unlock() + // mw.syncData.mu.Lock() + // defer mw.syncData.mu.Unlock() - if !mw.syncData.syncing { - // ignore if sync is not in progress - return - } + // if !mw.syncData.syncing { + // // ignore if sync is not in progress + // return + // } - mw.syncData.activeSyncData.syncStage = HeadersRescanSyncStage - mw.syncData.activeSyncData.rescanStartTime = time.Now().Unix() + // mw.syncData.activeSyncData.syncStage = HeadersRescanSyncStage + // mw.syncData.activeSyncData.rescanStartTime = time.Now().Unix() - // retain last total progress report from address discovery phase - mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = mw.syncData.activeSyncData.addressDiscoveryProgress.TotalTimeRemainingSeconds - mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = mw.syncData.activeSyncData.addressDiscoveryProgress.TotalSyncProgress - mw.syncData.activeSyncData.headersRescanProgress.WalletID = walletID + // // retain last total progress report from address discovery phase + // mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = mw.syncData.activeSyncData.addressDiscoveryProgress.TotalTimeRemainingSeconds + // mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = mw.syncData.activeSyncData.addressDiscoveryProgress.TotalSyncProgress + // mw.syncData.activeSyncData.headersRescanProgress.WalletID = walletID - if mw.syncData.showLogs && mw.syncData.syncing { - log.Info("Step 3 of 3 - Scanning block headers.") - } + // if mw.syncData.showLogs && mw.syncData.syncing { + // log.Info("Step 3 of 3 - Scanning block headers.") + // } } func (mw *MultiWallet) rescanProgress(walletID int, rescannedThrough int32) { - if !mw.IsSyncing() { - // ignore if sync is not in progress - return - } - - wallet := mw.wallets[walletID] - 
totalHeadersToScan := wallet.GetBestBlock() - - rescanRate := float64(rescannedThrough) / float64(totalHeadersToScan) - - mw.syncData.mu.Lock() - - // If there was some period of inactivity, - // assume that this process started at some point in the future, - // thereby accounting for the total reported time of inactivity. - mw.syncData.activeSyncData.rescanStartTime += mw.syncData.activeSyncData.totalInactiveSeconds - mw.syncData.activeSyncData.totalInactiveSeconds = 0 - - elapsedRescanTime := time.Now().Unix() - mw.syncData.activeSyncData.rescanStartTime - estimatedTotalRescanTime := int64(math.Round(float64(elapsedRescanTime) / rescanRate)) - totalTimeRemainingSeconds := estimatedTotalRescanTime - elapsedRescanTime - totalElapsedTime := mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent + - mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent + elapsedRescanTime - - mw.syncData.activeSyncData.headersRescanProgress.WalletID = walletID - mw.syncData.activeSyncData.headersRescanProgress.TotalHeadersToScan = totalHeadersToScan - mw.syncData.activeSyncData.headersRescanProgress.RescanProgress = int32(math.Round(rescanRate * 100)) - mw.syncData.activeSyncData.headersRescanProgress.CurrentRescanHeight = rescannedThrough - mw.syncData.activeSyncData.headersRescanProgress.RescanTimeRemaining = totalTimeRemainingSeconds - - // do not update total time taken and total progress percent if elapsedRescanTime is 0 - // because the estimatedTotalRescanTime will be inaccurate (also 0) - // which will make the estimatedTotalSyncTime equal to totalElapsedTime - // giving the wrong impression that the process is complete - if elapsedRescanTime > 0 { - estimatedTotalSyncTime := mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent + - mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent + estimatedTotalRescanTime - totalProgress := (float64(totalElapsedTime) / float64(estimatedTotalSyncTime)) * 100 - - mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds - mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = int32(math.Round(totalProgress)) - } - - mw.syncData.mu.Unlock() - - mw.publishHeadersRescanProgress() - - debugInfo := &DebugInfo{ - totalElapsedTime, - totalTimeRemainingSeconds, - elapsedRescanTime, - totalTimeRemainingSeconds, - } - mw.publishDebugInfo(debugInfo) - - mw.syncData.mu.RLock() - if mw.syncData.showLogs { - log.Infof("Syncing %d%%, %s remaining, scanning %d of %d block headers.", - mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress, - CalculateTotalTimeRemaining(mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds), - mw.syncData.activeSyncData.headersRescanProgress.CurrentRescanHeight, - mw.syncData.activeSyncData.headersRescanProgress.TotalHeadersToScan, - ) - } - mw.syncData.mu.RUnlock() + // if !mw.IsSyncing() { + // // ignore if sync is not in progress + // return + // } + + // wallet := mw.wallets[walletID] + // totalHeadersToScan := wallet.GetBestBlock() + + // rescanRate := float64(rescannedThrough) / float64(totalHeadersToScan) + + // mw.syncData.mu.Lock() + + // // If there was some period of inactivity, + // // assume that this process started at some point in the future, + // // thereby accounting for the total reported time of inactivity. 
+ // mw.syncData.activeSyncData.rescanStartTime += mw.syncData.activeSyncData.totalInactiveSeconds + // mw.syncData.activeSyncData.totalInactiveSeconds = 0 + + // elapsedRescanTime := time.Now().Unix() - mw.syncData.activeSyncData.rescanStartTime + // estimatedTotalRescanTime := int64(math.Round(float64(elapsedRescanTime) / rescanRate)) + // totalTimeRemainingSeconds := estimatedTotalRescanTime - elapsedRescanTime + // totalElapsedTime := mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent + + // mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent + elapsedRescanTime + + // mw.syncData.activeSyncData.headersRescanProgress.WalletID = walletID + // mw.syncData.activeSyncData.headersRescanProgress.TotalHeadersToScan = totalHeadersToScan + // mw.syncData.activeSyncData.headersRescanProgress.RescanProgress = int32(math.Round(rescanRate * 100)) + // mw.syncData.activeSyncData.headersRescanProgress.CurrentRescanHeight = rescannedThrough + // mw.syncData.activeSyncData.headersRescanProgress.RescanTimeRemaining = totalTimeRemainingSeconds + + // // do not update total time taken and total progress percent if elapsedRescanTime is 0 + // // because the estimatedTotalRescanTime will be inaccurate (also 0) + // // which will make the estimatedTotalSyncTime equal to totalElapsedTime + // // giving the wrong impression that the process is complete + // if elapsedRescanTime > 0 { + // estimatedTotalSyncTime := mw.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + mw.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent + + // mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent + estimatedTotalRescanTime + // totalProgress := (float64(totalElapsedTime) / float64(estimatedTotalSyncTime)) * 100 + + // mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + // mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = int32(math.Round(totalProgress)) + // } + + // mw.syncData.mu.Unlock() + + // mw.publishHeadersRescanProgress() + + // debugInfo := &DebugInfo{ + // totalElapsedTime, + // totalTimeRemainingSeconds, + // elapsedRescanTime, + // totalTimeRemainingSeconds, + // } + // mw.publishDebugInfo(debugInfo) + + // mw.syncData.mu.RLock() + // if mw.syncData.showLogs { + // log.Infof("Syncing %d%%, %s remaining, scanning %d of %d block headers.", + // mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress, + // CalculateTotalTimeRemaining(mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds), + // mw.syncData.activeSyncData.headersRescanProgress.CurrentRescanHeight, + // mw.syncData.activeSyncData.headersRescanProgress.TotalHeadersToScan, + // ) + // } + // mw.syncData.mu.RUnlock() } func (mw *MultiWallet) publishHeadersRescanProgress() { - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.OnHeadersRescanProgress(&mw.syncData.activeSyncData.headersRescanProgress) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnHeadersRescanProgress(&mw.syncData.activeSyncData.headersRescanProgress) + // } } func (mw *MultiWallet) rescanFinished(walletID int) { - if !mw.IsSyncing() { - // ignore if sync is not in progress - return - } - - mw.syncData.mu.Lock() - mw.syncData.activeSyncData.headersRescanProgress.WalletID = walletID - 
mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = 0 - mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = 100 - - // Reset these value so that address discovery would - // not be skipped for the next wallet. - mw.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime = -1 - mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent = -1 - mw.syncData.mu.Unlock() - - mw.publishHeadersRescanProgress() + // if !mw.IsSyncing() { + // // ignore if sync is not in progress + // return + // } + + // mw.syncData.mu.Lock() + // mw.syncData.activeSyncData.headersRescanProgress.WalletID = walletID + // mw.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = 0 + // mw.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = 100 + + // // Reset these value so that address discovery would + // // not be skipped for the next wallet. + // mw.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime = -1 + // mw.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent = -1 + // mw.syncData.mu.Unlock() + + // mw.publishHeadersRescanProgress() } func (mw *MultiWallet) publishDebugInfo(debugInfo *DebugInfo) { - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.Debug(debugInfo) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.Debug(debugInfo) + // } } /** Helper functions start here */ @@ -592,95 +592,95 @@ func (mw *MultiWallet) estimateBlockHeadersCountAfter(lastHeaderTime int64) int3 } func (mw *MultiWallet) notifySyncError(err error) { - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.OnSyncEndedWithError(err) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnSyncEndedWithError(err) + // } } func (mw *MultiWallet) notifySyncCanceled() { - mw.syncData.mu.RLock() - restartSyncRequested := mw.syncData.restartSyncRequested - mw.syncData.mu.RUnlock() + // mw.syncData.mu.RLock() + // restartSyncRequested := mw.syncData.restartSyncRequested + // mw.syncData.mu.RUnlock() - for _, syncProgressListener := range mw.syncProgressListeners() { - syncProgressListener.OnSyncCanceled(restartSyncRequested) - } + // for _, syncProgressListener := range mw.syncProgressListeners() { + // syncProgressListener.OnSyncCanceled(restartSyncRequested) + // } } func (mw *MultiWallet) resetSyncData() { // It's possible that sync ends or errors while address discovery is ongoing. // If this happens, it's important to stop the address discovery process before // resetting sync data. - mw.stopUpdatingAddressDiscoveryProgress() - - mw.syncData.mu.Lock() - mw.syncData.syncing = false - mw.syncData.synced = false - mw.syncData.cancelSync = nil - mw.syncData.syncCanceled = nil - mw.syncData.activeSyncData = nil - mw.syncData.mu.Unlock() - - for _, wallet := range mw.wallets { - wallet.waitingForHeaders = true - wallet.LockWallet() // lock wallet if previously unlocked to perform account discovery. 
- } + // mw.stopUpdatingAddressDiscoveryProgress() + + // mw.syncData.mu.Lock() + // mw.syncData.syncing = false + // mw.syncData.synced = false + // mw.syncData.cancelSync = nil + // mw.syncData.syncCanceled = nil + // mw.syncData.activeSyncData = nil + // mw.syncData.mu.Unlock() + + // for _, wallet := range mw.wallets { + // wallet.WaitingForHeaders = true + // wallet.LockWallet() // lock wallet if previously unlocked to perform account discovery. + // } } func (mw *MultiWallet) synced(walletID int, synced bool) { - indexTransactions := func() { - // begin indexing transactions after sync is completed, - // syncProgressListeners.OnSynced() will be invoked after transactions are indexed - var txIndexing errgroup.Group - for _, wallet := range mw.wallets { - txIndexing.Go(wallet.IndexTransactions) - } - - go func() { - err := txIndexing.Wait() - if err != nil { - log.Errorf("Tx Index Error: %v", err) - } - - for _, syncProgressListener := range mw.syncProgressListeners() { - if synced { - syncProgressListener.OnSyncCompleted() - } else { - syncProgressListener.OnSyncCanceled(false) - } - } - }() - } - - mw.syncData.mu.RLock() - allWalletsSynced := mw.syncData.synced - mw.syncData.mu.RUnlock() - - if allWalletsSynced && synced { - indexTransactions() - return - } - - wallet := mw.wallets[walletID] - wallet.synced = synced - wallet.syncing = false - mw.listenForTransactions(wallet.ID) - - if !wallet.Internal().Locked() { - wallet.LockWallet() // lock wallet if previously unlocked to perform account discovery. - err := mw.markWalletAsDiscoveredAccounts(walletID) - if err != nil { - log.Error(err) - } - } - - if mw.OpenedWalletsCount() == mw.SyncedWalletsCount() { - mw.syncData.mu.Lock() - mw.syncData.syncing = false - mw.syncData.synced = true - mw.syncData.mu.Unlock() - - indexTransactions() - } + // indexTransactions := func() { + // // begin indexing transactions after sync is completed, + // // syncProgressListeners.OnSynced() will be invoked after transactions are indexed + // var txIndexing errgroup.Group + // for _, wallet := range mw.wallets { + // txIndexing.Go(wallet.IndexTransactions) + // } + + // go func() { + // err := txIndexing.Wait() + // if err != nil { + // log.Errorf("Tx Index Error: %v", err) + // } + + // for _, syncProgressListener := range mw.syncProgressListeners() { + // if synced { + // syncProgressListener.OnSyncCompleted() + // } else { + // syncProgressListener.OnSyncCanceled(false) + // } + // } + // }() + // } + + // mw.syncData.mu.RLock() + // allWalletsSynced := mw.syncData.synced + // mw.syncData.mu.RUnlock() + + // if allWalletsSynced && synced { + // indexTransactions() + // return + // } + + // wallet := mw.wallets[walletID] + // wallet.Synced = synced + // wallet.Syncing = false + // // mw.listenForTransactions(wallet.ID) + + // if !wallet.Internal().Locked() { + // wallet.LockWallet() // lock wallet if previously unlocked to perform account discovery. 
+ // err := mw.markWalletAsDiscoveredAccounts(walletID) + // if err != nil { + // log.Error(err) + // } + // } + + // if mw.OpenedWalletsCount() == mw.SyncedWalletsCount() { + // mw.syncData.mu.Lock() + // mw.syncData.syncing = false + // mw.syncData.synced = true + // mw.syncData.mu.Unlock() + + // indexTransactions() + // } } diff --git a/txandblocknotifications.go b/txandblocknotifications.go deleted file mode 100644 index dd55166a9..000000000 --- a/txandblocknotifications.go +++ /dev/null @@ -1,159 +0,0 @@ -package dcrlibwallet - -import ( - "encoding/json" - - "decred.org/dcrwallet/v2/errors" -) - -func (mw *MultiWallet) listenForTransactions(walletID int) { - go func() { - - wallet := mw.wallets[walletID] - n := wallet.Internal().NtfnServer.TransactionNotifications() - - for { - select { - case v := <-n.C: - if v == nil { - return - } - for _, transaction := range v.UnminedTransactions { - tempTransaction, err := wallet.decodeTransactionWithTxSummary(&transaction, nil) - if err != nil { - log.Errorf("[%d] Error ntfn parse tx: %v", wallet.ID, err) - return - } - - overwritten, err := wallet.walletDataDB.SaveOrUpdate(&Transaction{}, tempTransaction) - if err != nil { - log.Errorf("[%d] New Tx save err: %v", wallet.ID, err) - return - } - - if !overwritten { - log.Infof("[%d] New Transaction %s", wallet.ID, tempTransaction.Hash) - - result, err := json.Marshal(tempTransaction) - if err != nil { - log.Error(err) - } else { - mw.mempoolTransactionNotification(string(result)) - } - } - } - - for _, block := range v.AttachedBlocks { - blockHash := block.Header.BlockHash() - for _, transaction := range block.Transactions { - tempTransaction, err := wallet.decodeTransactionWithTxSummary(&transaction, &blockHash) - if err != nil { - log.Errorf("[%d] Error ntfn parse tx: %v", wallet.ID, err) - return - } - - _, err = wallet.walletDataDB.SaveOrUpdate(&Transaction{}, tempTransaction) - if err != nil { - log.Errorf("[%d] Incoming block replace tx error :%v", wallet.ID, err) - return - } - mw.publishTransactionConfirmed(wallet.ID, transaction.Hash.String(), int32(block.Header.Height)) - } - - mw.publishBlockAttached(wallet.ID, int32(block.Header.Height)) - } - - if len(v.AttachedBlocks) > 0 { - mw.checkWalletMixers() - } - - case <-mw.syncData.syncCanceled: - n.Done() - } - } - }() -} - -// AddTxAndBlockNotificationListener registers a set of functions to be invoked -// when a transaction or block update is processed by the wallet. If async is -// true, the provided callback methods will be called from separate goroutines, -// allowing notification senders to continue their operation without waiting -// for the listener to complete processing the notification. This asyncrhonous -// handling is especially important for cases where the wallet process that -// sends the notification temporarily prevents access to other wallet features -// until all notification handlers finish processing the notification. If a -// notification handler were to try to access such features, it would result -// in a deadlock. 
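
A minimal sketch of the asynchronous dispatch the comment above argues for: a wrapper that forwards every callback on its own goroutine, so the notifying wallet routine never waits on a listener. The type name is illustrative and the interface shape is assumed from the three callbacks invoked later in this file; the package's actual asyncTxAndBlockNotificationListener is defined elsewhere and may differ.

    // Sketch only: forward notifications asynchronously so the sender never
    // blocks on a slow listener (the deadlock scenario described above).
    type asyncTxAndBlockListenerSketch struct {
    	l TxAndBlockNotificationListener
    }

    func (a *asyncTxAndBlockListenerSketch) OnTransaction(transaction string) {
    	go a.l.OnTransaction(transaction)
    }

    func (a *asyncTxAndBlockListenerSketch) OnTransactionConfirmed(walletID int, hash string, blockHeight int32) {
    	go a.l.OnTransactionConfirmed(walletID, hash, blockHeight)
    }

    func (a *asyncTxAndBlockListenerSketch) OnBlockAttached(walletID int, blockHeight int32) {
    	go a.l.OnBlockAttached(walletID, blockHeight)
    }
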
-func (mw *MultiWallet) AddTxAndBlockNotificationListener(txAndBlockNotificationListener TxAndBlockNotificationListener, async bool, uniqueIdentifier string) error { - mw.notificationListenersMu.Lock() - defer mw.notificationListenersMu.Unlock() - - _, ok := mw.txAndBlockNotificationListeners[uniqueIdentifier] - if ok { - return errors.New(ErrListenerAlreadyExist) - } - - if async { - mw.txAndBlockNotificationListeners[uniqueIdentifier] = &asyncTxAndBlockNotificationListener{ - l: txAndBlockNotificationListener, - } - } else { - mw.txAndBlockNotificationListeners[uniqueIdentifier] = txAndBlockNotificationListener - } - - return nil -} - -func (mw *MultiWallet) RemoveTxAndBlockNotificationListener(uniqueIdentifier string) { - mw.notificationListenersMu.Lock() - defer mw.notificationListenersMu.Unlock() - - delete(mw.txAndBlockNotificationListeners, uniqueIdentifier) -} - -func (mw *MultiWallet) checkWalletMixers() { - for _, wallet := range mw.wallets { - if wallet.IsAccountMixerActive() { - unmixedAccount := wallet.ReadInt32ConfigValueForKey(AccountMixerUnmixedAccount, -1) - hasMixableOutput, err := wallet.accountHasMixableOutput(unmixedAccount) - if err != nil { - log.Errorf("Error checking for mixable outputs: %v", err) - } - - if !hasMixableOutput { - log.Infof("[%d] unmixed account does not have a mixable output, stopping account mixer", wallet.ID) - err = mw.StopAccountMixer(wallet.ID) - if err != nil { - log.Errorf("Error stopping account mixer: %v", err) - } - } - } - } -} - -func (mw *MultiWallet) mempoolTransactionNotification(transaction string) { - mw.notificationListenersMu.RLock() - defer mw.notificationListenersMu.RUnlock() - - for _, txAndBlockNotifcationListener := range mw.txAndBlockNotificationListeners { - txAndBlockNotifcationListener.OnTransaction(transaction) - } -} - -func (mw *MultiWallet) publishTransactionConfirmed(walletID int, transactionHash string, blockHeight int32) { - mw.notificationListenersMu.RLock() - defer mw.notificationListenersMu.RUnlock() - - for _, txAndBlockNotifcationListener := range mw.txAndBlockNotificationListeners { - txAndBlockNotifcationListener.OnTransactionConfirmed(walletID, transactionHash, blockHeight) - } -} - -func (mw *MultiWallet) publishBlockAttached(walletID int, blockHeight int32) { - mw.notificationListenersMu.RLock() - defer mw.notificationListenersMu.RUnlock() - - for _, txAndBlockNotifcationListener := range mw.txAndBlockNotificationListeners { - txAndBlockNotifcationListener.OnBlockAttached(walletID, blockHeight) - } -} diff --git a/types.go b/types.go index 05d4bccdc..c39bb5dbe 100644 --- a/types.go +++ b/types.go @@ -36,10 +36,10 @@ type CSPPConfig struct { ChangeAccount uint32 } -type WalletsIterator struct { - currentIndex int - wallets []*Wallet -} +// type WalletsIterator struct { +// currentIndex int +// wallets []*Wallet +// } type BlockInfo struct { Height int32 diff --git a/utils.go b/utils.go index 9e4068919..68387a7e2 100644 --- a/utils.go +++ b/utils.go @@ -26,6 +26,7 @@ import ( "github.com/decred/dcrd/hdkeychain/v3" "github.com/decred/dcrd/wire" "github.com/planetdecred/dcrlibwallet/internal/loader" + ) const ( @@ -63,15 +64,6 @@ func (mw *MultiWallet) RequiredConfirmations() int32 { return DefaultRequiredConfirmations } -func (wallet *Wallet) RequiredConfirmations() int32 { - var spendUnconfirmed bool - wallet.readUserConfigValue(true, SpendUnconfirmedConfigKey, &spendUnconfirmed) - if spendUnconfirmed { - return 0 - } - return DefaultRequiredConfirmations -} - func (mw *MultiWallet) listenForShutdown() { 
mw.cancelFuncs = make([]context.CancelFunc, 0) @@ -84,17 +76,6 @@ func (mw *MultiWallet) listenForShutdown() { }() } -func (wallet *Wallet) shutdownContextWithCancel() (context.Context, context.CancelFunc) { - ctx, cancel := context.WithCancel(context.Background()) - wallet.cancelFuncs = append(wallet.cancelFuncs, cancel) - return ctx, cancel -} - -func (wallet *Wallet) shutdownContext() (ctx context.Context) { - ctx, _ = wallet.shutdownContextWithCancel() - return -} - func (mw *MultiWallet) contextWithShutdownCancel() (context.Context, context.CancelFunc) { ctx, cancel := context.WithCancel(context.Background()) mw.cancelFuncs = append(mw.cancelFuncs, cancel) @@ -207,12 +188,12 @@ func ShannonEntropy(text string) (entropy float64) { func TransactionDirectionName(direction int32) string { switch direction { - case TxDirectionSent: - return "Sent" - case TxDirectionReceived: - return "Received" - case TxDirectionTransferred: - return "Yourself" + // case TxDirectionSent: + // return "Sent" + // case TxDirectionReceived: + // return "Received" + // case TxDirectionTransferred: + // return "Yourself" default: return "invalid" } diff --git a/vsp.go b/vsp.go deleted file mode 100644 index 95eb37224..000000000 --- a/vsp.go +++ /dev/null @@ -1,192 +0,0 @@ -package dcrlibwallet - -import ( - "context" - "crypto/ed25519" - "encoding/base64" - "fmt" - "strings" - - "decred.org/dcrwallet/v2/errors" - "github.com/planetdecred/dcrlibwallet/internal/vsp" -) - -// VSPClient loads or creates a VSP client instance for the specified host. -func (wallet *Wallet) VSPClient(host string, pubKey []byte) (*vsp.Client, error) { - wallet.vspClientsMu.Lock() - defer wallet.vspClientsMu.Unlock() - client, ok := wallet.vspClients[host] - if ok { - return client, nil - } - - cfg := vsp.Config{ - URL: host, - PubKey: base64.StdEncoding.EncodeToString(pubKey), - Dialer: nil, // optional, but consider providing a value - Wallet: wallet.Internal(), - } - client, err := vsp.New(cfg) - if err != nil { - return nil, err - } - wallet.vspClients[host] = client - return client, nil -} - -// KnownVSPs returns a list of known VSPs. This list may be updated by calling -// ReloadVSPList. This method is safe for concurrent access. -func (mw *MultiWallet) KnownVSPs() []*VSP { - mw.vspMu.RLock() - defer mw.vspMu.RUnlock() - return mw.vsps // TODO: Return a copy. -} - -// SaveVSP marks a VSP as known and will be susbequently included as part of -// known VSPs. -func (mw *MultiWallet) SaveVSP(host string) (err error) { - // check if host already exists - vspDbData := mw.getVSPDBData() - for _, savedHost := range vspDbData.SavedHosts { - if savedHost == host { - return fmt.Errorf("duplicate host %s", host) - } - } - - // validate host network - info, err := vspInfo(host) - if err != nil { - return err - } - - // TODO: defaultVSPs() uses strings.Contains(network, vspInfo.Network). - if info.Network != mw.NetType() { - return fmt.Errorf("invalid net %s", info.Network) - } - - vspDbData.SavedHosts = append(vspDbData.SavedHosts, host) - mw.updateVSPDBData(vspDbData) - - mw.vspMu.Lock() - mw.vsps = append(mw.vsps, &VSP{Host: host, VspInfoResponse: info}) - mw.vspMu.Unlock() - - return -} - -// LastUsedVSP returns the host of the last used VSP, as saved by the -// SaveLastUsedVSP() method. -func (mw *MultiWallet) LastUsedVSP() string { - return mw.getVSPDBData().LastUsedVSP -} - -// SaveLastUsedVSP saves the host of the last used VSP. 
-func (mw *MultiWallet) SaveLastUsedVSP(host string) { - vspDbData := mw.getVSPDBData() - vspDbData.LastUsedVSP = host - mw.updateVSPDBData(vspDbData) -} - -type vspDbData struct { - SavedHosts []string - LastUsedVSP string -} - -func (mw *MultiWallet) getVSPDBData() *vspDbData { - vspDbData := new(vspDbData) - mw.ReadUserConfigValue(KnownVSPsConfigKey, vspDbData) - return vspDbData -} - -func (mw *MultiWallet) updateVSPDBData(data *vspDbData) { - mw.SaveUserConfigValue(KnownVSPsConfigKey, data) -} - -// ReloadVSPList reloads the list of known VSPs. -// This method makes multiple network calls; should be called in a goroutine -// to prevent blocking the UI thread. -func (mw *MultiWallet) ReloadVSPList(ctx context.Context) { - log.Debugf("Reloading list of known VSPs") - defer log.Debugf("Reloaded list of known VSPs") - - vspDbData := mw.getVSPDBData() - vspList := make(map[string]*VspInfoResponse) - for _, host := range vspDbData.SavedHosts { - vspInfo, err := vspInfo(host) - if err != nil { - // User saved this VSP. Log an error message. - log.Errorf("get vsp info error for %s: %v", host, err) - } else { - vspList[host] = vspInfo - } - if ctx.Err() != nil { - return // context canceled, abort - } - } - - otherVSPHosts, err := defaultVSPs(mw.NetType()) - if err != nil { - log.Debugf("get default vsp list error: %v", err) - } - for _, host := range otherVSPHosts { - if _, wasAdded := vspList[host]; wasAdded { - continue - } - vspInfo, err := vspInfo(host) - if err != nil { - log.Debugf("vsp info error for %s: %v\n", host, err) // debug only, user didn't request this VSP - } else { - vspList[host] = vspInfo - } - if ctx.Err() != nil { - return // context canceled, abort - } - } - - mw.vspMu.Lock() - mw.vsps = make([]*VSP, 0, len(vspList)) - for host, info := range vspList { - mw.vsps = append(mw.vsps, &VSP{Host: host, VspInfoResponse: info}) - } - mw.vspMu.Unlock() -} - -func vspInfo(vspHost string) (*VspInfoResponse, error) { - vspInfoResponse := new(VspInfoResponse) - resp, respBytes, err := HttpGet(vspHost+"/api/v3/vspinfo", vspInfoResponse) - if err != nil { - return nil, err - } - - // Validate server response. - sigStr := resp.Header.Get("VSP-Server-Signature") - sig, err := base64.StdEncoding.DecodeString(sigStr) - if err != nil { - return nil, fmt.Errorf("error validating VSP signature: %v", err) - } - if !ed25519.Verify(vspInfoResponse.PubKey, respBytes, sig) { - return nil, errors.New("bad signature from VSP") - } - - return vspInfoResponse, nil -} - -// defaultVSPs returns a list of known VSPs. -func defaultVSPs(network string) ([]string, error) { - var vspInfoResponse map[string]*VspInfoResponse - _, _, err := HttpGet("https://api.decred.org/?c=vsp", &vspInfoResponse) - if err != nil { - return nil, err - } - - // The above API does not return the pubKeys for the - // VSPs. Only return the host since we'll still need - // to make another API call to get the VSP pubKeys. 
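
For reference, the vspInfo helper above accepts a response only after checking the VSP's ed25519 signature (sent in the VSP-Server-Signature header) over the raw response bytes. Reduced to just that check, and with the package, function and parameter names below being illustrative rather than part of this patch, it amounts to:

    package vspcheck // illustrative

    import (
    	"crypto/ed25519"
    	"encoding/base64"
    	"errors"
    	"fmt"
    )

    // Sketch of the response validation done in vspInfo: decode the header
    // signature, then verify it against the body with the VSP's public key.
    func verifyVSPResponseSketch(pubKey, respBytes []byte, sigHeader string) error {
    	sig, err := base64.StdEncoding.DecodeString(sigHeader)
    	if err != nil {
    		return fmt.Errorf("error validating VSP signature: %v", err)
    	}
    	if !ed25519.Verify(ed25519.PublicKey(pubKey), respBytes, sig) {
    		return errors.New("bad signature from VSP")
    	}
    	return nil
    }
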
- vsps := make([]string, 0) - for url, vspInfo := range vspInfoResponse { - if strings.Contains(network, vspInfo.Network) { - vsps = append(vsps, "https://"+url) - } - } - return vsps, nil -} diff --git a/wallets.go b/wallets.go index c5f97cc7b..7bb87906a 100644 --- a/wallets.go +++ b/wallets.go @@ -1,29 +1,29 @@ package dcrlibwallet -func (mw *MultiWallet) AllWallets() (wallets []*Wallet) { - for _, wallet := range mw.wallets { - wallets = append(wallets, wallet) - } - return wallets -} +// func (mw *MultiWallet) AllWallets() (wallets []*Wallet) { +// for _, wallet := range mw.wallets { +// wallets = append(wallets, wallet) +// } +// return wallets +// } -func (mw *MultiWallet) WalletsIterator() *WalletsIterator { - return &WalletsIterator{ - currentIndex: 0, - wallets: mw.AllWallets(), - } -} +// func (mw *MultiWallet) WalletsIterator() *WalletsIterator { +// return &WalletsIterator{ +// currentIndex: 0, +// wallets: mw.AllWallets(), +// } +// } -func (walletsIterator *WalletsIterator) Next() *Wallet { - if walletsIterator.currentIndex < len(walletsIterator.wallets) { - wallet := walletsIterator.wallets[walletsIterator.currentIndex] - walletsIterator.currentIndex++ - return wallet - } +// func (walletsIterator *WalletsIterator) Next() *Wallet { +// if walletsIterator.currentIndex < len(walletsIterator.wallets) { +// wallet := walletsIterator.wallets[walletsIterator.currentIndex] +// walletsIterator.currentIndex++ +// return wallet +// } - return nil -} +// return nil +// } -func (walletsIterator *WalletsIterator) Reset() { - walletsIterator.currentIndex = 0 -} +// func (walletsIterator *WalletsIterator) Reset() { +// walletsIterator.currentIndex = 0 +// } diff --git a/account_mixer.go b/wallets/dcr/account_mixer.go similarity index 79% rename from account_mixer.go rename to wallets/dcr/account_mixer.go index 957fc5807..620648171 100644 --- a/account_mixer.go +++ b/wallets/dcr/account_mixer.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "context" @@ -23,23 +23,23 @@ const ( MixedAccountBranch = int32(udb.ExternalBranch) ) -func (mw *MultiWallet) AddAccountMixerNotificationListener(accountMixerNotificationListener AccountMixerNotificationListener, uniqueIdentifier string) error { - mw.notificationListenersMu.Lock() - defer mw.notificationListenersMu.Unlock() +func (wallet *Wallet) AddAccountMixerNotificationListener(accountMixerNotificationListener AccountMixerNotificationListener, uniqueIdentifier string) error { + wallet.notificationListenersMu.Lock() + defer wallet.notificationListenersMu.Unlock() - if _, ok := mw.accountMixerNotificationListener[uniqueIdentifier]; ok { + if _, ok := wallet.accountMixerNotificationListener[uniqueIdentifier]; ok { return errors.New(ErrListenerAlreadyExist) } - mw.accountMixerNotificationListener[uniqueIdentifier] = accountMixerNotificationListener + wallet.accountMixerNotificationListener[uniqueIdentifier] = accountMixerNotificationListener return nil } -func (mw *MultiWallet) RemoveAccountMixerNotificationListener(uniqueIdentifier string) { - mw.notificationListenersMu.Lock() - defer mw.notificationListenersMu.Unlock() +func (wallet *Wallet) RemoveAccountMixerNotificationListener(uniqueIdentifier string) { + wallet.notificationListenersMu.Lock() + defer wallet.notificationListenersMu.Unlock() - delete(mw.accountMixerNotificationListener, uniqueIdentifier) + delete(wallet.accountMixerNotificationListener, uniqueIdentifier) } // CreateMixerAccounts creates the two accounts needed for the account mixer. 
This function @@ -133,8 +133,7 @@ func (wallet *Wallet) ClearMixerConfig() { wallet.SetBoolConfigValueForKey(AccountMixerConfigSet, false) } -func (mw *MultiWallet) ReadyToMix(walletID int) (bool, error) { - wallet := mw.WalletWithID(walletID) +func (wallet *Wallet) ReadyToMix(walletID int) (bool, error) { if wallet == nil { return false, errors.New(ErrNotExist) } @@ -150,12 +149,11 @@ func (mw *MultiWallet) ReadyToMix(walletID int) (bool, error) { } // StartAccountMixer starts the automatic account mixer -func (mw *MultiWallet) StartAccountMixer(walletID int, walletPassphrase string) error { - if !mw.IsConnectedToDecredNetwork() { +func (wallet *Wallet) StartAccountMixer(walletID int, walletPassphrase string) error { + if !wallet.IsConnectedToDecredNetwork() { return errors.New(ErrNotConnected) } - wallet := mw.WalletWithID(walletID) if wallet == nil { return errors.New(ErrNotExist) } @@ -192,20 +190,20 @@ func (mw *MultiWallet) StartAccountMixer(walletID int, walletPassphrase string) go func() { log.Info("Running account mixer") - if mw.accountMixerNotificationListener != nil { - mw.publishAccountMixerStarted(walletID) + if wallet.accountMixerNotificationListener != nil { + wallet.publishAccountMixerStarted(walletID) } - ctx, cancel := mw.contextWithShutdownCancel() - wallet.cancelAccountMixer = cancel + ctx, cancel := wallet.contextWithShutdownCancel() + wallet.CancelAccountMixer = cancel err = tb.Run(ctx, []byte(walletPassphrase)) if err != nil { log.Errorf("AccountMixer instance errored: %v", err) } - wallet.cancelAccountMixer = nil - if mw.accountMixerNotificationListener != nil { - mw.publishAccountMixerEnded(walletID) + wallet.CancelAccountMixer = nil + if wallet.accountMixerNotificationListener != nil { + wallet.publishAccountMixerEnded(walletID) } }() @@ -256,19 +254,17 @@ func (wallet *Wallet) readCSPPConfig() *CSPPConfig { } // StopAccountMixer stops the active account mixer -func (mw *MultiWallet) StopAccountMixer(walletID int) error { - - wallet := mw.WalletWithID(walletID) +func (wallet *Wallet) StopAccountMixer(walletID int) error { if wallet == nil { return errors.New(ErrNotExist) } - if wallet.cancelAccountMixer == nil { + if wallet.CancelAccountMixer == nil { return errors.New(ErrInvalid) } - wallet.cancelAccountMixer() - wallet.cancelAccountMixer = nil + wallet.CancelAccountMixer() + wallet.CancelAccountMixer = nil return nil } @@ -281,7 +277,7 @@ func (wallet *Wallet) accountHasMixableOutput(accountNumber int32) (bool, error) // fetch all utxos in account to extract details for the utxos selected by user // use targetAmount = 0 to fetch ALL utxos in account - inputDetail, err := wallet.Internal().SelectInputs(wallet.shutdownContext(), dcrutil.Amount(0), policy) + inputDetail, err := wallet.Internal().SelectInputs(wallet.ShutdownContext(), dcrutil.Amount(0), policy) if err != nil { return false, nil } @@ -300,7 +296,7 @@ func (wallet *Wallet) accountHasMixableOutput(accountNumber int32) (bool, error) return hasMixableOutput, nil } - lockedOutpoints, err := wallet.Internal().LockedOutpoints(wallet.shutdownContext(), accountName) + lockedOutpoints, err := wallet.Internal().LockedOutpoints(wallet.ShutdownContext(), accountName) if err != nil { return hasMixableOutput, nil } @@ -312,23 +308,23 @@ func (wallet *Wallet) accountHasMixableOutput(accountNumber int32) (bool, error) // IsAccountMixerActive returns true if account mixer is active func (wallet *Wallet) IsAccountMixerActive() bool { - return wallet.cancelAccountMixer != nil + return wallet.CancelAccountMixer != nil } 
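
Since this patch moves the mixer entry points from *MultiWallet onto *Wallet, a caller now drives the mixer from the wallet instance directly. A minimal usage sketch, assuming the caller already holds a *dcr.Wallet and its ID (how that instance is obtained is outside this patch):

    package walletusage // illustrative

    import "github.com/planetdecred/dcrlibwallet/wallets/dcr"

    // toggleMixer starts the account mixer if it is idle and mixable funds
    // exist, or stops it if it is already running.
    func toggleMixer(wallet *dcr.Wallet, walletID int, passphrase string) error {
    	if wallet.IsAccountMixerActive() {
    		return wallet.StopAccountMixer(walletID)
    	}
    	ready, err := wallet.ReadyToMix(walletID)
    	if err != nil {
    		return err
    	}
    	if !ready {
    		return nil // no mixable output yet; nothing to start
    	}
    	return wallet.StartAccountMixer(walletID, passphrase)
    }
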
-func (mw *MultiWallet) publishAccountMixerStarted(walletID int) { - mw.notificationListenersMu.RLock() - defer mw.notificationListenersMu.RUnlock() +func (wallet *Wallet) publishAccountMixerStarted(walletID int) { + wallet.notificationListenersMu.RLock() + defer wallet.notificationListenersMu.RUnlock() - for _, accountMixerNotificationListener := range mw.accountMixerNotificationListener { + for _, accountMixerNotificationListener := range wallet.accountMixerNotificationListener { accountMixerNotificationListener.OnAccountMixerStarted(walletID) } } -func (mw *MultiWallet) publishAccountMixerEnded(walletID int) { - mw.notificationListenersMu.RLock() - defer mw.notificationListenersMu.RUnlock() +func (wallet *Wallet) publishAccountMixerEnded(walletID int) { + wallet.notificationListenersMu.RLock() + defer wallet.notificationListenersMu.RUnlock() - for _, accountMixerNotificationListener := range mw.accountMixerNotificationListener { + for _, accountMixerNotificationListener := range wallet.accountMixerNotificationListener { accountMixerNotificationListener.OnAccountMixerEnded(walletID) } } diff --git a/accounts.go b/wallets/dcr/accounts.go similarity index 90% rename from accounts.go rename to wallets/dcr/accounts.go index 4fe3e2624..05ad1d851 100644 --- a/accounts.go +++ b/wallets/dcr/accounts.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "encoding/json" @@ -31,7 +31,7 @@ func (wallet *Wallet) GetAccounts() (string, error) { } func (wallet *Wallet) GetAccountsRaw() (*Accounts, error) { - resp, err := wallet.Internal().Accounts(wallet.shutdownContext()) + resp, err := wallet.Internal().Accounts(wallet.ShutdownContext()) if err != nil { return nil, err } @@ -105,7 +105,7 @@ func (wallet *Wallet) GetAccount(accountNumber int32) (*Account, error) { } func (wallet *Wallet) GetAccountBalance(accountNumber int32) (*Balance, error) { - balance, err := wallet.Internal().AccountBalance(wallet.shutdownContext(), uint32(accountNumber), wallet.RequiredConfirmations()) + balance, err := wallet.Internal().AccountBalance(wallet.ShutdownContext(), uint32(accountNumber), wallet.RequiredConfirmations()) if err != nil { return nil, err } @@ -122,7 +122,7 @@ func (wallet *Wallet) GetAccountBalance(accountNumber int32) (*Balance, error) { } func (wallet *Wallet) SpendableForAccount(account int32) (int64, error) { - bals, err := wallet.Internal().AccountBalance(wallet.shutdownContext(), uint32(account), wallet.RequiredConfirmations()) + bals, err := wallet.Internal().AccountBalance(wallet.ShutdownContext(), uint32(account), wallet.RequiredConfirmations()) if err != nil { log.Error(err) return 0, translateError(err) @@ -138,7 +138,7 @@ func (wallet *Wallet) UnspentOutputs(account int32) ([]*UnspentOutput, error) { // fetch all utxos in account to extract details for the utxos selected by user // use targetAmount = 0 to fetch ALL utxos in account - inputDetail, err := wallet.Internal().SelectInputs(wallet.shutdownContext(), dcrutil.Amount(0), policy) + inputDetail, err := wallet.Internal().SelectInputs(wallet.ShutdownContext(), dcrutil.Amount(0), policy) if err != nil { return nil, err @@ -147,7 +147,7 @@ func (wallet *Wallet) UnspentOutputs(account int32) ([]*UnspentOutput, error) { unspentOutputs := make([]*UnspentOutput, len(inputDetail.Inputs)) for i, input := range inputDetail.Inputs { - outputInfo, err := wallet.Internal().OutputInfo(wallet.shutdownContext(), &input.PreviousOutPoint) + outputInfo, err := wallet.Internal().OutputInfo(wallet.ShutdownContext(), &input.PreviousOutPoint) if err 
!= nil { return nil, err } @@ -197,7 +197,7 @@ func (wallet *Wallet) NextAccount(accountName string) (int32, error) { return -1, errors.New(ErrWalletLocked) } - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() accountNumber, err := wallet.Internal().NextAccount(ctx, accountName) if err != nil { @@ -208,7 +208,7 @@ func (wallet *Wallet) NextAccount(accountName string) (int32, error) { } func (wallet *Wallet) RenameAccount(accountNumber int32, newName string) error { - err := wallet.Internal().RenameAccount(wallet.shutdownContext(), uint32(accountNumber), newName) + err := wallet.Internal().RenameAccount(wallet.ShutdownContext(), uint32(accountNumber), newName) if err != nil { return translateError(err) } @@ -225,21 +225,21 @@ func (wallet *Wallet) AccountName(accountNumber int32) (string, error) { } func (wallet *Wallet) AccountNameRaw(accountNumber uint32) (string, error) { - return wallet.Internal().AccountName(wallet.shutdownContext(), accountNumber) + return wallet.Internal().AccountName(wallet.ShutdownContext(), accountNumber) } func (wallet *Wallet) AccountNumber(accountName string) (int32, error) { - accountNumber, err := wallet.Internal().AccountNumber(wallet.shutdownContext(), accountName) + accountNumber, err := wallet.Internal().AccountNumber(wallet.ShutdownContext(), accountName) return int32(accountNumber), translateError(err) } func (wallet *Wallet) HasAccount(accountName string) bool { - _, err := wallet.Internal().AccountNumber(wallet.shutdownContext(), accountName) + _, err := wallet.Internal().AccountNumber(wallet.ShutdownContext(), accountName) return err == nil } func (wallet *Wallet) HDPathForAccount(accountNumber int32) (string, error) { - cointype, err := wallet.Internal().CoinType(wallet.shutdownContext()) + cointype, err := wallet.Internal().CoinType(wallet.ShutdownContext()) if err != nil { return "", translateError(err) } diff --git a/address.go b/wallets/dcr/address.go similarity index 86% rename from address.go rename to wallets/dcr/address.go index c42b6cf8b..9a69620c6 100644 --- a/address.go +++ b/wallets/dcr/address.go @@ -1,10 +1,11 @@ -package dcrlibwallet +package dcr import ( "fmt" "decred.org/dcrwallet/v2/errors" w "decred.org/dcrwallet/v2/wallet" + "github.com/decred/dcrd/chaincfg/v3" "github.com/decred/dcrd/txscript/v4/stdaddr" ) @@ -17,8 +18,8 @@ type AddressInfo struct { AccountName string } -func (mw *MultiWallet) IsAddressValid(address string) bool { - _, err := stdaddr.DecodeAddress(address, mw.chainParams) +func (wallet *Wallet) IsAddressValid(address string, chainParams *chaincfg.Params) bool { + _, err := stdaddr.DecodeAddress(address, chainParams) return err == nil } @@ -28,7 +29,7 @@ func (wallet *Wallet) HaveAddress(address string) bool { return false } - have, err := wallet.Internal().HaveAddress(wallet.shutdownContext(), addr) + have, err := wallet.Internal().HaveAddress(wallet.ShutdownContext(), addr) if err != nil { return false } @@ -42,7 +43,7 @@ func (wallet *Wallet) AccountOfAddress(address string) (string, error) { return "", translateError(err) } - a, err := wallet.Internal().KnownAddress(wallet.shutdownContext(), addr) + a, err := wallet.Internal().KnownAddress(wallet.ShutdownContext(), addr) if err != nil { return "", translateError(err) } @@ -60,7 +61,7 @@ func (wallet *Wallet) AddressInfo(address string) (*AddressInfo, error) { Address: address, } - known, _ := wallet.Internal().KnownAddress(wallet.shutdownContext(), addr) + known, _ := wallet.Internal().KnownAddress(wallet.ShutdownContext(), addr) if known != 
nil { addressInfo.IsMine = true addressInfo.AccountName = known.AccountName() @@ -104,7 +105,7 @@ func (wallet *Wallet) NextAddress(account int32) (string, error) { // the newly incremented index) is returned below by CurrentAddress. // NOTE: This workaround will be unnecessary once this anomaly is corrected // upstream. - _, err := wallet.Internal().NewExternalAddress(wallet.shutdownContext(), uint32(account), w.WithGapPolicyWrap()) + _, err := wallet.Internal().NewExternalAddress(wallet.ShutdownContext(), uint32(account), w.WithGapPolicyWrap()) if err != nil { log.Errorf("NewExternalAddress error: %w", err) return "", err @@ -119,7 +120,7 @@ func (wallet *Wallet) AddressPubKey(address string) (string, error) { return "", err } - known, err := wallet.Internal().KnownAddress(wallet.shutdownContext(), addr) + known, err := wallet.Internal().KnownAddress(wallet.ShutdownContext(), addr) if err != nil { return "", err } diff --git a/consensus.go b/wallets/dcr/consensus.go similarity index 98% rename from consensus.go rename to wallets/dcr/consensus.go index baae75db5..9a353614b 100644 --- a/consensus.go +++ b/wallets/dcr/consensus.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "fmt" @@ -101,7 +101,7 @@ func (wallet *Wallet) SetVoteChoice(agendaID, choiceID, hash string, passphrase } defer wallet.LockWallet() - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() // get choices choices, _, err := wallet.Internal().AgendaChoices(ctx, ticketHash) // returns saved prefs for current agendas @@ -208,7 +208,7 @@ func (wallet *Wallet) AllVoteAgendas(hash string, newestFirst bool) ([]*Agenda, ticketHash = hash } - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() choices, _, err := wallet.Internal().AgendaChoices(ctx, ticketHash) // returns saved prefs for current agendas if err != nil { return nil, err diff --git a/decodetx.go b/wallets/dcr/decodetx.go similarity index 99% rename from decodetx.go rename to wallets/dcr/decodetx.go index effea74c9..20b4b60c3 100644 --- a/decodetx.go +++ b/wallets/dcr/decodetx.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "fmt" diff --git a/wallets/dcr/errors.go b/wallets/dcr/errors.go new file mode 100644 index 000000000..7bec27cfb --- /dev/null +++ b/wallets/dcr/errors.go @@ -0,0 +1,61 @@ +package dcr + +import ( + "decred.org/dcrwallet/v2/errors" + "github.com/asdine/storm" +) + +const ( + // Error Codes + ErrInsufficientBalance = "insufficient_balance" + ErrInvalid = "invalid" + ErrWalletLocked = "wallet_locked" + ErrWalletDatabaseInUse = "wallet_db_in_use" + ErrWalletNotLoaded = "wallet_not_loaded" + ErrWalletNotFound = "wallet_not_found" + ErrWalletNameExist = "wallet_name_exists" + ErrReservedWalletName = "wallet_name_reserved" + ErrWalletIsRestored = "wallet_is_restored" + ErrWalletIsWatchOnly = "watch_only_wallet" + ErrUnusableSeed = "unusable_seed" + ErrPassphraseRequired = "passphrase_required" + ErrInvalidPassphrase = "invalid_passphrase" + ErrNotConnected = "not_connected" + ErrExist = "exists" + ErrNotExist = "not_exists" + ErrEmptySeed = "empty_seed" + ErrInvalidAddress = "invalid_address" + ErrInvalidAuth = "invalid_auth" + ErrUnavailable = "unavailable" + ErrContextCanceled = "context_canceled" + ErrFailedPrecondition = "failed_precondition" + ErrSyncAlreadyInProgress = "sync_already_in_progress" + ErrNoPeers = "no_peers" + ErrInvalidPeers = "invalid_peers" + ErrListenerAlreadyExist = "listener_already_exist" + ErrLoggerAlreadyRegistered = "logger_already_registered" + 
ErrLogRotatorAlreadyInitialized = "log_rotator_already_initialized" + ErrAddressDiscoveryNotDone = "address_discovery_not_done" + ErrChangingPassphrase = "err_changing_passphrase" + ErrSavingWallet = "err_saving_wallet" + ErrIndexOutOfRange = "err_index_out_of_range" + ErrNoMixableOutput = "err_no_mixable_output" + ErrInvalidVoteBit = "err_invalid_vote_bit" +) + +// todo, should update this method to translate more error kinds. +func translateError(err error) error { + if err, ok := err.(*errors.Error); ok { + switch err.Kind { + case errors.InsufficientBalance: + return errors.New(ErrInsufficientBalance) + case errors.NotExist, storm.ErrNotFound: + return errors.New(ErrNotExist) + case errors.Passphrase: + return errors.New(ErrInvalidPassphrase) + case errors.NoPeers: + return errors.New(ErrNoPeers) + } + } + return err +} diff --git a/wallets/dcr/log.go b/wallets/dcr/log.go new file mode 100644 index 000000000..5b13d7839 --- /dev/null +++ b/wallets/dcr/log.go @@ -0,0 +1,176 @@ +// Copyright (c) 2013-2017 The btcsuite developers +// Copyright (c) 2015-2018 The Decred developers +// Use of this source code is governed by an ISC +// license that can be found in the LICENSE file. + +package dcr + +import ( + "os" + + "decred.org/dcrwallet/v2/errors" + "decred.org/dcrwallet/v2/p2p" + "decred.org/dcrwallet/v2/ticketbuyer" + "decred.org/dcrwallet/v2/wallet" + "decred.org/dcrwallet/v2/wallet/udb" + "github.com/decred/dcrd/addrmgr/v2" + "github.com/decred/dcrd/connmgr/v3" + "github.com/decred/slog" + "github.com/jrick/logrotate/rotator" + "github.com/planetdecred/dcrlibwallet/internal/loader" + "github.com/planetdecred/dcrlibwallet/internal/vsp" + "github.com/planetdecred/dcrlibwallet/spv" +) + +// logWriter implements an io.Writer that outputs to both standard output and +// the write-end pipe of an initialized log rotator. +type logWriter struct{} + +func (logWriter) Write(p []byte) (n int, err error) { + os.Stdout.Write(p) + logRotator.Write(p) + return len(p), nil +} + +// Loggers per subsystem. A single backend logger is created and all subsytem +// loggers created from it will write to the backend. When adding new +// subsystems, add the subsystem logger variable here and to the +// subsystemLoggers map. +// +// Loggers can not be used before the log rotator has been initialized with a +// log file. This must be performed early during application startup by calling +// initLogRotator. +var ( + // backendLog is the logging backend used to create all subsystem loggers. + // The backend must not be used before the log rotator has been initialized, + // or data races and/or nil pointer dereferences will occur. + backendLog = slog.NewBackend(logWriter{}) + + // logRotator is one of the logging outputs. It should be closed on + // application shutdown. + logRotator *rotator.Rotator + + log = backendLog.Logger("DLWL") + loaderLog = backendLog.Logger("LODR") + walletLog = backendLog.Logger("WLLT") + tkbyLog = backendLog.Logger("TKBY") + syncLog = backendLog.Logger("SYNC") + grpcLog = backendLog.Logger("GRPC") + legacyRPCLog = backendLog.Logger("RPCS") + cmgrLog = backendLog.Logger("CMGR") + amgrLog = backendLog.Logger("AMGR") + vspcLog = backendLog.Logger("VSPC") +) + +// Initialize package-global logger variables. 
+func init() { + loader.UseLogger(loaderLog) + wallet.UseLogger(walletLog) + udb.UseLogger(walletLog) + ticketbuyer.UseLogger(tkbyLog) + spv.UseLogger(syncLog) + p2p.UseLogger(syncLog) + connmgr.UseLogger(cmgrLog) + addrmgr.UseLogger(amgrLog) + vsp.UseLogger(vspcLog) +} + +// subsystemLoggers maps each subsystem identifier to its associated logger. +var subsystemLoggers = map[string]slog.Logger{ + "DLWL": log, + "LODR": loaderLog, + "WLLT": walletLog, + "TKBY": tkbyLog, + "SYNC": syncLog, + "GRPC": grpcLog, + "RPCS": legacyRPCLog, + "CMGR": cmgrLog, + "AMGR": amgrLog, + "VSPC": vspcLog, +} + +// initLogRotator initializes the logging rotater to write logs to logFile and +// create roll files in the same directory. It must be called before the +// package-global log rotater variables are used. +func initLogRotator(logFile string) error { + r, err := rotator.New(logFile, 10*1024, false, 3) + if err != nil { + return errors.Errorf("failed to create file rotator: %v", err) + } + + logRotator = r + return nil +} + +// UseLoggers sets the subsystem logs to use the provided loggers. +func UseLoggers(main, loaderLog, walletLog, tkbyLog, + syncLog, cmgrLog, amgrLog slog.Logger) { + log = main + loader.UseLogger(loaderLog) + wallet.UseLogger(walletLog) + udb.UseLogger(walletLog) + ticketbuyer.UseLogger(tkbyLog) + spv.UseLogger(syncLog) + p2p.UseLogger(syncLog) + connmgr.UseLogger(cmgrLog) + addrmgr.UseLogger(amgrLog) +} + +// UseLogger sets the subsystem logs to use the provided logger. +func UseLogger(logger slog.Logger) { + UseLoggers(logger, logger, logger, logger, logger, logger, logger) +} + +// RegisterLogger should be called before logRotator is initialized. +func RegisterLogger(tag string) (slog.Logger, error) { + if logRotator != nil { + return nil, errors.E(ErrLogRotatorAlreadyInitialized) + } + + if _, exists := subsystemLoggers[tag]; exists { + return nil, errors.E(ErrLoggerAlreadyRegistered) + } + + logger := backendLog.Logger(tag) + subsystemLoggers[tag] = logger + + return logger, nil +} + +func SetLogLevels(logLevel string) { + _, ok := slog.LevelFromString(logLevel) + if !ok { + return + } + + // Configure all sub-systems with the new logging level. Dynamically + // create loggers as needed. + for subsystemID := range subsystemLoggers { + setLogLevel(subsystemID, logLevel) + } +} + +// setLogLevel sets the logging level for provided subsystem. Invalid +// subsystems are ignored. Uninitialized subsystems are dynamically created as +// needed. +func setLogLevel(subsystemID string, logLevel string) { + // Ignore invalid subsystems. + logger, ok := subsystemLoggers[subsystemID] + if !ok { + return + } + + // Defaults to info if the log level is invalid. + level, _ := slog.LevelFromString(logLevel) + logger.SetLevel(level) +} + +// Log writes a message to the log using LevelInfo. +func Log(m string) { + log.Info(m) +} + +// LogT writes a tagged message to the log using LevelInfo. 
+func LogT(tag, m string) { + log.Infof("%s: %s", tag, m) +} diff --git a/message.go b/wallets/dcr/message.go similarity index 73% rename from message.go rename to wallets/dcr/message.go index 989a1c045..5d21da557 100644 --- a/message.go +++ b/wallets/dcr/message.go @@ -1,9 +1,11 @@ -package dcrlibwallet +package dcr import ( "decred.org/dcrwallet/v2/errors" w "decred.org/dcrwallet/v2/wallet" "github.com/decred/dcrd/txscript/v4/stdaddr" + "github.com/decred/dcrd/chaincfg/v3" + ) func (wallet *Wallet) SignMessage(passphrase []byte, address string, message string) ([]byte, error) { @@ -13,10 +15,10 @@ func (wallet *Wallet) SignMessage(passphrase []byte, address string, message str } defer wallet.LockWallet() - return wallet.signMessage(address, message) + return wallet.SignMessageDirect(address, message) } -func (wallet *Wallet) signMessage(address string, message string) ([]byte, error) { +func (wallet *Wallet) SignMessageDirect(address string, message string) ([]byte, error) { addr, err := stdaddr.DecodeAddress(address, wallet.chainParams) if err != nil { return nil, translateError(err) @@ -31,7 +33,7 @@ func (wallet *Wallet) signMessage(address string, message string) ([]byte, error return nil, errors.New(ErrInvalidAddress) } - sig, err := wallet.Internal().SignMessage(wallet.shutdownContext(), message, addr) + sig, err := wallet.Internal().SignMessage(wallet.ShutdownContext(), message, addr) if err != nil { return nil, translateError(err) } @@ -39,10 +41,10 @@ func (wallet *Wallet) signMessage(address string, message string) ([]byte, error return sig, nil } -func (mw *MultiWallet) VerifyMessage(address string, message string, signatureBase64 string) (bool, error) { +func (wallet *Wallet) VerifyMessage(address string, message string, signatureBase64 string, chainParams *chaincfg.Params) (bool, error) { var valid bool - addr, err := stdaddr.DecodeAddress(address, mw.chainParams) + addr, err := stdaddr.DecodeAddress(address, chainParams) if err != nil { return false, translateError(err) } @@ -61,7 +63,7 @@ func (mw *MultiWallet) VerifyMessage(address string, message string, signatureBa return false, errors.New(ErrInvalidAddress) } - valid, err = w.VerifyMessage(message, addr, signature, mw.chainParams) + valid, err = w.VerifyMessage(message, addr, signature, chainParams) if err != nil { return false, translateError(err) } diff --git a/wallets/dcr/multiwallet_config.go b/wallets/dcr/multiwallet_config.go new file mode 100644 index 000000000..b4b33a166 --- /dev/null +++ b/wallets/dcr/multiwallet_config.go @@ -0,0 +1,152 @@ +package dcr + +import ( + // "github.com/asdine/storm" +) + +const ( + userConfigBucketName = "user_config" + + LogLevelConfigKey = "log_level" + + SpendUnconfirmedConfigKey = "spend_unconfirmed" + CurrencyConversionConfigKey = "currency_conversion_option" + + IsStartupSecuritySetConfigKey = "startup_security_set" + StartupSecurityTypeConfigKey = "startup_security_type" + UseBiometricConfigKey = "use_biometric" + + IncomingTxNotificationsConfigKey = "tx_notification_enabled" + BeepNewBlocksConfigKey = "beep_new_blocks" + + SyncOnCellularConfigKey = "always_sync" + NetworkModeConfigKey = "network_mode" + SpvPersistentPeerAddressesConfigKey = "spv_peer_addresses" + UserAgentConfigKey = "user_agent" + + PoliteiaNotificationConfigKey = "politeia_notification" + + LastTxHashConfigKey = "last_tx_hash" + + KnownVSPsConfigKey = "known_vsps" + + TicketBuyerVSPHostConfigKey = "tb_vsp_host" + TicketBuyerWalletConfigKey = "tb_wallet_id" + TicketBuyerAccountConfigKey = 
"tb_account_number" + TicketBuyerATMConfigKey = "tb_amount_to_maintain" + + PassphraseTypePin int32 = 0 + PassphraseTypePass int32 = 1 +) + +type configSaveFn = func(key string, value interface{}) error +type configReadFn = func(multiwallet bool, key string, valueOut interface{}) error + +// func (mw *MultiWallet) walletConfigSetFn(walletID int) configSaveFn { +// return func(key string, value interface{}) error { +// walletUniqueKey := WalletUniqueConfigKey(walletID, key) +// return mw.db.Set(userConfigBucketName, walletUniqueKey, value) +// } +// } + +// func (mw *MultiWallet) walletConfigReadFn(walletID int) configReadFn { +// return func(multiwallet bool, key string, valueOut interface{}) error { +// if !multiwallet { +// key = WalletUniqueConfigKey(walletID, key) +// } +// return mw.db.Get(userConfigBucketName, key, valueOut) +// } +// } + +// func (mw *MultiWallet) SaveUserConfigValue(key string, value interface{}) { +// err := mw.db.Set(userConfigBucketName, key, value) +// if err != nil { +// log.Errorf("error setting config value for key: %s, error: %v", key, err) +// } +// } + +// func (mw *MultiWallet) ReadUserConfigValue(key string, valueOut interface{}) error { +// err := mw.db.Get(userConfigBucketName, key, valueOut) +// if err != nil && err != storm.ErrNotFound { +// log.Errorf("error reading config value for key: %s, error: %v", key, err) +// } +// return err +// } + +// func (mw *MultiWallet) DeleteUserConfigValueForKey(key string) { +// err := mw.db.Delete(userConfigBucketName, key) +// if err != nil { +// log.Errorf("error deleting config value for key: %s, error: %v", key, err) +// } +// } + +// func (mw *MultiWallet) ClearConfig() { +// err := mw.db.Drop(userConfigBucketName) +// if err != nil { +// log.Errorf("error deleting config bucket: %v", err) +// } +// } + +// func (mw *MultiWallet) SetBoolConfigValueForKey(key string, value bool) { +// mw.SaveUserConfigValue(key, value) +// } + +// func (mw *MultiWallet) SetDoubleConfigValueForKey(key string, value float64) { +// mw.SaveUserConfigValue(key, value) +// } + +// func (mw *MultiWallet) SetIntConfigValueForKey(key string, value int) { +// mw.SaveUserConfigValue(key, value) +// } + +// func (mw *MultiWallet) SetInt32ConfigValueForKey(key string, value int32) { +// mw.SaveUserConfigValue(key, value) +// } + +// func (mw *MultiWallet) SetLongConfigValueForKey(key string, value int64) { +// mw.SaveUserConfigValue(key, value) +// } + +// func (mw *MultiWallet) SetStringConfigValueForKey(key, value string) { +// mw.SaveUserConfigValue(key, value) +// } + +// func (mw *MultiWallet) ReadBoolConfigValueForKey(key string, defaultValue bool) (valueOut bool) { +// if err := mw.ReadUserConfigValue(key, &valueOut); err == storm.ErrNotFound { +// valueOut = defaultValue +// } +// return +// } + +// func (mw *MultiWallet) ReadDoubleConfigValueForKey(key string, defaultValue float64) (valueOut float64) { +// if err := mw.ReadUserConfigValue(key, &valueOut); err == storm.ErrNotFound { +// valueOut = defaultValue +// } +// return +// } + +// func (mw *MultiWallet) ReadIntConfigValueForKey(key string, defaultValue int) (valueOut int) { +// if err := mw.ReadUserConfigValue(key, &valueOut); err == storm.ErrNotFound { +// valueOut = defaultValue +// } +// return +// } + +// func (mw *MultiWallet) ReadInt32ConfigValueForKey(key string, defaultValue int32) (valueOut int32) { +// if err := mw.ReadUserConfigValue(key, &valueOut); err == storm.ErrNotFound { +// valueOut = defaultValue +// } +// return +// } + +// func (mw *MultiWallet) 
ReadLongConfigValueForKey(key string, defaultValue int64) (valueOut int64) { +// if err := mw.ReadUserConfigValue(key, &valueOut); err == storm.ErrNotFound { +// valueOut = defaultValue +// } +// return +// } + +// func (mw *MultiWallet) ReadStringConfigValueForKey(key string) (valueOut string) { +// mw.ReadUserConfigValue(key, &valueOut) +// return +// } diff --git a/wallets/dcr/sync.go b/wallets/dcr/sync.go new file mode 100644 index 000000000..fa299fe8b --- /dev/null +++ b/wallets/dcr/sync.go @@ -0,0 +1,489 @@ +package dcr + +import ( + "context" + "encoding/json" + "fmt" + "net" + "sort" + "strings" + "sync" + + "decred.org/dcrwallet/v2/errors" + "decred.org/dcrwallet/v2/p2p" + w "decred.org/dcrwallet/v2/wallet" + "github.com/decred/dcrd/addrmgr/v2" + "github.com/planetdecred/dcrlibwallet/spv" +) + +// reading/writing of properties of this struct are protected by mutex.x +type SyncData struct { + mu sync.RWMutex + + SyncProgressListeners map[string]SyncProgressListener + showLogs bool + + synced bool + syncing bool + cancelSync context.CancelFunc + cancelRescan context.CancelFunc + syncCanceled chan struct{} + + // Flag to notify syncCanceled callback if the sync was canceled so as to be restarted. + restartSyncRequested bool + + rescanning bool + connectedPeers int32 + + *activeSyncData +} + +// reading/writing of properties of this struct are protected by syncData.mu. +type activeSyncData struct { + syncer *spv.Syncer + + syncStage int32 + + cfiltersFetchProgress CFiltersFetchProgressReport + headersFetchProgress HeadersFetchProgressReport + addressDiscoveryProgress AddressDiscoveryProgressReport + headersRescanProgress HeadersRescanProgressReport + + addressDiscoveryCompletedOrCanceled chan bool + + rescanStartTime int64 + + totalInactiveSeconds int64 +} + +const ( + InvalidSyncStage = -1 + CFiltersFetchSyncStage = 0 + HeadersFetchSyncStage = 1 + AddressDiscoverySyncStage = 2 + HeadersRescanSyncStage = 3 +) + +func (wallet *Wallet) initActiveSyncData() { + + cfiltersFetchProgress := CFiltersFetchProgressReport{ + GeneralSyncProgress: &GeneralSyncProgress{}, + beginFetchCFiltersTimeStamp: 0, + startCFiltersHeight: -1, + cfiltersFetchTimeSpent: 0, + totalFetchedCFiltersCount: 0, + } + + headersFetchProgress := HeadersFetchProgressReport{ + GeneralSyncProgress: &GeneralSyncProgress{}, + beginFetchTimeStamp: -1, + headersFetchTimeSpent: -1, + totalFetchedHeadersCount: 0, + } + + addressDiscoveryProgress := AddressDiscoveryProgressReport{ + GeneralSyncProgress: &GeneralSyncProgress{}, + addressDiscoveryStartTime: -1, + totalDiscoveryTimeSpent: -1, + } + + headersRescanProgress := HeadersRescanProgressReport{} + headersRescanProgress.GeneralSyncProgress = &GeneralSyncProgress{} + + wallet.syncData.mu.Lock() + wallet.syncData.activeSyncData = &activeSyncData{ + syncStage: InvalidSyncStage, + + cfiltersFetchProgress: cfiltersFetchProgress, + headersFetchProgress: headersFetchProgress, + addressDiscoveryProgress: addressDiscoveryProgress, + headersRescanProgress: headersRescanProgress, + } + wallet.syncData.mu.Unlock() +} + +func (wallet *Wallet) IsSyncProgressListenerRegisteredFor(uniqueIdentifier string) bool { + wallet.syncData.mu.RLock() + _, exists := wallet.syncData.SyncProgressListeners[uniqueIdentifier] + wallet.syncData.mu.RUnlock() + return exists +} + +func (wallet *Wallet) AddSyncProgressListener(syncProgressListener SyncProgressListener, uniqueIdentifier string) error { + if wallet.IsSyncProgressListenerRegisteredFor(uniqueIdentifier) { + return 
errors.New(ErrListenerAlreadyExist) + } + + wallet.syncData.mu.Lock() + wallet.syncData.SyncProgressListeners[uniqueIdentifier] = syncProgressListener + wallet.syncData.mu.Unlock() + + // If sync is already on, notify this newly added listener of the current progress report. + return wallet.PublishLastSyncProgress(uniqueIdentifier) +} + +func (wallet *Wallet) RemoveSyncProgressListener(uniqueIdentifier string) { + wallet.syncData.mu.Lock() + delete(wallet.syncData.SyncProgressListeners, uniqueIdentifier) + wallet.syncData.mu.Unlock() +} + +func (wallet *Wallet) syncProgressListeners() []SyncProgressListener { + wallet.syncData.mu.RLock() + defer wallet.syncData.mu.RUnlock() + + listeners := make([]SyncProgressListener, 0, len(wallet.syncData.SyncProgressListeners)) + for _, listener := range wallet.syncData.SyncProgressListeners { + listeners = append(listeners, listener) + } + + return listeners +} + +func (wallet *Wallet) PublishLastSyncProgress(uniqueIdentifier string) error { + wallet.syncData.mu.RLock() + defer wallet.syncData.mu.RUnlock() + + syncProgressListener, exists := wallet.syncData.SyncProgressListeners[uniqueIdentifier] + if !exists { + return errors.New(ErrInvalid) + } + + if wallet.syncData.syncing && wallet.syncData.activeSyncData != nil { + switch wallet.syncData.activeSyncData.syncStage { + case HeadersFetchSyncStage: + syncProgressListener.OnHeadersFetchProgress(&wallet.syncData.headersFetchProgress) + case AddressDiscoverySyncStage: + syncProgressListener.OnAddressDiscoveryProgress(&wallet.syncData.addressDiscoveryProgress) + case HeadersRescanSyncStage: + syncProgressListener.OnHeadersRescanProgress(&wallet.syncData.headersRescanProgress) + } + } + + return nil +} + +func (wallet *Wallet) EnableSyncLogs() { + wallet.syncData.mu.Lock() + wallet.syncData.showLogs = true + wallet.syncData.mu.Unlock() +} + +func (wallet *Wallet) SyncInactiveForPeriod(totalInactiveSeconds int64) { + wallet.syncData.mu.Lock() + defer wallet.syncData.mu.Unlock() + + if !wallet.syncData.syncing || wallet.syncData.activeSyncData == nil { + log.Debug("Not accounting for inactive time, wallet is not syncing.") + return + } + + wallet.syncData.totalInactiveSeconds += totalInactiveSeconds + if wallet.syncData.connectedPeers == 0 { + // assume it would take another 60 seconds to reconnect to peers + wallet.syncData.totalInactiveSeconds += 60 + } +} + +func (wallet *Wallet) SpvSync() error { + // prevent an attempt to sync when the previous syncing has not been canceled + if wallet.IsSyncing() || wallet.IsSynced() { + return errors.New(ErrSyncAlreadyInProgress) + } + + addr := &net.TCPAddr{IP: net.ParseIP("::1"), Port: 0} + addrManager := addrmgr.New(wallet.rootDir, net.LookupIP) // TODO: be mindful of tor + lp := p2p.NewLocalPeer(wallet.chainParams, addr, addrManager) + + var validPeerAddresses []string + peerAddresses := wallet.ReadStringConfigValueForKey(SpvPersistentPeerAddressesConfigKey, "") + if peerAddresses != "" { + addresses := strings.Split(peerAddresses, ";") + for _, address := range addresses { + peerAddress, err := NormalizeAddress(address, wallet.chainParams.DefaultPort) + if err != nil { + log.Errorf("SPV peer address(%s) is invalid: %v", peerAddress, err) + } else { + validPeerAddresses = append(validPeerAddresses, peerAddress) + } + } + + if len(validPeerAddresses) == 0 { + return errors.New(ErrInvalidPeers) + } + } + + // init activeSyncData to be used to hold data used + // to calculate sync estimates only during sync + wallet.initActiveSyncData() + + wallets := 
make(map[int]*w.Wallet) + wallets[wallet.ID] = wallet.Internal() + wallet.WaitingForHeaders = true + wallet.Syncing = true + + syncer := spv.NewSyncer(wallets, lp) + syncer.SetNotifications(wallet.spvSyncNotificationCallbacks()) + if len(validPeerAddresses) > 0 { + syncer.SetPersistentPeers(validPeerAddresses) + } + + ctx, cancel := wallet.contextWithShutdownCancel() + + var restartSyncRequested bool + + wallet.syncData.mu.Lock() + restartSyncRequested = wallet.syncData.restartSyncRequested + wallet.syncData.restartSyncRequested = false + wallet.syncData.syncing = true + wallet.syncData.cancelSync = cancel + wallet.syncData.syncCanceled = make(chan struct{}) + wallet.syncData.syncer = syncer + wallet.syncData.mu.Unlock() + + for _, listener := range wallet.syncProgressListeners() { + listener.OnSyncStarted(restartSyncRequested) + } + + // syncer.Run uses a wait group to block the thread until the sync context + // expires or is canceled or some other error occurs such as + // losing connection to all persistent peers. + go func() { + syncError := syncer.Run(ctx) + //sync has ended or errored + if syncError != nil { + if syncError == context.DeadlineExceeded { + wallet.notifySyncError(errors.Errorf("SPV synchronization deadline exceeded: %v", syncError)) + } else if syncError == context.Canceled { + close(wallet.syncData.syncCanceled) + wallet.notifySyncCanceled() + } else { + wallet.notifySyncError(syncError) + } + } + + //reset sync variables + wallet.resetSyncData() + }() + return nil +} + +func (wallet *Wallet) RestartSpvSync() error { + wallet.syncData.mu.Lock() + wallet.syncData.restartSyncRequested = true + wallet.syncData.mu.Unlock() + + wallet.CancelSync() // necessary to unset the network backend. + return wallet.SpvSync() +} + +func (wallet *Wallet) CancelSync() { + wallet.syncData.mu.RLock() + cancelSync := wallet.syncData.cancelSync + wallet.syncData.mu.RUnlock() + + if cancelSync != nil { + log.Info("Canceling sync. May take a while for sync to fully cancel.") + + // Stop running cspp mixers + if wallet.IsAccountMixerActive() { + log.Infof("[%d] Stopping cspp mixer", wallet.ID) + err := wallet.StopAccountMixer(wallet.ID) + if err != nil { + log.Errorf("[%d] Error stopping cspp mixer: %v", wallet.ID, err) + } + } + + // Cancel the context used for syncer.Run in spvSync(). + // This may not immediately cause the sync process to terminate, + // but when it eventually terminates, syncer.Run will return `err == context.Canceled`. + cancelSync() + + // When sync terminates and syncer.Run returns `err == context.Canceled`, + // we will get notified on this channel. 
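+		// Note: this receive blocks the caller until the goroutine started by
+		// SpvSync observes context.Canceled and closes syncData.syncCanceled,
+		// so CancelSync only returns after the syncer has fully wound down.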
+ <-wallet.syncData.syncCanceled + + log.Info("Sync fully canceled.") + } +} + +func (wallet *Wallet) IsWaiting() bool { + return wallet.WaitingForHeaders +} + +func (wallet *Wallet) IsSynced() bool { + return wallet.Synced +} + +func (wallet *Wallet) IsSyncing() bool { + return wallet.Syncing +} + +func (wallet *Wallet) IsConnectedToDecredNetwork() bool { + wallet.syncData.mu.RLock() + defer wallet.syncData.mu.RUnlock() + return wallet.syncData.syncing || wallet.syncData.synced +} + +// func (wallet *Wallet) IsSynced() bool { +// wallet.syncData.mu.RLock() +// defer wallet.syncData.mu.RUnlock() +// return wallet.syncData.synced +// } + +// func (wallet *Wallet) IsSyncing() bool { +// wallet.syncData.mu.RLock() +// defer wallet.syncData.mu.RUnlock() +// return wallet.syncData.syncing +// } + +func (wallet *Wallet) CurrentSyncStage() int32 { + wallet.syncData.mu.RLock() + defer wallet.syncData.mu.RUnlock() + + if wallet.syncData != nil && wallet.syncData.syncing { + return wallet.syncData.syncStage + } + return InvalidSyncStage +} + +func (wallet *Wallet) GeneralSyncProgress() *GeneralSyncProgress { + wallet.syncData.mu.RLock() + defer wallet.syncData.mu.RUnlock() + + if wallet.syncData != nil && wallet.syncData.syncing { + switch wallet.syncData.syncStage { + case HeadersFetchSyncStage: + return wallet.syncData.headersFetchProgress.GeneralSyncProgress + case AddressDiscoverySyncStage: + return wallet.syncData.addressDiscoveryProgress.GeneralSyncProgress + case HeadersRescanSyncStage: + return wallet.syncData.headersRescanProgress.GeneralSyncProgress + case CFiltersFetchSyncStage: + return wallet.syncData.cfiltersFetchProgress.GeneralSyncProgress + } + } + + return nil +} + +func (wallet *Wallet) ConnectedPeers() int32 { + wallet.syncData.mu.RLock() + defer wallet.syncData.mu.RUnlock() + return wallet.syncData.connectedPeers +} + +func (wallet *Wallet) PeerInfoRaw() ([]PeerInfo, error) { + if !wallet.IsConnectedToDecredNetwork() { + return nil, errors.New(ErrNotConnected) + } + + syncer := wallet.syncData.syncer + + infos := make([]PeerInfo, 0, len(syncer.GetRemotePeers())) + for _, rp := range syncer.GetRemotePeers() { + info := PeerInfo{ + ID: int32(rp.ID()), + Addr: rp.RemoteAddr().String(), + AddrLocal: rp.LocalAddr().String(), + Services: fmt.Sprintf("%08d", uint64(rp.Services())), + Version: rp.Pver(), + SubVer: rp.UA(), + StartingHeight: int64(rp.InitialHeight()), + BanScore: int32(rp.BanScore()), + } + + infos = append(infos, info) + } + + sort.Slice(infos, func(i, j int) bool { + return infos[i].ID < infos[j].ID + }) + + return infos, nil +} + +func (wallet *Wallet) PeerInfo() (string, error) { + infos, err := wallet.PeerInfoRaw() + if err != nil { + return "", err + } + + result, _ := json.Marshal(infos) + return string(result), nil +} + +// func (wallet *Wallet) GetBestBlock() *BlockInfo { +// var bestBlock int32 = -1 +// var blockInfo *BlockInfo +// for _, wallet := range wallet.wallets { +// if !wallet.WalletOpened() { +// continue +// } + +// walletBestBLock := wallet.GetBestBlock() +// if walletBestBLock > bestBlock || bestBlock == -1 { +// bestBlock = walletBestBLock +// blockInfo = &BlockInfo{Height: bestBlock, Timestamp: wallet.GetBestBlockTimeStamp()} +// } +// } + +// return blockInfo +// } + +func (wallet *Wallet) GetLowestBlock() *BlockInfo { + var lowestBlock int32 = -1 + var blockInfo *BlockInfo + // for _, wallet := range wallet.wallets { + // if !wallet.WalletOpened() { + // continue + // } + walletBestBLock := wallet.GetBestBlock() + if walletBestBLock < 
lowestBlock || lowestBlock == -1 { + lowestBlock = walletBestBLock + blockInfo = &BlockInfo{Height: lowestBlock, Timestamp: wallet.GetBestBlockTimeStamp()} + } + // } + + return blockInfo +} + +func (wallet *Wallet) GetBestBlock() int32 { + if wallet.Internal() == nil { + // This method is sometimes called after a wallet is deleted and causes crash. + log.Error("Attempting to read best block height without a loaded wallet.") + return 0 + } + + _, height := wallet.Internal().MainChainTip(wallet.ShutdownContext()) + return height +} + +func (wallet *Wallet) GetBestBlockTimeStamp() int64 { + if wallet.Internal() == nil { + // This method is sometimes called after a wallet is deleted and causes crash. + log.Error("Attempting to read best block timestamp without a loaded wallet.") + return 0 + } + + ctx := wallet.ShutdownContext() + _, height := wallet.Internal().MainChainTip(ctx) + identifier := w.NewBlockIdentifierFromHeight(height) + info, err := wallet.Internal().BlockInfo(ctx, identifier) + if err != nil { + log.Error(err) + return 0 + } + return info.Timestamp +} + +// func (wallet *Wallet) GetLowestBlockTimestamp() int64 { +// var timestamp int64 = -1 +// for _, wallet := range wallet.wallets { +// bestBlockTimestamp := wallet.GetBestBlockTimeStamp() +// if bestBlockTimestamp < timestamp || timestamp == -1 { +// timestamp = bestBlockTimestamp +// } +// } +// return timestamp +// } diff --git a/wallets/dcr/syncnotification.go b/wallets/dcr/syncnotification.go new file mode 100644 index 000000000..877741d8f --- /dev/null +++ b/wallets/dcr/syncnotification.go @@ -0,0 +1,682 @@ +package dcr + +import ( + "math" + "time" + + "github.com/planetdecred/dcrlibwallet/spv" + // "golang.org/x/sync/errgroup" +) + +func (w *Wallet) spvSyncNotificationCallbacks() *spv.Notifications { + return &spv.Notifications{ + PeerConnected: func(peerCount int32, addr string) { + w.handlePeerCountUpdate(peerCount) + }, + PeerDisconnected: func(peerCount int32, addr string) { + w.handlePeerCountUpdate(peerCount) + }, + Synced: w.synced, + FetchHeadersStarted: w.fetchHeadersStarted, + FetchHeadersProgress: w.fetchHeadersProgress, + FetchHeadersFinished: w.fetchHeadersFinished, + FetchMissingCFiltersStarted: w.fetchCFiltersStarted, + FetchMissingCFiltersProgress: w.fetchCFiltersProgress, + FetchMissingCFiltersFinished: w.fetchCFiltersEnded, + DiscoverAddressesStarted: w.discoverAddressesStarted, + DiscoverAddressesFinished: w.discoverAddressesFinished, + RescanStarted: w.rescanStarted, + RescanProgress: w.rescanProgress, + RescanFinished: w.rescanFinished, + } +} + +func (w *Wallet) handlePeerCountUpdate(peerCount int32) { + w.syncData.mu.Lock() + w.syncData.connectedPeers = peerCount + shouldLog := w.syncData.showLogs && w.syncData.syncing + w.syncData.mu.Unlock() + + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnPeerConnectedOrDisconnected(peerCount) + } + + if shouldLog { + if peerCount == 1 { + log.Infof("Connected to %d peer on %s.", peerCount, w.chainParams.Name) + } else { + log.Infof("Connected to %d peers on %s.", peerCount, w.chainParams.Name) + } + } +} + +// Fetch CFilters Callbacks + +func (w *Wallet) fetchCFiltersStarted(walletID int) { + w.syncData.mu.Lock() + w.syncData.activeSyncData.syncStage = CFiltersFetchSyncStage + w.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp = time.Now().Unix() + w.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount = 0 + showLogs := w.syncData.showLogs + w.syncData.mu.Unlock() + + if 
showLogs { + log.Info("Step 1 of 3 - fetching cfilters.") + } +} + +func (w *Wallet) fetchCFiltersProgress(walletID int, startCFiltersHeight, endCFiltersHeight int32) { + + // lock the mutex before reading and writing to w.syncData.* + w.syncData.mu.Lock() + + if w.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight == -1 { + w.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight = startCFiltersHeight + } + + // wallet := w.WalletWithID(walletID) + w.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount += endCFiltersHeight - startCFiltersHeight + + totalCFiltersToFetch := w.GetBestBlock() - w.syncData.activeSyncData.cfiltersFetchProgress.startCFiltersHeight + // cfiltersLeftToFetch := totalCFiltersToFetch - w.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount + + cfiltersFetchProgress := float64(w.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount) / float64(totalCFiltersToFetch) + + // If there was some period of inactivity, + // assume that this process started at some point in the future, + // thereby accounting for the total reported time of inactivity. + w.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp += w.syncData.activeSyncData.totalInactiveSeconds + w.syncData.activeSyncData.totalInactiveSeconds = 0 + + timeTakenSoFar := time.Now().Unix() - w.syncData.activeSyncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp + if timeTakenSoFar < 1 { + timeTakenSoFar = 1 + } + estimatedTotalCFiltersFetchTime := float64(timeTakenSoFar) / cfiltersFetchProgress + + // Use CFilters fetch rate to estimate headers fetch time. + cfiltersFetchRate := float64(w.syncData.activeSyncData.cfiltersFetchProgress.totalFetchedCFiltersCount) / float64(timeTakenSoFar) + estimatedHeadersLeftToFetch := w.estimateBlockHeadersCountAfter(w.GetBestBlockTimeStamp()) + estimatedTotalHeadersFetchTime := float64(estimatedHeadersLeftToFetch) / cfiltersFetchRate + // increase estimated value by FetchPercentage + estimatedTotalHeadersFetchTime /= FetchPercentage + + estimatedDiscoveryTime := estimatedTotalHeadersFetchTime * DiscoveryPercentage + estimatedRescanTime := estimatedTotalHeadersFetchTime * RescanPercentage + estimatedTotalSyncTime := estimatedTotalCFiltersFetchTime + estimatedTotalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime + + totalSyncProgress := float64(timeTakenSoFar) / estimatedTotalSyncTime + totalTimeRemainingSeconds := int64(math.Round(estimatedTotalSyncTime)) - timeTakenSoFar + + // update cfilters fetching progress report including total progress percentage and total time remaining + w.syncData.activeSyncData.cfiltersFetchProgress.TotalCFiltersToFetch = totalCFiltersToFetch + w.syncData.activeSyncData.cfiltersFetchProgress.CurrentCFilterHeight = startCFiltersHeight + w.syncData.activeSyncData.cfiltersFetchProgress.CFiltersFetchProgress = roundUp(cfiltersFetchProgress * 100.0) + w.syncData.activeSyncData.cfiltersFetchProgress.TotalSyncProgress = roundUp(totalSyncProgress * 100.0) + w.syncData.activeSyncData.cfiltersFetchProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + + w.syncData.mu.Unlock() + + // notify progress listener of estimated progress report + w.publishFetchCFiltersProgress() + + cfiltersFetchTimeRemaining := estimatedTotalCFiltersFetchTime - float64(timeTakenSoFar) + debugInfo := &DebugInfo{ + timeTakenSoFar, + totalTimeRemainingSeconds, + timeTakenSoFar, + int64(math.Round(cfiltersFetchTimeRemaining)), + } + 
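+	// The DebugInfo values are positional: total sync time elapsed so far,
+	// total time remaining, then the elapsed and remaining time for the
+	// current stage (cfilters fetching here). The same ordering is used by
+	// the other stage callbacks below.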
w.publishDebugInfo(debugInfo) +} + +func (w *Wallet) publishFetchCFiltersProgress() { + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnCFiltersFetchProgress(&w.syncData.cfiltersFetchProgress) + } +} + +func (w *Wallet) fetchCFiltersEnded(walletID int) { + w.syncData.mu.Lock() + defer w.syncData.mu.Unlock() + + w.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent = time.Now().Unix() - w.syncData.cfiltersFetchProgress.beginFetchCFiltersTimeStamp + + // If there is some period of inactivity reported at this stage, + // subtract it from the total stage time. + w.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent -= w.syncData.totalInactiveSeconds + w.syncData.activeSyncData.totalInactiveSeconds = 0 +} + +// Fetch Headers Callbacks + +func (w *Wallet) fetchHeadersStarted(peerInitialHeight int32) { + if !w.IsSyncing() { + return + } + + w.syncData.mu.RLock() + headersFetchingStarted := w.syncData.headersFetchProgress.beginFetchTimeStamp != -1 + showLogs := w.syncData.showLogs + w.syncData.mu.RUnlock() + + if headersFetchingStarted { + // This function gets called for each newly connected peer so + // ignore if headers fetching was already started. + return + } + + w.WaitingForHeaders = true + + lowestBlockHeight := w.GetLowestBlock().Height + + w.syncData.mu.Lock() + w.syncData.activeSyncData.syncStage = HeadersFetchSyncStage + w.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp = time.Now().Unix() + w.syncData.activeSyncData.headersFetchProgress.startHeaderHeight = lowestBlockHeight + w.syncData.headersFetchProgress.totalFetchedHeadersCount = 0 + w.syncData.activeSyncData.totalInactiveSeconds = 0 + w.syncData.mu.Unlock() + + if showLogs { + log.Infof("Step 1 of 3 - fetching %d block headers.", peerInitialHeight-lowestBlockHeight) + } +} + +func (w *Wallet) fetchHeadersProgress(lastFetchedHeaderHeight int32, lastFetchedHeaderTime int64) { + if !w.IsSyncing() { + return + } + + w.syncData.mu.RLock() + headersFetchingCompleted := w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent != -1 + w.syncData.mu.RUnlock() + + if headersFetchingCompleted { + // This function gets called for each newly connected peer so ignore + // this call if the headers fetching phase was previously completed. + return + } + + // for _, wallet := range w.wallets { + if w.WaitingForHeaders { + w.WaitingForHeaders = w.GetBestBlock() > lastFetchedHeaderHeight + } + // } + + // lock the mutex before reading and writing to w.syncData.* + w.syncData.mu.Lock() + + if lastFetchedHeaderHeight > w.syncData.activeSyncData.headersFetchProgress.startHeaderHeight { + w.syncData.activeSyncData.headersFetchProgress.totalFetchedHeadersCount = lastFetchedHeaderHeight - w.syncData.activeSyncData.headersFetchProgress.startHeaderHeight + } + + headersLeftToFetch := w.estimateBlockHeadersCountAfter(lastFetchedHeaderTime) + totalHeadersToFetch := lastFetchedHeaderHeight + headersLeftToFetch + headersFetchProgress := float64(w.syncData.activeSyncData.headersFetchProgress.totalFetchedHeadersCount) / float64(totalHeadersToFetch) + + // If there was some period of inactivity, + // assume that this process started at some point in the future, + // thereby accounting for the total reported time of inactivity. 
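+	// (totalInactiveSeconds is accumulated by SyncInactiveForPeriod; shifting
+	// beginFetchTimeStamp forward by that amount keeps the elapsed-time and
+	// ETA calculations below from counting the inactive period.)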
+ w.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp += w.syncData.activeSyncData.totalInactiveSeconds + w.syncData.activeSyncData.totalInactiveSeconds = 0 + + fetchTimeTakenSoFar := time.Now().Unix() - w.syncData.activeSyncData.headersFetchProgress.beginFetchTimeStamp + if fetchTimeTakenSoFar < 1 { + fetchTimeTakenSoFar = 1 + } + estimatedTotalHeadersFetchTime := float64(fetchTimeTakenSoFar) / headersFetchProgress + + // For some reason, the actual total headers fetch time is more than the predicted/estimated time. + // Account for this difference by multiplying the estimatedTotalHeadersFetchTime by an incrementing factor. + // The incrementing factor is inversely proportional to the headers fetch progress, + // ranging from 0.5 to 0 as headers fetching progress increases from 0 to 1. + // todo, the above noted (mal)calculation may explain this difference. + // TODO: is this adjustment still needed since the calculation has been corrected. + adjustmentFactor := 0.5 * (1 - headersFetchProgress) + estimatedTotalHeadersFetchTime += estimatedTotalHeadersFetchTime * adjustmentFactor + + estimatedDiscoveryTime := estimatedTotalHeadersFetchTime * DiscoveryPercentage + estimatedRescanTime := estimatedTotalHeadersFetchTime * RescanPercentage + estimatedTotalSyncTime := float64(w.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent) + + estimatedTotalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime + + totalSyncProgress := float64(fetchTimeTakenSoFar) / estimatedTotalSyncTime + totalTimeRemainingSeconds := int64(math.Round(estimatedTotalSyncTime)) - fetchTimeTakenSoFar + + // update headers fetching progress report including total progress percentage and total time remaining + w.syncData.activeSyncData.headersFetchProgress.TotalHeadersToFetch = totalHeadersToFetch + w.syncData.activeSyncData.headersFetchProgress.CurrentHeaderHeight = lastFetchedHeaderHeight + w.syncData.activeSyncData.headersFetchProgress.CurrentHeaderTimestamp = lastFetchedHeaderTime + w.syncData.activeSyncData.headersFetchProgress.HeadersFetchProgress = roundUp(headersFetchProgress * 100.0) + w.syncData.activeSyncData.headersFetchProgress.TotalSyncProgress = roundUp(totalSyncProgress * 100.0) + w.syncData.activeSyncData.headersFetchProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + + // unlock the mutex before issuing notification callbacks to prevent potential deadlock + // if any invoked callback takes a considerable amount of time to execute. 
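+	// (for example, a listener that calls back into GeneralSyncProgress or
+	// ConnectedPeers would try to re-acquire syncData.mu and deadlock).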
+ w.syncData.mu.Unlock() + + // notify progress listener of estimated progress report + w.publishFetchHeadersProgress() + + // todo: also log report if showLog == true + timeTakenSoFar := w.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + fetchTimeTakenSoFar + headersFetchTimeRemaining := estimatedTotalHeadersFetchTime - float64(fetchTimeTakenSoFar) + debugInfo := &DebugInfo{ + timeTakenSoFar, + totalTimeRemainingSeconds, + fetchTimeTakenSoFar, + int64(math.Round(headersFetchTimeRemaining)), + } + w.publishDebugInfo(debugInfo) +} + +func (w *Wallet) publishFetchHeadersProgress() { + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnHeadersFetchProgress(&w.syncData.headersFetchProgress) + } +} + +func (w *Wallet) fetchHeadersFinished() { + w.syncData.mu.Lock() + defer w.syncData.mu.Unlock() + + if !w.syncData.syncing { + // ignore if sync is not in progress + return + } + + w.syncData.activeSyncData.headersFetchProgress.startHeaderHeight = -1 + w.syncData.headersFetchProgress.totalFetchedHeadersCount = 0 + w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent = time.Now().Unix() - w.syncData.headersFetchProgress.beginFetchTimeStamp + + // If there is some period of inactivity reported at this stage, + // subtract it from the total stage time. + w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent -= w.syncData.totalInactiveSeconds + w.syncData.activeSyncData.totalInactiveSeconds = 0 + + if w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent < 150 { + // This ensures that minimum ETA used for stage 2 (address discovery) is 120 seconds (80% of 150 seconds). + w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent = 150 + } + + if w.syncData.showLogs && w.syncData.syncing { + log.Info("Fetch headers completed.") + } +} + +// Address/Account Discovery Callbacks + +func (w *Wallet) discoverAddressesStarted(walletID int) { + if !w.IsSyncing() { + return + } + + w.syncData.mu.RLock() + addressDiscoveryAlreadyStarted := w.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime != -1 + totalHeadersFetchTime := float64(w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent) + w.syncData.mu.RUnlock() + + if addressDiscoveryAlreadyStarted { + return + } + + w.syncData.mu.Lock() + w.syncData.activeSyncData.syncStage = AddressDiscoverySyncStage + w.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime = time.Now().Unix() + w.syncData.activeSyncData.addressDiscoveryProgress.WalletID = walletID + w.syncData.addressDiscoveryCompletedOrCanceled = make(chan bool) + w.syncData.mu.Unlock() + + go w.updateAddressDiscoveryProgress(totalHeadersFetchTime) + + if w.syncData.showLogs { + log.Info("Step 2 of 3 - discovering used addresses.") + } +} + +func (w *Wallet) updateAddressDiscoveryProgress(totalHeadersFetchTime float64) { + // use ticker to calculate and broadcast address discovery progress every second + everySecondTicker := time.NewTicker(1 * time.Second) + + // these values will be used every second to calculate the total sync progress + estimatedDiscoveryTime := totalHeadersFetchTime * DiscoveryPercentage + estimatedRescanTime := totalHeadersFetchTime * RescanPercentage + + // track last logged time remaining and total percent to avoid re-logging same message + var lastTimeRemaining int64 + var lastTotalPercent int32 = -1 + + for { + if !w.IsSyncing() { + return + } + + // If there was some period of inactivity, + // assume that 
this process started at some point in the future, + // thereby accounting for the total reported time of inactivity. + w.syncData.mu.Lock() + w.syncData.addressDiscoveryProgress.addressDiscoveryStartTime += w.syncData.totalInactiveSeconds + w.syncData.totalInactiveSeconds = 0 + addressDiscoveryStartTime := w.syncData.addressDiscoveryProgress.addressDiscoveryStartTime + totalCfiltersFetchTime := float64(w.syncData.cfiltersFetchProgress.cfiltersFetchTimeSpent) + showLogs := w.syncData.showLogs + w.syncData.mu.Unlock() + + select { + case <-w.syncData.addressDiscoveryCompletedOrCanceled: + // stop calculating and broadcasting address discovery progress + everySecondTicker.Stop() + if showLogs { + log.Info("Address discovery complete.") + } + return + + case <-everySecondTicker.C: + // calculate address discovery progress + elapsedDiscoveryTime := float64(time.Now().Unix() - addressDiscoveryStartTime) + discoveryProgress := (elapsedDiscoveryTime / estimatedDiscoveryTime) * 100 + + var totalSyncTime float64 + if elapsedDiscoveryTime > estimatedDiscoveryTime { + totalSyncTime = totalCfiltersFetchTime + totalHeadersFetchTime + elapsedDiscoveryTime + estimatedRescanTime + } else { + totalSyncTime = totalCfiltersFetchTime + totalHeadersFetchTime + estimatedDiscoveryTime + estimatedRescanTime + } + + totalElapsedTime := totalCfiltersFetchTime + totalHeadersFetchTime + elapsedDiscoveryTime + totalProgress := (totalElapsedTime / totalSyncTime) * 100 + + remainingAccountDiscoveryTime := math.Round(estimatedDiscoveryTime - elapsedDiscoveryTime) + if remainingAccountDiscoveryTime < 0 { + remainingAccountDiscoveryTime = 0 + } + + totalProgressPercent := int32(math.Round(totalProgress)) + totalTimeRemainingSeconds := int64(math.Round(remainingAccountDiscoveryTime + estimatedRescanTime)) + + // update address discovery progress, total progress and total time remaining + w.syncData.mu.Lock() + w.syncData.addressDiscoveryProgress.AddressDiscoveryProgress = int32(math.Round(discoveryProgress)) + w.syncData.addressDiscoveryProgress.TotalSyncProgress = totalProgressPercent + w.syncData.addressDiscoveryProgress.TotalTimeRemainingSeconds = totalTimeRemainingSeconds + w.syncData.mu.Unlock() + + w.publishAddressDiscoveryProgress() + + debugInfo := &DebugInfo{ + int64(math.Round(totalElapsedTime)), + totalTimeRemainingSeconds, + int64(math.Round(elapsedDiscoveryTime)), + int64(math.Round(remainingAccountDiscoveryTime)), + } + w.publishDebugInfo(debugInfo) + + if showLogs { + // avoid logging same message multiple times + if totalProgressPercent != lastTotalPercent || totalTimeRemainingSeconds != lastTimeRemaining { + log.Infof("Syncing %d%%, %s remaining, discovering used addresses.", + totalProgressPercent, CalculateTotalTimeRemaining(totalTimeRemainingSeconds)) + + lastTotalPercent = totalProgressPercent + lastTimeRemaining = totalTimeRemainingSeconds + } + } + } + } +} + +func (w *Wallet) publishAddressDiscoveryProgress() { + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnAddressDiscoveryProgress(&w.syncData.activeSyncData.addressDiscoveryProgress) + } +} + +func (w *Wallet) discoverAddressesFinished(walletID int) { + if !w.IsSyncing() { + return + } + + w.stopUpdatingAddressDiscoveryProgress() +} + +func (w *Wallet) stopUpdatingAddressDiscoveryProgress() { + w.syncData.mu.Lock() + if w.syncData.activeSyncData != nil && w.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled != nil { + close(w.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled) + 
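+		// Closing the channel wakes the select loop in
+		// updateAddressDiscoveryProgress so its ticker is stopped; the nil
+		// assignment below guards against closing the channel twice if this
+		// method is called again.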
w.syncData.activeSyncData.addressDiscoveryCompletedOrCanceled = nil + w.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent = time.Now().Unix() - w.syncData.addressDiscoveryProgress.addressDiscoveryStartTime + } + w.syncData.mu.Unlock() +} + +// Blocks Scan Callbacks + +func (w *Wallet) rescanStarted(walletID int) { + w.stopUpdatingAddressDiscoveryProgress() + + w.syncData.mu.Lock() + defer w.syncData.mu.Unlock() + + if !w.syncData.syncing { + // ignore if sync is not in progress + return + } + + w.syncData.activeSyncData.syncStage = HeadersRescanSyncStage + w.syncData.activeSyncData.rescanStartTime = time.Now().Unix() + + // retain last total progress report from address discovery phase + w.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = w.syncData.activeSyncData.addressDiscoveryProgress.TotalTimeRemainingSeconds + w.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = w.syncData.activeSyncData.addressDiscoveryProgress.TotalSyncProgress + w.syncData.activeSyncData.headersRescanProgress.WalletID = walletID + + if w.syncData.showLogs && w.syncData.syncing { + log.Info("Step 3 of 3 - Scanning block headers.") + } +} + +func (w *Wallet) rescanProgress(walletID int, rescannedThrough int32) { + if !w.IsSyncing() { + // ignore if sync is not in progress + return + } + + totalHeadersToScan := w.GetBestBlock() + + rescanRate := float64(rescannedThrough) / float64(totalHeadersToScan) + + w.syncData.mu.Lock() + + // If there was some period of inactivity, + // assume that this process started at some point in the future, + // thereby accounting for the total reported time of inactivity. + w.syncData.activeSyncData.rescanStartTime += w.syncData.activeSyncData.totalInactiveSeconds + w.syncData.activeSyncData.totalInactiveSeconds = 0 + + elapsedRescanTime := time.Now().Unix() - w.syncData.activeSyncData.rescanStartTime + estimatedTotalRescanTime := int64(math.Round(float64(elapsedRescanTime) / rescanRate)) + totalTimeRemainingSeconds := estimatedTotalRescanTime - elapsedRescanTime + totalElapsedTime := w.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent + + w.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent + elapsedRescanTime + + w.syncData.activeSyncData.headersRescanProgress.WalletID = walletID + w.syncData.activeSyncData.headersRescanProgress.TotalHeadersToScan = totalHeadersToScan + w.syncData.activeSyncData.headersRescanProgress.RescanProgress = int32(math.Round(rescanRate * 100)) + w.syncData.activeSyncData.headersRescanProgress.CurrentRescanHeight = rescannedThrough + w.syncData.activeSyncData.headersRescanProgress.RescanTimeRemaining = totalTimeRemainingSeconds + + // do not update total time taken and total progress percent if elapsedRescanTime is 0 + // because the estimatedTotalRescanTime will be inaccurate (also 0) + // which will make the estimatedTotalSyncTime equal to totalElapsedTime + // giving the wrong impression that the process is complete + if elapsedRescanTime > 0 { + estimatedTotalSyncTime := w.syncData.activeSyncData.cfiltersFetchProgress.cfiltersFetchTimeSpent + w.syncData.activeSyncData.headersFetchProgress.headersFetchTimeSpent + + w.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent + estimatedTotalRescanTime + totalProgress := (float64(totalElapsedTime) / float64(estimatedTotalSyncTime)) * 100 + + w.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = 
totalTimeRemainingSeconds + w.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = int32(math.Round(totalProgress)) + } + + w.syncData.mu.Unlock() + + w.publishHeadersRescanProgress() + + debugInfo := &DebugInfo{ + totalElapsedTime, + totalTimeRemainingSeconds, + elapsedRescanTime, + totalTimeRemainingSeconds, + } + w.publishDebugInfo(debugInfo) + + w.syncData.mu.RLock() + if w.syncData.showLogs { + log.Infof("Syncing %d%%, %s remaining, scanning %d of %d block headers.", + w.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress, + CalculateTotalTimeRemaining(w.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds), + w.syncData.activeSyncData.headersRescanProgress.CurrentRescanHeight, + w.syncData.activeSyncData.headersRescanProgress.TotalHeadersToScan, + ) + } + w.syncData.mu.RUnlock() +} + +func (w *Wallet) publishHeadersRescanProgress() { + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnHeadersRescanProgress(&w.syncData.activeSyncData.headersRescanProgress) + } +} + +func (w *Wallet) rescanFinished(walletID int) { + if !w.IsSyncing() { + // ignore if sync is not in progress + return + } + + w.syncData.mu.Lock() + w.syncData.activeSyncData.headersRescanProgress.WalletID = walletID + w.syncData.activeSyncData.headersRescanProgress.TotalTimeRemainingSeconds = 0 + w.syncData.activeSyncData.headersRescanProgress.TotalSyncProgress = 100 + + // Reset these value so that address discovery would + // not be skipped for the next wallet. + w.syncData.activeSyncData.addressDiscoveryProgress.addressDiscoveryStartTime = -1 + w.syncData.activeSyncData.addressDiscoveryProgress.totalDiscoveryTimeSpent = -1 + w.syncData.mu.Unlock() + + w.publishHeadersRescanProgress() +} + +func (w *Wallet) publishDebugInfo(debugInfo *DebugInfo) { + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.Debug(debugInfo) + } +} + +/** Helper functions start here */ + +func (w *Wallet) estimateBlockHeadersCountAfter(lastHeaderTime int64) int32 { + // Use the difference between current time (now) and last reported block time, + // to estimate total headers to fetch. + timeDifferenceInSeconds := float64(time.Now().Unix() - lastHeaderTime) + targetTimePerBlockInSeconds := w.chainParams.TargetTimePerBlock.Seconds() + estimatedHeadersDifference := timeDifferenceInSeconds / targetTimePerBlockInSeconds + + // return next integer value (upper limit) if estimatedHeadersDifference is a fraction + return int32(math.Ceil(estimatedHeadersDifference)) +} + +func (w *Wallet) notifySyncError(err error) { + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnSyncEndedWithError(err) + } +} + +func (w *Wallet) notifySyncCanceled() { + w.syncData.mu.RLock() + restartSyncRequested := w.syncData.restartSyncRequested + w.syncData.mu.RUnlock() + + for _, syncProgressListener := range w.syncProgressListeners() { + syncProgressListener.OnSyncCanceled(restartSyncRequested) + } +} + +func (w *Wallet) resetSyncData() { + // It's possible that sync ends or errors while address discovery is ongoing. + // If this happens, it's important to stop the address discovery process before + // resetting sync data. 
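+	// (stopping it signals the updateAddressDiscoveryProgress goroutine to
+	// exit instead of continuing to read the sync data that is cleared below).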
+ w.stopUpdatingAddressDiscoveryProgress() + + w.syncData.mu.Lock() + w.syncData.syncing = false + w.syncData.synced = false + w.syncData.cancelSync = nil + w.syncData.syncCanceled = nil + w.syncData.activeSyncData = nil + w.syncData.mu.Unlock() + + for _, wallet := range w.wallets { + wallet.WaitingForHeaders = true + wallet.LockWallet() // lock wallet if previously unlocked to perform account discovery. + } +} + +func (w *Wallet) synced(walletID int, synced bool) { + + indexTransactions := func() { + // begin indexing transactions after sync is completed, + // syncProgressListeners.OnSynced() will be invoked after transactions are indexed + var txIndexing errgroup.Group + for _, wallet := range w.wallets { + txIndexing.Go(wallet.IndexTransactions) + } + + go func() { + err := txIndexing.Wait() + if err != nil { + log.Errorf("Tx Index Error: %v", err) + } + + for _, syncProgressListener := range w.syncProgressListeners() { + if synced { + syncProgressListener.OnSyncCompleted() + } else { + syncProgressListener.OnSyncCanceled(false) + } + } + }() + } + + w.syncData.mu.RLock() + allWalletsSynced := w.syncData.synced + w.syncData.mu.RUnlock() + + if allWalletsSynced && synced { + indexTransactions() + return + } + + w.Synced = synced + w.Syncing = false + w.listenForTransactions(wallet.ID) + + if !w.Internal().Locked() { + w.LockWallet() // lock wallet if previously unlocked to perform account discovery. + err := w.markWalletAsDiscoveredAccounts(walletID) + if err != nil { + log.Error(err) + } + } + + if w.OpenedWalletsCount() == w.SyncedWalletsCount() { + w.syncData.mu.Lock() + w.syncData.syncing = false + w.syncData.synced = true + w.syncData.mu.Unlock() + + indexTransactions() + } +} diff --git a/ticket.go b/wallets/dcr/ticket.go similarity index 70% rename from ticket.go rename to wallets/dcr/ticket.go index e1b2de3a9..fcd7c959d 100644 --- a/ticket.go +++ b/wallets/dcr/ticket.go @@ -1,11 +1,11 @@ -package dcrlibwallet +package dcr import ( "context" "fmt" "runtime/trace" "sync" - "time" + // "time" "decred.org/dcrwallet/v2/errors" w "decred.org/dcrwallet/v2/wallet" @@ -13,7 +13,7 @@ import ( "github.com/decred/dcrd/dcrutil/v4" "github.com/decred/dcrd/wire" "github.com/planetdecred/dcrlibwallet/internal/vsp" - "github.com/planetdecred/dcrlibwallet/utils" + // "github.com/planetdecred/dcrlibwallet/utils" ) func (wallet *Wallet) TotalStakingRewards() (int64, error) { @@ -30,27 +30,27 @@ func (wallet *Wallet) TotalStakingRewards() (int64, error) { return totalRewards, nil } -func (mw *MultiWallet) TotalStakingRewards() (int64, error) { - var totalRewards int64 - for _, wal := range mw.wallets { - walletTotalRewards, err := wal.TotalStakingRewards() - if err != nil { - return 0, err - } +// func (mw *MultiWallet) TotalStakingRewards() (int64, error) { +// var totalRewards int64 +// for _, wal := range mw.wallets { +// walletTotalRewards, err := wal.TotalStakingRewards() +// if err != nil { +// return 0, err +// } - totalRewards += walletTotalRewards - } +// totalRewards += walletTotalRewards +// } - return totalRewards, nil -} +// return totalRewards, nil +// } -func (mw *MultiWallet) TicketMaturity() int32 { - return int32(mw.chainParams.TicketMaturity) -} +// func (mw *MultiWallet) TicketMaturity() int32 { +// return int32(mw.chainParams.TicketMaturity) +// } -func (mw *MultiWallet) TicketExpiry() int32 { - return int32(mw.chainParams.TicketExpiry) -} +// func (mw *MultiWallet) TicketExpiry() int32 { +// return int32(mw.chainParams.TicketExpiry) +// } func (wallet *Wallet) StakingOverview() 
(stOverview *StakingOverview, err error) { stOverview = &StakingOverview{} @@ -91,34 +91,34 @@ func (wallet *Wallet) StakingOverview() (stOverview *StakingOverview, err error) return stOverview, nil } -func (mw *MultiWallet) StakingOverview() (stOverview *StakingOverview, err error) { - stOverview = &StakingOverview{} +// func (mw *MultiWallet) StakingOverview() (stOverview *StakingOverview, err error) { +// stOverview = &StakingOverview{} - for _, wallet := range mw.wallets { - st, err := wallet.StakingOverview() - if err != nil { - return nil, err - } +// for _, wallet := range mw.wallets { +// st, err := wallet.StakingOverview() +// if err != nil { +// return nil, err +// } - stOverview.Unmined += st.Unmined - stOverview.Immature += st.Immature - stOverview.Live += st.Live - stOverview.Voted += st.Voted - stOverview.Revoked += st.Revoked - stOverview.Expired += st.Expired - } +// stOverview.Unmined += st.Unmined +// stOverview.Immature += st.Immature +// stOverview.Live += st.Live +// stOverview.Voted += st.Voted +// stOverview.Revoked += st.Revoked +// stOverview.Expired += st.Expired +// } - stOverview.All = stOverview.Unmined + stOverview.Immature + stOverview.Live + stOverview.Voted + - stOverview.Revoked + stOverview.Expired +// stOverview.All = stOverview.Unmined + stOverview.Immature + stOverview.Live + stOverview.Voted + +// stOverview.Revoked + stOverview.Expired - return stOverview, nil -} +// return stOverview, nil +// } // TicketPrice returns the price of a ticket for the next block, also known as // the stake difficulty. May be incorrect if blockchain sync is ongoing or if // blockchain is not up-to-date. func (wallet *Wallet) TicketPrice() (*TicketPriceResponse, error) { - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() sdiff, err := wallet.Internal().NextStakeDifficulty(ctx) if err != nil { return nil, err @@ -132,21 +132,21 @@ func (wallet *Wallet) TicketPrice() (*TicketPriceResponse, error) { return resp, nil } -func (mw *MultiWallet) TicketPrice() (*TicketPriceResponse, error) { - bestBlock := mw.GetBestBlock() - for _, wal := range mw.wallets { - resp, err := wal.TicketPrice() - if err != nil { - return nil, err - } +// func (mw *MultiWallet) TicketPrice() (*TicketPriceResponse, error) { +// bestBlock := mw.GetBestBlock() +// for _, wal := range mw.wallets { +// resp, err := wal.TicketPrice() +// if err != nil { +// return nil, err +// } - if resp.Height == bestBlock.Height { - return resp, nil - } - } +// if resp.Height == bestBlock.Height { +// return resp, nil +// } +// } - return nil, errors.New(ErrWalletNotFound) -} +// return nil, errors.New(ErrWalletNotFound) +// } // PurchaseTickets purchases tickets from the wallet. // Returns a slice of hashes for tickets purchased. @@ -194,7 +194,7 @@ func (wallet *Wallet) PurchaseTickets(account, numTickets int32, vspHost string, request.MixedSplitAccount = csppCfg.TicketSplitAccount } - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() ticketsResponse, err := wallet.Internal().PurchaseTickets(ctx, networkBackend, request) if err != nil { return nil, err @@ -205,77 +205,77 @@ func (wallet *Wallet) PurchaseTickets(account, numTickets int32, vspHost string, // VSPTicketInfo returns vsp-related info for a given ticket. Returns an error // if the ticket is not yet assigned to a VSP. 
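The per-wallet staking queries above (StakingOverview, TotalStakingRewards, TicketPrice) are now plain methods on the dcr package's Wallet type. A minimal usage sketch, assuming an already-loaded and synced *dcr.Wallet supplied by the caller; the package and function names here are illustrative and not part of this patch:

package walletexample

import (
	"fmt"
	"log"

	"github.com/planetdecred/dcrlibwallet/wallets/dcr"
)

// printStakingSummary prints the wallet's staking overview, lifetime staking
// rewards, and the current ticket price.
func printStakingSummary(wallet *dcr.Wallet) {
	overview, err := wallet.StakingOverview()
	if err != nil {
		log.Fatalf("staking overview: %v", err)
	}
	fmt.Printf("tickets: %d total, %d live, %d immature, %d voted\n",
		overview.All, overview.Live, overview.Immature, overview.Voted)

	rewards, err := wallet.TotalStakingRewards()
	if err != nil {
		log.Fatalf("total staking rewards: %v", err)
	}
	fmt.Printf("staking rewards: %.2f DCR\n", dcr.AmountCoin(rewards))

	price, err := wallet.TicketPrice()
	if err != nil {
		log.Fatalf("ticket price: %v", err)
	}
	fmt.Printf("ticket price at height %d: %.2f DCR\n",
		price.Height, dcr.AmountCoin(price.TicketPrice))
}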
-func (mw *MultiWallet) VSPTicketInfo(walletID int, hash string) (*VSPTicketInfo, error) { - wallet := mw.WalletWithID(walletID) - if wallet == nil { - return nil, fmt.Errorf("no wallet with ID %d", walletID) - } - - ticketHash, err := chainhash.NewHashFromStr(hash) - if err != nil { - return nil, err - } - - // Read the VSP info for this ticket from the wallet db. - ctx := wallet.shutdownContext() - walletTicketInfo, err := wallet.Internal().VSPTicketInfo(ctx, ticketHash) - if err != nil { - return nil, err - } - - ticketInfo := &VSPTicketInfo{ - VSP: walletTicketInfo.Host, - FeeTxHash: walletTicketInfo.FeeHash.String(), - FeeTxStatus: VSPFeeStatus(walletTicketInfo.FeeTxStatus), - } - - // Cannot submit a ticketstatus api request to the VSP if - // the wallet is locked. Return just the wallet info. - if wallet.IsLocked() { - return ticketInfo, nil - } - - vspClient, err := wallet.VSPClient(walletTicketInfo.Host, walletTicketInfo.PubKey) - if err != nil { - log.Warnf("unable to get vsp ticket info for %s: %v", hash, err) - return ticketInfo, nil - } - vspTicketStatus, err := vspClient.TicketStatus(ctx, ticketHash) - if err != nil { - log.Warnf("unable to get vsp ticket info for %s: %v", hash, err) - return ticketInfo, nil - } - - // Parse the fee status returned by the vsp. - var vspFeeStatus VSPFeeStatus - switch vspTicketStatus.FeeTxStatus { - case "received": // received but not broadcast - vspFeeStatus = VSPFeeProcessStarted - case "broadcast": // broadcast but not confirmed - vspFeeStatus = VSPFeeProcessPaid - case "confirmed": // broadcast and confirmed - vspFeeStatus = VSPFeeProcessConfirmed - case "error": - vspFeeStatus = VSPFeeProcessErrored - default: - vspFeeStatus = VSPFeeProcessErrored - log.Warnf("VSP responded with %v for %v", vspTicketStatus.FeeTxStatus, ticketHash) - } - - // Sanity check and log any observed discrepancies. - if ticketInfo.FeeTxHash != vspTicketStatus.FeeTxHash { - log.Warnf("wallet fee tx hash %s differs from vsp fee tx hash %s for ticket %s", - ticketInfo.FeeTxHash, vspTicketStatus.FeeTxHash, ticketHash) - ticketInfo.FeeTxHash = vspTicketStatus.FeeTxHash - } - if ticketInfo.FeeTxStatus != vspFeeStatus { - log.Warnf("wallet fee status %q differs from vsp fee status %q for ticket %s", - ticketInfo.FeeTxStatus, vspFeeStatus, ticketHash) - ticketInfo.FeeTxStatus = vspFeeStatus - } - - return ticketInfo, nil -} +// func (mw *MultiWallet) VSPTicketInfo(walletID int, hash string) (*VSPTicketInfo, error) { +// wallet := mw.WalletWithID(walletID) +// if wallet == nil { +// return nil, fmt.Errorf("no wallet with ID %d", walletID) +// } + +// ticketHash, err := chainhash.NewHashFromStr(hash) +// if err != nil { +// return nil, err +// } + +// // Read the VSP info for this ticket from the wallet db. +// ctx := wallet.shutdownContext() +// walletTicketInfo, err := wallet.Internal().VSPTicketInfo(ctx, ticketHash) +// if err != nil { +// return nil, err +// } + +// ticketInfo := &VSPTicketInfo{ +// VSP: walletTicketInfo.Host, +// FeeTxHash: walletTicketInfo.FeeHash.String(), +// FeeTxStatus: VSPFeeStatus(walletTicketInfo.FeeTxStatus), +// } + +// // Cannot submit a ticketstatus api request to the VSP if +// // the wallet is locked. Return just the wallet info. 
+// if wallet.IsLocked() { +// return ticketInfo, nil +// } + +// vspClient, err := wallet.VSPClient(walletTicketInfo.Host, walletTicketInfo.PubKey) +// if err != nil { +// log.Warnf("unable to get vsp ticket info for %s: %v", hash, err) +// return ticketInfo, nil +// } +// vspTicketStatus, err := vspClient.TicketStatus(ctx, ticketHash) +// if err != nil { +// log.Warnf("unable to get vsp ticket info for %s: %v", hash, err) +// return ticketInfo, nil +// } + +// // Parse the fee status returned by the vsp. +// var vspFeeStatus VSPFeeStatus +// switch vspTicketStatus.FeeTxStatus { +// case "received": // received but not broadcast +// vspFeeStatus = VSPFeeProcessStarted +// case "broadcast": // broadcast but not confirmed +// vspFeeStatus = VSPFeeProcessPaid +// case "confirmed": // broadcast and confirmed +// vspFeeStatus = VSPFeeProcessConfirmed +// case "error": +// vspFeeStatus = VSPFeeProcessErrored +// default: +// vspFeeStatus = VSPFeeProcessErrored +// log.Warnf("VSP responded with %v for %v", vspTicketStatus.FeeTxStatus, ticketHash) +// } + +// // Sanity check and log any observed discrepancies. +// if ticketInfo.FeeTxHash != vspTicketStatus.FeeTxHash { +// log.Warnf("wallet fee tx hash %s differs from vsp fee tx hash %s for ticket %s", +// ticketInfo.FeeTxHash, vspTicketStatus.FeeTxHash, ticketHash) +// ticketInfo.FeeTxHash = vspTicketStatus.FeeTxHash +// } +// if ticketInfo.FeeTxStatus != vspFeeStatus { +// log.Warnf("wallet fee status %q differs from vsp fee status %q for ticket %s", +// ticketInfo.FeeTxStatus, vspFeeStatus, ticketHash) +// ticketInfo.FeeTxStatus = vspFeeStatus +// } + +// return ticketInfo, nil +// } // StartTicketBuyer starts the automatic ticket buyer. The wallet // should already be configured with the required parameters using @@ -295,7 +295,7 @@ func (wallet *Wallet) StartTicketBuyer(passphrase []byte) error { return errors.New("Ticket buyer already running") } - ctx, cancel := wallet.shutdownContextWithCancel() + ctx, cancel := wallet.ShutdownContextWithCancel() wallet.cancelAutoTicketBuyer = cancel wallet.cancelAutoTicketBuyerMu.Unlock() @@ -545,23 +545,23 @@ func (wallet *Wallet) IsAutoTicketsPurchaseActive() bool { } // StopAutoTicketsPurchase stops the automatic ticket buyer. -func (mw *MultiWallet) StopAutoTicketsPurchase(walletID int) error { - wallet := mw.WalletWithID(walletID) - if wallet == nil { - return errors.New(ErrNotExist) - } +// func (mw *MultiWallet) StopAutoTicketsPurchase(walletID int) error { +// wallet := mw.WalletWithID(walletID) +// if wallet == nil { +// return errors.New(ErrNotExist) +// } - wallet.cancelAutoTicketBuyerMu.Lock() - defer wallet.cancelAutoTicketBuyerMu.Unlock() +// wallet.cancelAutoTicketBuyerMu.Lock() +// defer wallet.cancelAutoTicketBuyerMu.Unlock() - if wallet.cancelAutoTicketBuyer == nil { - return errors.New(ErrInvalid) - } +// if wallet.cancelAutoTicketBuyer == nil { +// return errors.New(ErrInvalid) +// } - wallet.cancelAutoTicketBuyer() - wallet.cancelAutoTicketBuyer = nil - return nil -} +// wallet.cancelAutoTicketBuyer() +// wallet.cancelAutoTicketBuyer = nil +// return nil +// } // SetAutoTicketsBuyerConfig sets ticket buyer config for the wallet. func (wallet *Wallet) SetAutoTicketsBuyerConfig(vspHost string, purchaseAccount int32, amountToMaintain int64) { @@ -590,39 +590,39 @@ func (wallet *Wallet) TicketBuyerConfigIsSet() bool { } // ClearTicketBuyerConfig clears the wallet's ticket buyer config. 
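The ticket buyer configuration and control methods shown above (SetAutoTicketsBuyerConfig, TicketBuyerConfigIsSet, IsAutoTicketsPurchaseActive, StartTicketBuyer) now live on the per-wallet type. A hedged sketch of how they might be wired together; the VSP host, account number, and balance target are placeholders, and the wallet is assumed to be already open:

package walletexample

import (
	"log"

	"github.com/planetdecred/dcrlibwallet/wallets/dcr"
)

// startAutoTicketBuyer configures the automatic ticket buyer once and starts
// it if it is not already running.
func startAutoTicketBuyer(wallet *dcr.Wallet, passphrase []byte) {
	if !wallet.TicketBuyerConfigIsSet() {
		// Placeholder values: buy through example-vsp.org from account 0
		// while keeping roughly 10 DCR (expressed in atoms) in reserve.
		wallet.SetAutoTicketsBuyerConfig("https://example-vsp.org", 0, dcr.AmountAtom(10))
	}

	if wallet.IsAutoTicketsPurchaseActive() {
		log.Println("ticket buyer is already running")
		return
	}

	if err := wallet.StartTicketBuyer(passphrase); err != nil {
		log.Printf("start ticket buyer: %v", err)
	}
}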
-func (mw *MultiWallet) ClearTicketBuyerConfig(walletID int) error { - wallet := mw.WalletWithID(walletID) - if wallet == nil { - return errors.New(ErrNotExist) - } +// func (mw *MultiWallet) ClearTicketBuyerConfig(walletID int) error { +// wallet := mw.WalletWithID(walletID) +// if wallet == nil { +// return errors.New(ErrNotExist) +// } - mw.SetLongConfigValueForKey(TicketBuyerATMConfigKey, -1) - mw.SetInt32ConfigValueForKey(TicketBuyerAccountConfigKey, -1) - mw.SetStringConfigValueForKey(TicketBuyerVSPHostConfigKey, "") +// mw.SetLongConfigValueForKey(TicketBuyerATMConfigKey, -1) +// mw.SetInt32ConfigValueForKey(TicketBuyerAccountConfigKey, -1) +// mw.SetStringConfigValueForKey(TicketBuyerVSPHostConfigKey, "") - return nil -} +// return nil +// } // NextTicketPriceRemaining returns the remaning time in seconds of a ticket for the next block, // if secs equal 0 is imminent -func (mw *MultiWallet) NextTicketPriceRemaining() (secs int64, err error) { - params, er := utils.ChainParams(mw.chainParams.Name) - if er != nil { - secs, err = -1, er - return - } - bestBestBlock := mw.GetBestBlock() - idxBlockInWindow := int(int64(bestBestBlock.Height)%params.StakeDiffWindowSize) + 1 - blockTime := params.TargetTimePerBlock.Nanoseconds() - windowSize := params.StakeDiffWindowSize - x := (windowSize - int64(idxBlockInWindow)) * blockTime - if x == 0 { - secs, err = 0, nil - return - } - secs, err = int64(time.Duration(x).Seconds()), nil - return -} +// func (mw *MultiWallet) NextTicketPriceRemaining() (secs int64, err error) { +// params, er := utils.ChainParams(mw.chainParams.Name) +// if er != nil { +// secs, err = -1, er +// return +// } +// bestBestBlock := mw.GetBestBlock() +// idxBlockInWindow := int(int64(bestBestBlock.Height)%params.StakeDiffWindowSize) + 1 +// blockTime := params.TargetTimePerBlock.Nanoseconds() +// windowSize := params.StakeDiffWindowSize +// x := (windowSize - int64(idxBlockInWindow)) * blockTime +// if x == 0 { +// secs, err = 0, nil +// return +// } +// secs, err = int64(time.Duration(x).Seconds()), nil +// return +// } // UnspentUnexpiredTickets returns all Unmined, Immature and Live tickets. 
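The commented-out NextTicketPriceRemaining above carries the stake-difficulty window arithmetic. The same calculation as a small standalone helper that takes the best block height and chain parameters directly; the helper name is illustrative:

package walletexample

import (
	"time"

	"github.com/decred/dcrd/chaincfg/v3"
)

// nextTicketPriceRemaining returns how long until the next stake difficulty
// (ticket price) change, mirroring the commented-out logic above. A result of
// 0 means the change is imminent.
func nextTicketPriceRemaining(bestBlockHeight int32, params *chaincfg.Params) time.Duration {
	idxBlockInWindow := int64(bestBlockHeight)%params.StakeDiffWindowSize + 1
	blocksRemaining := params.StakeDiffWindowSize - idxBlockInWindow
	return time.Duration(blocksRemaining * params.TargetTimePerBlock.Nanoseconds())
}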
func (wallet *Wallet) UnspentUnexpiredTickets() ([]Transaction, error) { diff --git a/transactions.go b/wallets/dcr/transactions.go similarity index 81% rename from transactions.go rename to wallets/dcr/transactions.go index 9bfb76844..9da4cf566 100644 --- a/transactions.go +++ b/wallets/dcr/transactions.go @@ -1,13 +1,13 @@ -package dcrlibwallet +package dcr import ( "encoding/json" - "sort" + // "sort" "github.com/asdine/storm" "github.com/decred/dcrd/chaincfg/chainhash" "github.com/planetdecred/dcrlibwallet/txhelper" - "github.com/planetdecred/dcrlibwallet/walletdata" + "github.com/planetdecred/dcrlibwallet/wallets/dcr/walletdata" ) const ( @@ -55,7 +55,7 @@ func (wallet *Wallet) PublishUnminedTransactions() error { return err } - return wallet.Internal().PublishUnminedTransactions(wallet.shutdownContext(), n) + return wallet.Internal().PublishUnminedTransactions(wallet.ShutdownContext(), n) } func (wallet *Wallet) GetTransaction(txHash string) (string, error) { @@ -80,7 +80,7 @@ func (wallet *Wallet) GetTransactionRaw(txHash string) (*Transaction, error) { return nil, err } - txSummary, _, blockHash, err := wallet.Internal().TransactionSummary(wallet.shutdownContext(), hash) + txSummary, _, blockHash, err := wallet.Internal().TransactionSummary(wallet.ShutdownContext(), hash) if err != nil { log.Error(err) return nil, err @@ -104,57 +104,57 @@ func (wallet *Wallet) GetTransactions(offset, limit, txFilter int32, newestFirst } func (wallet *Wallet) GetTransactionsRaw(offset, limit, txFilter int32, newestFirst bool) (transactions []Transaction, err error) { - err = wallet.walletDataDB.Read(offset, limit, txFilter, newestFirst, wallet.RequiredConfirmations(), wallet.GetBestBlock(), &transactions) + err = wallet.WalletDataDB.Read(offset, limit, txFilter, newestFirst, wallet.RequiredConfirmations(), wallet.GetBestBlock(), &transactions) return } -func (mw *MultiWallet) GetTransactions(offset, limit, txFilter int32, newestFirst bool) (string, error) { +// func (mw *MultiWallet) GetTransactions(offset, limit, txFilter int32, newestFirst bool) (string, error) { - transactions, err := mw.GetTransactionsRaw(offset, limit, txFilter, newestFirst) - if err != nil { - return "", err - } +// transactions, err := mw.GetTransactionsRaw(offset, limit, txFilter, newestFirst) +// if err != nil { +// return "", err +// } - jsonEncodedTransactions, err := json.Marshal(&transactions) - if err != nil { - return "", err - } +// jsonEncodedTransactions, err := json.Marshal(&transactions) +// if err != nil { +// return "", err +// } - return string(jsonEncodedTransactions), nil -} +// return string(jsonEncodedTransactions), nil +// } -func (mw *MultiWallet) GetTransactionsRaw(offset, limit, txFilter int32, newestFirst bool) ([]Transaction, error) { - transactions := make([]Transaction, 0) - for _, wallet := range mw.wallets { - walletTransactions, err := wallet.GetTransactionsRaw(offset, limit, txFilter, newestFirst) - if err != nil { - return nil, err - } +// func (mw *MultiWallet) GetTransactionsRaw(offset, limit, txFilter int32, newestFirst bool) ([]Transaction, error) { +// transactions := make([]Transaction, 0) +// for _, wallet := range mw.wallets { +// walletTransactions, err := wallet.GetTransactionsRaw(offset, limit, txFilter, newestFirst) +// if err != nil { +// return nil, err +// } - transactions = append(transactions, walletTransactions...) - } +// transactions = append(transactions, walletTransactions...) 
+// } - // sort transaction by timestamp in descending order - sort.Slice(transactions[:], func(i, j int) bool { - if newestFirst { - return transactions[i].Timestamp > transactions[j].Timestamp - } - return transactions[i].Timestamp < transactions[j].Timestamp - }) +// // sort transaction by timestamp in descending order +// sort.Slice(transactions[:], func(i, j int) bool { +// if newestFirst { +// return transactions[i].Timestamp > transactions[j].Timestamp +// } +// return transactions[i].Timestamp < transactions[j].Timestamp +// }) - if len(transactions) > int(limit) && limit > 0 { - transactions = transactions[:limit] - } +// if len(transactions) > int(limit) && limit > 0 { +// transactions = transactions[:limit] +// } - return transactions, nil -} +// return transactions, nil +// } func (wallet *Wallet) CountTransactions(txFilter int32) (int, error) { - return wallet.walletDataDB.Count(txFilter, wallet.RequiredConfirmations(), wallet.GetBestBlock(), &Transaction{}) + return wallet.WalletDataDB.Count(txFilter, wallet.RequiredConfirmations(), wallet.GetBestBlock(), &Transaction{}) } func (wallet *Wallet) TicketHasVotedOrRevoked(ticketHash string) (bool, error) { - err := wallet.walletDataDB.FindOne("TicketSpentHash", ticketHash, &Transaction{}) + err := wallet.WalletDataDB.FindOne("TicketSpentHash", ticketHash, &Transaction{}) if err != nil { if err == storm.ErrNotFound { return false, nil @@ -167,7 +167,7 @@ func (wallet *Wallet) TicketHasVotedOrRevoked(ticketHash string) (bool, error) { func (wallet *Wallet) TicketSpender(ticketHash string) (*Transaction, error) { var spender Transaction - err := wallet.walletDataDB.FindOne("TicketSpentHash", ticketHash, &spender) + err := wallet.WalletDataDB.FindOne("TicketSpentHash", ticketHash, &spender) if err != nil { if err == storm.ErrNotFound { return nil, nil diff --git a/wallets/dcr/txandblocknotifications.go b/wallets/dcr/txandblocknotifications.go new file mode 100644 index 000000000..f127bdbe4 --- /dev/null +++ b/wallets/dcr/txandblocknotifications.go @@ -0,0 +1,159 @@ +package dcr + +import ( + // "encoding/json" + + // "decred.org/dcrwallet/v2/errors" +) + +// func (mw *MultiWallet) listenForTransactions(walletID int) { +// go func() { + +// wallet := mw.wallets[walletID] +// n := wallet.Internal().NtfnServer.TransactionNotifications() + +// for { +// select { +// case v := <-n.C: +// if v == nil { +// return +// } +// for _, transaction := range v.UnminedTransactions { +// tempTransaction, err := wallet.decodeTransactionWithTxSummary(&transaction, nil) +// if err != nil { +// log.Errorf("[%d] Error ntfn parse tx: %v", wallet.ID, err) +// return +// } + +// overwritten, err := wallet.walletDataDB.SaveOrUpdate(&Transaction{}, tempTransaction) +// if err != nil { +// log.Errorf("[%d] New Tx save err: %v", wallet.ID, err) +// return +// } + +// if !overwritten { +// log.Infof("[%d] New Transaction %s", wallet.ID, tempTransaction.Hash) + +// result, err := json.Marshal(tempTransaction) +// if err != nil { +// log.Error(err) +// } else { +// mw.mempoolTransactionNotification(string(result)) +// } +// } +// } + +// for _, block := range v.AttachedBlocks { +// blockHash := block.Header.BlockHash() +// for _, transaction := range block.Transactions { +// tempTransaction, err := wallet.decodeTransactionWithTxSummary(&transaction, &blockHash) +// if err != nil { +// log.Errorf("[%d] Error ntfn parse tx: %v", wallet.ID, err) +// return +// } + +// _, err = wallet.walletDataDB.SaveOrUpdate(&Transaction{}, tempTransaction) +// if err != nil { +// 
log.Errorf("[%d] Incoming block replace tx error :%v", wallet.ID, err) +// return +// } +// mw.publishTransactionConfirmed(wallet.ID, transaction.Hash.String(), int32(block.Header.Height)) +// } + +// mw.publishBlockAttached(wallet.ID, int32(block.Header.Height)) +// } + +// if len(v.AttachedBlocks) > 0 { +// mw.checkWalletMixers() +// } + +// case <-mw.syncData.syncCanceled: +// n.Done() +// } +// } +// }() +// } + +// AddTxAndBlockNotificationListener registers a set of functions to be invoked +// when a transaction or block update is processed by the wallet. If async is +// true, the provided callback methods will be called from separate goroutines, +// allowing notification senders to continue their operation without waiting +// for the listener to complete processing the notification. This asyncrhonous +// handling is especially important for cases where the wallet process that +// sends the notification temporarily prevents access to other wallet features +// until all notification handlers finish processing the notification. If a +// notification handler were to try to access such features, it would result +// in a deadlock. +// func (mw *MultiWallet) AddTxAndBlockNotificationListener(txAndBlockNotificationListener TxAndBlockNotificationListener, async bool, uniqueIdentifier string) error { +// mw.notificationListenersMu.Lock() +// defer mw.notificationListenersMu.Unlock() + +// _, ok := mw.txAndBlockNotificationListeners[uniqueIdentifier] +// if ok { +// return errors.New(ErrListenerAlreadyExist) +// } + +// if async { +// mw.txAndBlockNotificationListeners[uniqueIdentifier] = &asyncTxAndBlockNotificationListener{ +// l: txAndBlockNotificationListener, +// } +// } else { +// mw.txAndBlockNotificationListeners[uniqueIdentifier] = txAndBlockNotificationListener +// } + +// return nil +// } + +// func (mw *MultiWallet) RemoveTxAndBlockNotificationListener(uniqueIdentifier string) { +// mw.notificationListenersMu.Lock() +// defer mw.notificationListenersMu.Unlock() + +// delete(mw.txAndBlockNotificationListeners, uniqueIdentifier) +// } + +// func (mw *MultiWallet) checkWalletMixers() { +// for _, wallet := range mw.wallets { +// if wallet.IsAccountMixerActive() { +// unmixedAccount := wallet.ReadInt32ConfigValueForKey(AccountMixerUnmixedAccount, -1) +// hasMixableOutput, err := wallet.accountHasMixableOutput(unmixedAccount) +// if err != nil { +// log.Errorf("Error checking for mixable outputs: %v", err) +// } + +// if !hasMixableOutput { +// log.Infof("[%d] unmixed account does not have a mixable output, stopping account mixer", wallet.ID) +// err = mw.StopAccountMixer(wallet.ID) +// if err != nil { +// log.Errorf("Error stopping account mixer: %v", err) +// } +// } +// } +// } +// } + +// func (mw *MultiWallet) mempoolTransactionNotification(transaction string) { +// mw.notificationListenersMu.RLock() +// defer mw.notificationListenersMu.RUnlock() + +// for _, txAndBlockNotifcationListener := range mw.txAndBlockNotificationListeners { +// txAndBlockNotifcationListener.OnTransaction(transaction) +// } +// } + +// func (mw *MultiWallet) publishTransactionConfirmed(walletID int, transactionHash string, blockHeight int32) { +// mw.notificationListenersMu.RLock() +// defer mw.notificationListenersMu.RUnlock() + +// for _, txAndBlockNotifcationListener := range mw.txAndBlockNotificationListeners { +// txAndBlockNotifcationListener.OnTransactionConfirmed(walletID, transactionHash, blockHeight) +// } +// } + +// func (mw *MultiWallet) publishBlockAttached(walletID int, blockHeight int32) { 
+// mw.notificationListenersMu.RLock() +// defer mw.notificationListenersMu.RUnlock() + +// for _, txAndBlockNotifcationListener := range mw.txAndBlockNotificationListeners { +// txAndBlockNotifcationListener.OnBlockAttached(walletID, blockHeight) +// } +// } diff --git a/txauthor.go b/wallets/dcr/txauthor.go similarity index 93% rename from txauthor.go rename to wallets/dcr/txauthor.go index 878889983..73dcf2e5c 100644 --- a/txauthor.go +++ b/wallets/dcr/txauthor.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "bytes" @@ -32,24 +32,24 @@ type TxAuthor struct { needsConstruct bool } -func (mw *MultiWallet) NewUnsignedTx(walletID int, sourceAccountNumber int32) (*TxAuthor, error) { - sourceWallet := mw.WalletWithID(walletID) - if sourceWallet == nil { - return nil, fmt.Errorf(ErrWalletNotFound) - } - - _, err := sourceWallet.GetAccount(sourceAccountNumber) - if err != nil { - return nil, err - } - - return &TxAuthor{ - sourceWallet: sourceWallet, - sourceAccountNumber: uint32(sourceAccountNumber), - destinations: make([]TransactionDestination, 0), - needsConstruct: true, - }, nil -} +// func (mw *MultiWallet) NewUnsignedTx(walletID int, sourceAccountNumber int32) (*TxAuthor, error) { +// sourceWallet := mw.WalletWithID(walletID) +// if sourceWallet == nil { +// return nil, fmt.Errorf(ErrWalletNotFound) +// } + +// _, err := sourceWallet.GetAccount(sourceAccountNumber) +// if err != nil { +// return nil, err +// } + +// return &TxAuthor{ +// sourceWallet: sourceWallet, +// sourceAccountNumber: uint32(sourceAccountNumber), +// destinations: make([]TransactionDestination, 0), +// needsConstruct: true, +// }, nil +// } func (tx *TxAuthor) AddSendDestination(address string, atomAmount int64, sendMax bool) error { _, err := stdaddr.DecodeAddress(address, tx.sourceWallet.chainParams) @@ -194,7 +194,7 @@ func (tx *TxAuthor) UseInputs(utxoKeys []string) error { Hash: *txHash, Index: uint32(index), } - outputInfo, err := tx.sourceWallet.Internal().OutputInfo(tx.sourceWallet.shutdownContext(), op) + outputInfo, err := tx.sourceWallet.Internal().OutputInfo(tx.sourceWallet.ShutdownContext(), op) if err != nil { return fmt.Errorf("no valid utxo found for '%s' in the source account", utxoKey) } @@ -251,7 +251,7 @@ func (tx *TxAuthor) Broadcast(privatePassphrase []byte) ([]byte, error) { lock <- time.Time{} }() - ctx := tx.sourceWallet.shutdownContext() + ctx := tx.sourceWallet.ShutdownContext() err = tx.sourceWallet.Internal().Unlock(ctx, privatePassphrase, lock) if err != nil { log.Error(err) @@ -308,16 +308,16 @@ func (tx *TxAuthor) unsignedTransaction() (*txauthor.AuthoredTx, error) { } func (tx *TxAuthor) constructTransaction() (*txauthor.AuthoredTx, error) { - if len(tx.inputs) != 0 { - return tx.constructCustomTransaction() - } + // if len(tx.inputs) != 0 { + // return tx.constructCustomTransaction() + // } var err error var outputs = make([]*wire.TxOut, 0) var outputSelectionAlgorithm w.OutputSelectionAlgorithm = w.OutputSelectionAlgorithmDefault var changeSource txauthor.ChangeSource - ctx := tx.sourceWallet.shutdownContext() + ctx := tx.sourceWallet.ShutdownContext() for _, destination := range tx.destinations { if err := tx.validateSendAmount(destination.SendMax, destination.AtomAmount); err != nil { diff --git a/txindex.go b/wallets/dcr/txindex.go similarity index 79% rename from txindex.go rename to wallets/dcr/txindex.go index ffc29f3b4..a91f92952 100644 --- a/txindex.go +++ b/wallets/dcr/txindex.go @@ -1,13 +1,13 @@ -package dcrlibwallet +package dcr import ( w 
"decred.org/dcrwallet/v2/wallet" "github.com/decred/dcrd/chaincfg/chainhash" - "github.com/planetdecred/dcrlibwallet/walletdata" + "github.com/planetdecred/dcrlibwallet/wallets/dcr/walletdata" ) func (wallet *Wallet) IndexTransactions() error { - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() var totalIndex int32 var txEndHeight uint32 @@ -27,7 +27,7 @@ func (wallet *Wallet) IndexTransactions() error { return false, err } - _, err = wallet.walletDataDB.SaveOrUpdate(&Transaction{}, tx) + _, err = wallet.WalletDataDB.SaveOrUpdate(&Transaction{}, tx) if err != nil { log.Errorf("[%d] Index tx replace tx err : %v", wallet.ID, err) return false, err @@ -38,7 +38,7 @@ func (wallet *Wallet) IndexTransactions() error { if block.Header != nil { txEndHeight = block.Header.Height - err := wallet.walletDataDB.SaveLastIndexPoint(int32(txEndHeight)) + err := wallet.WalletDataDB.SaveLastIndexPoint(int32(txEndHeight)) if err != nil { log.Errorf("[%d] Set tx index end block height error: ", wallet.ID, err) return false, err @@ -55,7 +55,7 @@ func (wallet *Wallet) IndexTransactions() error { } } - beginHeight, err := wallet.walletDataDB.ReadIndexingStartBlock() + beginHeight, err := wallet.WalletDataDB.ReadIndexingStartBlock() if err != nil { log.Errorf("[%d] Get tx indexing start point error: %v", wallet.ID, err) return err @@ -67,14 +67,14 @@ func (wallet *Wallet) IndexTransactions() error { endBlock := w.NewBlockIdentifierFromHeight(endHeight) defer func() { - count, err := wallet.walletDataDB.Count(walletdata.TxFilterAll, wallet.RequiredConfirmations(), endHeight, &Transaction{}) + count, err := wallet.WalletDataDB.Count(walletdata.TxFilterAll, wallet.RequiredConfirmations(), endHeight, &Transaction{}) if err != nil { log.Errorf("[%d] Post-indexing tx count error :%v", wallet.ID, err) } else if count > 0 { log.Infof("[%d] Transaction index finished at %d, %d transaction(s) indexed in total", wallet.ID, endHeight, count) } - err = wallet.walletDataDB.SaveLastIndexPoint(endHeight) + err = wallet.WalletDataDB.SaveLastIndexPoint(endHeight) if err != nil { log.Errorf("[%d] Set tx index end block height error: ", wallet.ID, err) } @@ -84,8 +84,8 @@ func (wallet *Wallet) IndexTransactions() error { return wallet.Internal().GetTransactions(ctx, rangeFn, startBlock, endBlock) } -func (wallet *Wallet) reindexTransactions() error { - err := wallet.walletDataDB.ClearSavedTransactions(&Transaction{}) +func (wallet *Wallet) ReindexTransactions() error { + err := wallet.WalletDataDB.ClearSavedTransactions(&Transaction{}) if err != nil { return err } diff --git a/txparser.go b/wallets/dcr/txparser.go similarity index 94% rename from txparser.go rename to wallets/dcr/txparser.go index 126c8c985..30826bb9c 100644 --- a/txparser.go +++ b/wallets/dcr/txparser.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "fmt" @@ -15,7 +15,7 @@ func (wallet *Wallet) decodeTransactionWithTxSummary(txSummary *w.TransactionSum var blockHeight int32 = BlockHeightInvalid if blockHash != nil { blockIdentifier := w.NewBlockIdentifierFromHash(blockHash) - blockInfo, err := wallet.Internal().BlockInfo(wallet.shutdownContext(), blockIdentifier) + blockInfo, err := wallet.Internal().BlockInfo(wallet.ShutdownContext(), blockIdentifier) if err != nil { log.Error(err) } else { @@ -104,7 +104,7 @@ func (wallet *Wallet) decodeTransactionWithTxSummary(txSummary *w.TransactionSum // update ticket with spender hash ticketPurchaseTx.TicketSpender = decodedTx.Hash - wallet.walletDataDB.SaveOrUpdate(&Transaction{}, 
ticketPurchaseTx) + wallet.WalletDataDB.SaveOrUpdate(&Transaction{}, ticketPurchaseTx) } return decodedTx, nil diff --git a/wallets/dcr/types.go b/wallets/dcr/types.go new file mode 100644 index 000000000..063ea9542 --- /dev/null +++ b/wallets/dcr/types.go @@ -0,0 +1,530 @@ +package dcr + +import ( + "context" + "fmt" + "net" + + "decred.org/dcrwallet/v2/wallet/udb" + + "github.com/decred/dcrd/chaincfg/v3" + "github.com/decred/dcrd/dcrutil/v4" + "github.com/planetdecred/dcrlibwallet/internal/vsp" +) + +// WalletConfig defines options for configuring wallet behaviour. +// This is a subset of the config used by dcrwallet. +type WalletConfig struct { + // General + GapLimit uint32 // Allowed unused address gap between used addresses of accounts + ManualTickets bool // Do not discover new tickets through network synchronization + AllowHighFees bool // Do not perform high fee checks + RelayFee dcrutil.Amount // Transaction fee per kilobyte + AccountGapLimit int // Allowed gap of unused accounts + DisableCoinTypeUpgrades bool // Never upgrade from legacy to SLIP0044 coin type keys + + // CSPP + MixSplitLimit int // Connection limit to CoinShuffle++ server per change amount +} + +type CSPPConfig struct { + CSPPServer string + DialCSPPServer func(ctx context.Context, network, addr string) (net.Conn, error) + MixedAccount uint32 + MixedAccountBranch uint32 + TicketSplitAccount uint32 + ChangeAccount uint32 +} + +type WalletsIterator struct { + currentIndex int + wallets []*Wallet +} + +type BlockInfo struct { + Height int32 + Timestamp int64 +} + +type Amount struct { + AtomValue int64 + DcrValue float64 +} + +type TxFeeAndSize struct { + Fee *Amount + Change *Amount + EstimatedSignedSize int +} + +type UnsignedTransaction struct { + UnsignedTransaction []byte + EstimatedSignedSize int + ChangeIndex int + TotalOutputAmount int64 + TotalPreviousOutputAmount int64 +} + +type Balance struct { + Total int64 + Spendable int64 + ImmatureReward int64 + ImmatureStakeGeneration int64 + LockedByTickets int64 + VotingAuthority int64 + UnConfirmed int64 +} + +type Account struct { + WalletID int + Number int32 + Name string + Balance *Balance + TotalBalance int64 + ExternalKeyCount int32 + InternalKeyCount int32 + ImportedKeyCount int32 +} + +type AccountsIterator struct { + currentIndex int + accounts []*Account +} + +type Accounts struct { + Count int + Acc []*Account + CurrentBlockHash []byte + CurrentBlockHeight int32 +} + +type PeerInfo struct { + ID int32 `json:"id"` + Addr string `json:"addr"` + AddrLocal string `json:"addr_local"` + Services string `json:"services"` + Version uint32 `json:"version"` + SubVer string `json:"sub_ver"` + StartingHeight int64 `json:"starting_height"` + BanScore int32 `json:"ban_score"` +} + +type AccountMixerNotificationListener interface { + OnAccountMixerStarted(walletID int) + OnAccountMixerEnded(walletID int) +} + +/** begin sync-related types */ + +type SyncProgressListener interface { + OnSyncStarted(wasRestarted bool) + OnPeerConnectedOrDisconnected(numberOfConnectedPeers int32) + OnCFiltersFetchProgress(cfiltersFetchProgress *CFiltersFetchProgressReport) + OnHeadersFetchProgress(headersFetchProgress *HeadersFetchProgressReport) + OnAddressDiscoveryProgress(addressDiscoveryProgress *AddressDiscoveryProgressReport) + OnHeadersRescanProgress(headersRescanProgress *HeadersRescanProgressReport) + OnSyncCompleted() + OnSyncCanceled(willRestart bool) + OnSyncEndedWithError(err error) + Debug(debugInfo *DebugInfo) +} + +type GeneralSyncProgress struct { + TotalSyncProgress 
int32 `json:"totalSyncProgress"` + TotalTimeRemainingSeconds int64 `json:"totalTimeRemainingSeconds"` +} + +type CFiltersFetchProgressReport struct { + *GeneralSyncProgress + beginFetchCFiltersTimeStamp int64 + startCFiltersHeight int32 + cfiltersFetchTimeSpent int64 + totalFetchedCFiltersCount int32 + TotalCFiltersToFetch int32 `json:"totalCFiltersToFetch"` + CurrentCFilterHeight int32 `json:"currentCFilterHeight"` + CFiltersFetchProgress int32 `json:"headersFetchProgress"` +} + +type HeadersFetchProgressReport struct { + *GeneralSyncProgress + headersFetchTimeSpent int64 + beginFetchTimeStamp int64 + startHeaderHeight int32 + totalFetchedHeadersCount int32 + TotalHeadersToFetch int32 `json:"totalHeadersToFetch"` + CurrentHeaderHeight int32 `json:"currentHeaderHeight"` + CurrentHeaderTimestamp int64 `json:"currentHeaderTimestamp"` + HeadersFetchProgress int32 `json:"headersFetchProgress"` +} + +type AddressDiscoveryProgressReport struct { + *GeneralSyncProgress + addressDiscoveryStartTime int64 + totalDiscoveryTimeSpent int64 + AddressDiscoveryProgress int32 `json:"addressDiscoveryProgress"` + WalletID int `json:"walletID"` +} + +type HeadersRescanProgressReport struct { + *GeneralSyncProgress + TotalHeadersToScan int32 `json:"totalHeadersToScan"` + CurrentRescanHeight int32 `json:"currentRescanHeight"` + RescanProgress int32 `json:"rescanProgress"` + RescanTimeRemaining int64 `json:"rescanTimeRemaining"` + WalletID int `json:"walletID"` +} + +type DebugInfo struct { + TotalTimeElapsed int64 + TotalTimeRemaining int64 + CurrentStageTimeElapsed int64 + CurrentStageTimeRemaining int64 +} + +/** end sync-related types */ + +/** begin tx-related types */ + +type TxAndBlockNotificationListener interface { + OnTransaction(transaction string) + OnBlockAttached(walletID int, blockHeight int32) + OnTransactionConfirmed(walletID int, hash string, blockHeight int32) +} + +// asyncTxAndBlockNotificationListener is a TxAndBlockNotificationListener that +// triggers notifcation callbacks asynchronously. +type asyncTxAndBlockNotificationListener struct { + l TxAndBlockNotificationListener +} + +// OnTransaction satisfies the TxAndBlockNotificationListener interface and +// starts a goroutine to actually handle the notification using the embedded +// listener. +func (asyncTxBlockListener *asyncTxAndBlockNotificationListener) OnTransaction(transaction string) { + go asyncTxBlockListener.l.OnTransaction(transaction) +} + +// OnBlockAttached satisfies the TxAndBlockNotificationListener interface and +// starts a goroutine to actually handle the notification using the embedded +// listener. +func (asyncTxBlockListener *asyncTxAndBlockNotificationListener) OnBlockAttached(walletID int, blockHeight int32) { + go asyncTxBlockListener.l.OnBlockAttached(walletID, blockHeight) +} + +// OnTransactionConfirmed satisfies the TxAndBlockNotificationListener interface +// and starts a goroutine to actually handle the notification using the embedded +// listener. +func (asyncTxBlockListener *asyncTxAndBlockNotificationListener) OnTransactionConfirmed(walletID int, hash string, blockHeight int32) { + go asyncTxBlockListener.l.OnTransactionConfirmed(walletID, hash, blockHeight) +} + +type BlocksRescanProgressListener interface { + OnBlocksRescanStarted(walletID int) + OnBlocksRescanProgress(*HeadersRescanProgressReport) + OnBlocksRescanEnded(walletID int, err error) +} + +// Transaction is used with storm for tx indexing operations. +// For faster queries, the `Hash`, `Type` and `Direction` fields are indexed. 
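TxAndBlockNotificationListener is the callback contract consumers implement to receive mempool, block-attached, and confirmation notifications. A minimal sketch of an implementation that just logs each event; how a listener is registered is not settled by this patch, since the MultiWallet registration helpers earlier in the diff are commented out:

package walletexample

import (
	"log"

	"github.com/planetdecred/dcrlibwallet/wallets/dcr"
)

// logListener logs every transaction and block notification it receives.
type logListener struct{}

// Compile-time check that logListener satisfies the interface.
var _ dcr.TxAndBlockNotificationListener = logListener{}

func (logListener) OnTransaction(transaction string) {
	log.Printf("new mempool transaction: %s", transaction)
}

func (logListener) OnBlockAttached(walletID int, blockHeight int32) {
	log.Printf("[%d] block attached at height %d", walletID, blockHeight)
}

func (logListener) OnTransactionConfirmed(walletID int, hash string, blockHeight int32) {
	log.Printf("[%d] transaction %s confirmed at height %d", walletID, hash, blockHeight)
}

For callers that must not block the notification sender, the asyncTxAndBlockNotificationListener wrapper above dispatches each callback in its own goroutine.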
+type Transaction struct { + WalletID int `json:"walletID"` + Hash string `storm:"id,unique" json:"hash"` + Type string `storm:"index" json:"type"` + Hex string `json:"hex"` + Timestamp int64 `storm:"index" json:"timestamp"` + BlockHeight int32 `storm:"index" json:"block_height"` + TicketSpender string `storm:"index" json:"ticket_spender"` + + MixDenomination int64 `json:"mix_denom"` + MixCount int32 `json:"mix_count"` + + Version int32 `json:"version"` + LockTime int32 `json:"lock_time"` + Expiry int32 `json:"expiry"` + Fee int64 `json:"fee"` + FeeRate int64 `json:"fee_rate"` + Size int `json:"size"` + + Direction int32 `storm:"index" json:"direction"` + Amount int64 `json:"amount"` + Inputs []*TxInput `json:"inputs"` + Outputs []*TxOutput `json:"outputs"` + + // Vote Info + VoteVersion int32 `json:"vote_version"` + LastBlockValid bool `json:"last_block_valid"` + VoteBits string `json:"vote_bits"` + VoteReward int64 `json:"vote_reward"` + TicketSpentHash string `storm:"unique" json:"ticket_spent_hash"` + DaysToVoteOrRevoke int32 `json:"days_to_vote_revoke"` +} + +type TxInput struct { + PreviousTransactionHash string `json:"previous_transaction_hash"` + PreviousTransactionIndex int32 `json:"previous_transaction_index"` + PreviousOutpoint string `json:"previous_outpoint"` + Amount int64 `json:"amount"` + AccountNumber int32 `json:"account_number"` +} + +type TxOutput struct { + Index int32 `json:"index"` + Amount int64 `json:"amount"` + Version int32 `json:"version"` + ScriptType string `json:"script_type"` + Address string `json:"address"` + Internal bool `json:"internal"` + AccountNumber int32 `json:"account_number"` +} + +// TxInfoFromWallet contains tx data that relates to the querying wallet. +// This info is used with `DecodeTransaction` to compose the entire details of a transaction. +type TxInfoFromWallet struct { + WalletID int + Hex string + Timestamp int64 + BlockHeight int32 + Inputs []*WalletInput + Outputs []*WalletOutput +} + +type WalletInput struct { + Index int32 `json:"index"` + AmountIn int64 `json:"amount_in"` + *WalletAccount +} + +type WalletOutput struct { + Index int32 `json:"index"` + AmountOut int64 `json:"amount_out"` + Internal bool `json:"internal"` + Address string `json:"address"` + *WalletAccount +} + +type WalletAccount struct { + AccountNumber int32 `json:"account_number"` + AccountName string `json:"account_name"` +} + +type TransactionDestination struct { + Address string + AtomAmount int64 + SendMax bool +} + +type TransactionOverview struct { + All int + Sent int + Received int + Transferred int + Mixed int + Staking int + Coinbase int +} + +/** end tx-related types */ + +/** begin ticket-related types */ + +type TicketPriceResponse struct { + TicketPrice int64 + Height int32 +} + +type StakingOverview struct { + All int + Unmined int + Immature int + Live int + Voted int + Revoked int + Expired int +} + +// TicketBuyerConfig defines configuration parameters for running +// an automated ticket buyer. +type TicketBuyerConfig struct { + VspHost string + PurchaseAccount int32 + BalanceToMaintain int64 + + vspClient *vsp.Client +} + +// VSPFeeStatus represents the current fee status of a ticket. +type VSPFeeStatus uint8 + +const ( + // VSPFeeProcessStarted represents the state which process has being + // called but fee still not paid. + VSPFeeProcessStarted VSPFeeStatus = iota + // VSPFeeProcessPaid represents the state where the process has being + // paid, but not published. 
+ VSPFeeProcessPaid + VSPFeeProcessErrored + // VSPFeeProcessConfirmed represents the state where the fee has been + // confirmed by the VSP. + VSPFeeProcessConfirmed +) + +// String returns a human-readable interpretation of the vsp fee status. +func (status VSPFeeStatus) String() string { + switch udb.FeeStatus(status) { + case udb.VSPFeeProcessStarted: + return "fee process started" + case udb.VSPFeeProcessPaid: + return "fee paid" + case udb.VSPFeeProcessErrored: + return "fee payment errored" + case udb.VSPFeeProcessConfirmed: + return "fee confirmed by vsp" + default: + return fmt.Sprintf("invalid fee status %d", status) + } +} + +// VSPTicketInfo is information about a ticket that is assigned to a VSP. +type VSPTicketInfo struct { + VSP string + FeeTxHash string + FeeTxStatus VSPFeeStatus + // ConfirmedByVSP is nil if the ticket status could not be obtained + // from the VSP, false if the VSP hasn't confirmed the fee and true + // if the VSP has fully registered the ticket. + ConfirmedByVSP *bool + // VoteChoices is only set if the ticket status was obtained from the + // VSP. + VoteChoices map[string]string +} + +/** end ticket-related types */ + +/** begin politeia types */ +type Proposal struct { + ID int `storm:"id,increment"` + Token string `json:"token" storm:"unique"` + Category int32 `json:"category" storm:"index"` + Name string `json:"name"` + State int32 `json:"state"` + Status int32 `json:"status"` + Timestamp int64 `json:"timestamp"` + UserID string `json:"userid"` + Username string `json:"username"` + NumComments int32 `json:"numcomments"` + Version string `json:"version"` + PublishedAt int64 `json:"publishedat"` + IndexFile string `json:"indexfile"` + IndexFileVersion string `json:"fileversion"` + VoteStatus int32 `json:"votestatus"` + VoteApproved bool `json:"voteapproved"` + YesVotes int32 `json:"yesvotes"` + NoVotes int32 `json:"novotes"` + EligibleTickets int32 `json:"eligibletickets"` + QuorumPercentage int32 `json:"quorumpercentage"` + PassPercentage int32 `json:"passpercentage"` +} + +type ProposalOverview struct { + All int32 + Discussion int32 + Voting int32 + Approved int32 + Rejected int32 + Abandoned int32 +} + +type ProposalVoteDetails struct { + EligibleTickets []*EligibleTicket + Votes []*ProposalVote + YesVotes int32 + NoVotes int32 +} + +type EligibleTicket struct { + Hash string + Address string +} + +type ProposalVote struct { + Ticket *EligibleTicket + Bit string +} + +type ProposalNotificationListener interface { + OnProposalsSynced() + OnNewProposal(proposal *Proposal) + OnProposalVoteStarted(proposal *Proposal) + OnProposalVoteFinished(proposal *Proposal) +} + +/** end politea proposal types */ + +type UnspentOutput struct { + TransactionHash []byte + OutputIndex uint32 + OutputKey string + ReceiveTime int64 + Amount int64 + FromCoinbase bool + Tree int32 + PkScript []byte + Addresses string // separated by commas + Confirmations int32 +} + +/** end politea proposal types */ + +/** begin vspd-related types */ +type VspInfoResponse struct { + APIVersions []int64 `json:"apiversions"` + Timestamp int64 `json:"timestamp"` + PubKey []byte `json:"pubkey"` + FeePercentage float64 `json:"feepercentage"` + VspClosed bool `json:"vspclosed"` + Network string `json:"network"` + VspdVersion string `json:"vspdversion"` + Voting int64 `json:"voting"` + Voted int64 `json:"voted"` + Revoked int64 `json:"revoked"` +} + +type VSP struct { + Host string + *VspInfoResponse +} + +/** end vspd-related types */ + +/** begin agenda types */ + +// Agenda contains 
information about a consensus deployment +type Agenda struct { + AgendaID string `json:"agenda_id"` + Description string `json:"description"` + Mask uint32 `json:"mask"` + Choices []chaincfg.Choice `json:"choices"` + VotingPreference string `json:"voting_preference"` + StartTime int64 `json:"start_time"` + ExpireTime int64 `json:"expire_time"` + Status string `json:"status"` +} + +// DcrdataAgenda models agenda information for the active network from the +// dcrdata api https://dcrdata.decred.org/api/agendas for mainnet or +// https://testnet.decred.org/api/agendas for testnet. +type DcrdataAgenda struct { + Name string `json:"name"` + Description string `json:"-"` + Status string `json:"status"` + VotingStarted int64 `json:"-"` + VotingDone int64 `json:"-"` + Activated int64 `json:"-"` + HardForked int64 `json:"-"` + StartTime string `json:"-"` + ExpireTime string `json:"-"` + VoteVersion uint32 `json:"-"` + Mask uint16 `json:"-"` +} + +/** end agenda types */ diff --git a/wallets/dcr/utils.go b/wallets/dcr/utils.go new file mode 100644 index 000000000..c23e66ab9 --- /dev/null +++ b/wallets/dcr/utils.go @@ -0,0 +1,502 @@ +package dcr + +import ( + "context" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "io/ioutil" + "math" + "net" + "net/http" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + // "decred.org/dcrwallet/v2/errors" + "decred.org/dcrwallet/v2/wallet" + "decred.org/dcrwallet/v2/wallet/txrules" + "decred.org/dcrwallet/v2/walletseed" + "github.com/decred/dcrd/chaincfg/chainhash" + "github.com/decred/dcrd/chaincfg/v3" + "github.com/decred/dcrd/dcrutil/v4" + "github.com/decred/dcrd/hdkeychain/v3" + "github.com/decred/dcrd/wire" + "github.com/planetdecred/dcrlibwallet/internal/loader" +) + +const ( + walletDbName = "wallet.db" + + // FetchPercentage is used to increase the initial estimate gotten during cfilters stage + FetchPercentage = 0.38 + + // Use 10% of estimated total headers fetch time to estimate rescan time + RescanPercentage = 0.1 + + // Use 80% of estimated total headers fetch time to estimate address discovery time + DiscoveryPercentage = 0.8 + + MaxAmountAtom = dcrutil.MaxAmount + MaxAmountDcr = dcrutil.MaxAmount / dcrutil.AtomsPerCoin + + TestnetHDPath = "m / 44' / 1' / " + LegacyTestnetHDPath = "m / 44’ / 11’ / " + MainnetHDPath = "m / 44' / 42' / " + LegacyMainnetHDPath = "m / 44’ / 20’ / " + + DefaultRequiredConfirmations = 2 + + LongAbbreviationFormat = "long" + ShortAbbreviationFormat = "short" + ShortestAbbreviationFormat = "shortest" +) + +// func (mw *MultiWallet) RequiredConfirmations() int32 { +// spendUnconfirmed := mw.ReadBoolConfigValueForKey(SpendUnconfirmedConfigKey, false) +// if spendUnconfirmed { +// return 0 +// } +// return DefaultRequiredConfirmations +// } + +func (wallet *Wallet) RequiredConfirmations() int32 { + var spendUnconfirmed bool + wallet.readUserConfigValue(true, SpendUnconfirmedConfigKey, &spendUnconfirmed) + if spendUnconfirmed { + return 0 + } + return DefaultRequiredConfirmations +} + +// func (mw *MultiWallet) listenForShutdown() { + +// mw.cancelFuncs = make([]context.CancelFunc, 0) +// mw.shuttingDown = make(chan bool) +// go func() { +// <-mw.shuttingDown +// for _, cancel := range mw.cancelFuncs { +// cancel() +// } +// }() +// } + +func (wallet *Wallet) ShutdownContextWithCancel() (context.Context, context.CancelFunc) { + ctx, cancel := context.WithCancel(context.Background()) + wallet.cancelFuncs = append(wallet.cancelFuncs, cancel) + return ctx, cancel +} + +func (wallet *Wallet) 
ShutdownContext() (ctx context.Context) { + ctx, _ = wallet.ShutdownContextWithCancel() + return +} + +func (wallet *Wallet) contextWithShutdownCancel() (context.Context, context.CancelFunc) { + ctx, cancel := context.WithCancel(context.Background()) + wallet.cancelFuncs = append(wallet.cancelFuncs, cancel) + return ctx, cancel +} + +// func (mw *MultiWallet) ValidateExtPubKey(extendedPubKey string) error { +// _, err := hdkeychain.NewKeyFromString(extendedPubKey, mw.chainParams) +// if err != nil { +// if err == hdkeychain.ErrInvalidChild { +// return errors.New(ErrUnusableSeed) +// } + +// return errors.New(ErrInvalid) +// } + +// return nil +// } + +func NormalizeAddress(addr string, defaultPort string) (string, error) { + // If the first SplitHostPort errors because of a missing port and not + // for an invalid host, add the port. If the second SplitHostPort + // fails, then a port is not missing and the original error should be + // returned. + host, port, origErr := net.SplitHostPort(addr) + if origErr == nil { + return net.JoinHostPort(host, port), nil + } + addr = net.JoinHostPort(addr, defaultPort) + _, _, err := net.SplitHostPort(addr) + if err != nil { + return "", origErr + } + return addr, nil +} + +// For use with gomobile bind, +// doesn't support the alternative `GenerateSeed` function because it returns more than 2 types. +func GenerateSeed() (string, error) { + seed, err := hdkeychain.GenerateSeed(hdkeychain.RecommendedSeedLen) + if err != nil { + return "", err + } + + return walletseed.EncodeMnemonic(seed), nil +} + +func VerifySeed(seedMnemonic string) bool { + _, err := walletseed.DecodeUserInput(seedMnemonic) + return err == nil +} + +// ExtractDateOrTime returns the date represented by the timestamp as a date string if the timestamp is over 24 hours ago. +// Otherwise, the time alone is returned as a string. 
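GenerateSeed, VerifySeed, and NormalizeAddress above are stateless helpers and can be exercised without a wallet instance. A small sketch; the port value is only a placeholder:

package walletexample

import (
	"fmt"
	"log"

	"github.com/planetdecred/dcrlibwallet/wallets/dcr"
)

// seedAndAddressExample generates a mnemonic seed, verifies it decodes, and
// normalizes a peer address that is missing a port.
func seedAndAddressExample() {
	seed, err := dcr.GenerateSeed()
	if err != nil {
		log.Fatalf("generate seed: %v", err)
	}
	fmt.Println("seed is valid:", dcr.VerifySeed(seed)) // true

	addr, err := dcr.NormalizeAddress("127.0.0.1", "9108") // placeholder default port
	if err != nil {
		log.Fatalf("normalize address: %v", err)
	}
	fmt.Println(addr) // 127.0.0.1:9108
}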
+func ExtractDateOrTime(timestamp int64) string { + utcTime := time.Unix(timestamp, 0).UTC() + if time.Now().UTC().Sub(utcTime).Hours() > 24 { + return utcTime.Format("2006-01-02") + } else { + return utcTime.Format("15:04:05") + } +} + +func FormatUTCTime(timestamp int64) string { + return time.Unix(timestamp, 0).UTC().Format("2006-01-02 15:04:05") +} + +func AmountCoin(amount int64) float64 { + return dcrutil.Amount(amount).ToCoin() +} + +func AmountAtom(f float64) int64 { + amount, err := dcrutil.NewAmount(f) + if err != nil { + log.Error(err) + return -1 + } + return int64(amount) +} + +func EncodeHex(hexBytes []byte) string { + return hex.EncodeToString(hexBytes) +} + +func EncodeBase64(text []byte) string { + return base64.StdEncoding.EncodeToString(text) +} + +func DecodeBase64(base64Text string) ([]byte, error) { + b, err := base64.StdEncoding.DecodeString(base64Text) + if err != nil { + return nil, err + } + + return b, nil +} + +func ShannonEntropy(text string) (entropy float64) { + if text == "" { + return 0 + } + for i := 0; i < 256; i++ { + px := float64(strings.Count(text, string(byte(i)))) / float64(len(text)) + if px > 0 { + entropy += -px * math.Log2(px) + } + } + return entropy +} + +// func TransactionDirectionName(direction int32) string { +// switch direction { +// case TxDirectionSent: +// return "Sent" +// case TxDirectionReceived: +// return "Received" +// case TxDirectionTransferred: +// return "Yourself" +// default: +// return "invalid" +// } +// } + +func CalculateTotalTimeRemaining(timeRemainingInSeconds int64) string { + minutes := timeRemainingInSeconds / 60 + if minutes > 0 { + return fmt.Sprintf("%d min", minutes) + } + return fmt.Sprintf("%d sec", timeRemainingInSeconds) +} + +func CalculateDaysBehind(lastHeaderTime int64) string { + diff := time.Since(time.Unix(lastHeaderTime, 0)) + daysBehind := int(math.Round(diff.Hours() / 24)) + if daysBehind == 0 { + return "<1 day" + } else if daysBehind == 1 { + return "1 day" + } else { + return fmt.Sprintf("%d days", daysBehind) + } +} + +func StringsToHashes(h []string) ([]*chainhash.Hash, error) { + hashes := make([]*chainhash.Hash, 0, len(h)) + for _, v := range h { + hash, err := chainhash.NewHashFromStr(v) + if err != nil { + return nil, err + } + hashes = append(hashes, hash) + } + return hashes, nil +} + +func roundUp(n float64) int32 { + return int32(math.Round(n)) +} + +func WalletUniqueConfigKey(walletID int, key string) string { + return fmt.Sprintf("%d%s", walletID, key) +} + +func WalletExistsAt(directory string) bool { + walletDbFilePath := filepath.Join(directory, walletDbName) + exists, err := fileExists(walletDbFilePath) + if err != nil { + log.Errorf("wallet exists check error: %v", err) + } + return exists +} + +func fileExists(filePath string) (bool, error) { + _, err := os.Stat(filePath) + if err != nil { + if os.IsNotExist(err) { + return false, nil + } + return false, err + } + return true, nil +} + +func moveFile(sourcePath, destinationPath string) error { + if exists, _ := fileExists(sourcePath); exists { + return os.Rename(sourcePath, destinationPath) + } + return nil +} + +// done returns whether the context's Done channel was closed due to +// cancellation or exceeded deadline. 
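The conversion and display helpers above are pure functions, so their behaviour is easy to pin down with a few calls. A short sketch with arbitrary inputs and the expected outputs noted in comments:

package walletexample

import (
	"fmt"
	"time"

	"github.com/planetdecred/dcrlibwallet/wallets/dcr"
)

// formattingExample demonstrates amount conversion and the time and encoding
// display helpers.
func formattingExample() {
	atoms := dcr.AmountAtom(1.5)       // 150000000 atoms
	fmt.Println(dcr.AmountCoin(atoms)) // 1.5

	fmt.Println(dcr.CalculateTotalTimeRemaining(90)) // "1 min"

	// A best block header timestamped two days ago reports "2 days" behind.
	fmt.Println(dcr.CalculateDaysBehind(time.Now().Add(-48 * time.Hour).Unix()))

	fmt.Println(dcr.EncodeBase64([]byte("dcr"))) // "ZGNy"
}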
+func done(ctx context.Context) bool { + select { + case <-ctx.Done(): + return true + default: + return false + } +} + +func backupFile(fileName string, suffix int) (newName string, err error) { + newName = fileName + ".bak" + strconv.Itoa(suffix) + exists, err := fileExists(newName) + if err != nil { + return "", err + } else if exists { + return backupFile(fileName, suffix+1) + } + + err = moveFile(fileName, newName) + if err != nil { + return "", err + } + + return newName, nil +} + +func initWalletLoader(chainParams *chaincfg.Params, walletDataDir, walletDbDriver string) *loader.Loader { + // TODO: Allow users provide values to override these defaults. + cfg := &WalletConfig{ + GapLimit: 20, + AllowHighFees: false, + RelayFee: txrules.DefaultRelayFeePerKb, + AccountGapLimit: wallet.DefaultAccountGapLimit, + DisableCoinTypeUpgrades: false, + ManualTickets: false, + MixSplitLimit: 10, + } + + stakeOptions := &loader.StakeOptions{ + VotingEnabled: false, + AddressReuse: false, + VotingAddress: nil, + } + walletLoader := loader.NewLoader(chainParams, walletDataDir, stakeOptions, + cfg.GapLimit, cfg.AllowHighFees, cfg.RelayFee, cfg.AccountGapLimit, + cfg.DisableCoinTypeUpgrades, cfg.ManualTickets, cfg.MixSplitLimit) + + if walletDbDriver != "" { + walletLoader.SetDatabaseDriver(walletDbDriver) + } + + return walletLoader +} + +// makePlural is used with the TimeElapsed function. makePlural checks if the arguments passed is > 1, +// if true, it adds "s" after the given time to make it plural +func makePlural(x float64) string { + if int(x) == 1 { + return "" + } + return "s" +} + +// TimeElapsed returns the formatted time diffrence between two times as a string. +// If the argument `fullTime` is set to true, then the full time available is returned e.g 3 hours, 2 minutes, 20 seconds ago, +// as opposed to 3 hours ago. 
+// If the argument `abbreviationFormat` is set to `long` the time format is e.g 2 minutes +// If the argument `abbreviationFormat` is set to `short` the time format is e.g 2 mins +// If the argument `abbreviationFormat` is set to `shortest` the time format is e.g 2 m +func TimeElapsed(now, then time.Time, abbreviationFormat string, fullTime bool) string { + var parts []string + var text string + + year2, month2, day2 := now.Date() + hour2, minute2, second2 := now.Clock() + + year1, month1, day1 := then.Date() + hour1, minute1, second1 := then.Clock() + + year := math.Abs(float64(year2 - year1)) + month := math.Abs(float64(month2 - month1)) + day := math.Abs(float64(day2 - day1)) + hour := math.Abs(float64(hour2 - hour1)) + minute := math.Abs(float64(minute2 - minute1)) + second := math.Abs(float64(second2 - second1)) + + week := math.Floor(day / 7) + + if year > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(year))+" year"+makePlural(year)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(year))+" yr"+makePlural(year)) + } else if abbreviationFormat == ShortestAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(year))+" y") + } + } + + if month > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(month))+" month"+makePlural(month)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(month))+" mon"+makePlural(month)) + } else if abbreviationFormat == ShortestAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(month))+" m") + } + } + + if week > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(week))+" week"+makePlural(week)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(week))+" wk"+makePlural(week)) + } else if abbreviationFormat == ShortestAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(week))+" w") + } + } + + if day > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(day))+" day"+makePlural(day)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(day))+" dy"+makePlural(day)) + } else if abbreviationFormat == ShortestAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(day))+" d") + } + } + + if hour > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(hour))+" hour"+makePlural(hour)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(hour))+" hr"+makePlural(hour)) + } else if abbreviationFormat == ShortestAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(hour))+" h") + } + } + + if minute > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(minute))+" minute"+makePlural(minute)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(minute))+" min"+makePlural(minute)) + } else if abbreviationFormat == ShortestAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(minute))+" mi") + } + } + + if second > 0 { + if abbreviationFormat == LongAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(second))+" second"+makePlural(second)) + } else if abbreviationFormat == ShortAbbreviationFormat { + parts = append(parts, strconv.Itoa(int(second))+" 
+		} else if abbreviationFormat == ShortestAbbreviationFormat {
+			parts = append(parts, strconv.Itoa(int(second))+" s")
+		}
+	}
+
+	if now.After(then) {
+		text = " ago"
+	} else {
+		text = " after"
+	}
+
+	if len(parts) == 0 {
+		return "just now"
+	}
+
+	if fullTime {
+		return strings.Join(parts, ", ") + text
+	}
+	return parts[0] + text
+}
+
+// voteVersion was borrowed from upstream and must always be kept in
+// sync with the upstream method. This is the link to the upstream version:
+// https://github.com/decred/dcrwallet/blob/master/wallet/wallet.go#L266
+func voteVersion(params *chaincfg.Params) uint32 {
+	switch params.Net {
+	case wire.MainNet:
+		return 9
+	case 0x48e7a065: // TestNet2
+		return 6
+	case wire.TestNet3:
+		return 10
+	case wire.SimNet:
+		return 10
+	default:
+		return 1
+	}
+}
+
+// HttpGet performs an HTTP GET request to the specified URL and unmarshals
+// the JSON response into respObj. It returns the response, the raw response
+// bytes and any error encountered.
+func HttpGet(url string, respObj interface{}) (*http.Response, []byte, error) {
+	rq := new(http.Client)
+	resp, err := rq.Get(url)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	respBytes, err := ioutil.ReadAll(resp.Body)
+	resp.Body.Close()
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		return resp, respBytes, fmt.Errorf("%d response from server: %v", resp.StatusCode, string(respBytes))
+	}
+
+	err = json.Unmarshal(respBytes, respObj)
+	return resp, respBytes, err
+}
diff --git a/utxo.go b/wallets/dcr/utxo.go
similarity index 98%
rename from utxo.go
rename to wallets/dcr/utxo.go
index 5e8775c49..d0de847e1 100644
--- a/utxo.go
+++ b/wallets/dcr/utxo.go
@@ -1,4 +1,4 @@
-package dcrlibwallet
+package dcr
 
 import (
 	"fmt"
@@ -67,7 +67,7 @@ func (tx *TxAuthor) constructCustomTransaction() (*txauthor.AuthoredTx, error) {
 	// if no change destination is provided and
 	// no recipient is set to receive max amount.
 	nextInternalAddress := func() (string, error) {
-		ctx := tx.sourceWallet.shutdownContext()
+		ctx := tx.sourceWallet.ShutdownContext()
 		addr, err := tx.sourceWallet.Internal().NewChangeAddress(ctx, tx.sourceAccountNumber)
 		if err != nil {
 			return "", err
diff --git a/wallets/dcr/vsp.go b/wallets/dcr/vsp.go
new file mode 100644
index 000000000..fe0054d4b
--- /dev/null
+++ b/wallets/dcr/vsp.go
@@ -0,0 +1,192 @@
+package dcr
+
+import (
+	// "context"
+	"crypto/ed25519"
+	"encoding/base64"
+	"fmt"
+	"strings"
+
+	"decred.org/dcrwallet/v2/errors"
+	"github.com/planetdecred/dcrlibwallet/internal/vsp"
+)
+
+// VSPClient loads or creates a VSP client instance for the specified host.
+func (wallet *Wallet) VSPClient(host string, pubKey []byte) (*vsp.Client, error) {
+	wallet.vspClientsMu.Lock()
+	defer wallet.vspClientsMu.Unlock()
+	client, ok := wallet.vspClients[host]
+	if ok {
+		return client, nil
+	}
+
+	cfg := vsp.Config{
+		URL:    host,
+		PubKey: base64.StdEncoding.EncodeToString(pubKey),
+		Dialer: nil, // optional, but consider providing a value
+		Wallet: wallet.Internal(),
+	}
+	client, err := vsp.New(cfg)
+	if err != nil {
+		return nil, err
+	}
+	wallet.vspClients[host] = client
+	return client, nil
+}
+
+// KnownVSPs returns a list of known VSPs. This list may be updated by calling
+// ReloadVSPList. This method is safe for concurrent access.
+// func (mw *MultiWallet) KnownVSPs() []*VSP {
+// 	mw.vspMu.RLock()
+// 	defer mw.vspMu.RUnlock()
+// 	return mw.vsps // TODO: Return a copy.
+// }
+
+// SaveVSP marks a VSP as known and will be subsequently included as part of
+// known VSPs.
+// func (mw *MultiWallet) SaveVSP(host string) (err error) { +// // check if host already exists +// vspDbData := mw.getVSPDBData() +// for _, savedHost := range vspDbData.SavedHosts { +// if savedHost == host { +// return fmt.Errorf("duplicate host %s", host) +// } +// } + +// // validate host network +// info, err := vspInfo(host) +// if err != nil { +// return err +// } + +// // TODO: defaultVSPs() uses strings.Contains(network, vspInfo.Network). +// if info.Network != mw.NetType() { +// return fmt.Errorf("invalid net %s", info.Network) +// } + +// vspDbData.SavedHosts = append(vspDbData.SavedHosts, host) +// mw.updateVSPDBData(vspDbData) + +// mw.vspMu.Lock() +// mw.vsps = append(mw.vsps, &VSP{Host: host, VspInfoResponse: info}) +// mw.vspMu.Unlock() + +// return +// } + +// LastUsedVSP returns the host of the last used VSP, as saved by the +// SaveLastUsedVSP() method. +// func (mw *MultiWallet) LastUsedVSP() string { +// return mw.getVSPDBData().LastUsedVSP +// } + +// SaveLastUsedVSP saves the host of the last used VSP. +// func (mw *MultiWallet) SaveLastUsedVSP(host string) { +// vspDbData := mw.getVSPDBData() +// vspDbData.LastUsedVSP = host +// mw.updateVSPDBData(vspDbData) +// } + +type vspDbData struct { + SavedHosts []string + LastUsedVSP string +} + +// func (mw *MultiWallet) getVSPDBData() *vspDbData { +// vspDbData := new(vspDbData) +// mw.ReadUserConfigValue(KnownVSPsConfigKey, vspDbData) +// return vspDbData +// } + +// func (mw *MultiWallet) updateVSPDBData(data *vspDbData) { +// mw.SaveUserConfigValue(KnownVSPsConfigKey, data) +// } + +// ReloadVSPList reloads the list of known VSPs. +// This method makes multiple network calls; should be called in a goroutine +// to prevent blocking the UI thread. +// func (mw *MultiWallet) ReloadVSPList(ctx context.Context) { +// log.Debugf("Reloading list of known VSPs") +// defer log.Debugf("Reloaded list of known VSPs") + +// vspDbData := mw.getVSPDBData() +// vspList := make(map[string]*VspInfoResponse) +// for _, host := range vspDbData.SavedHosts { +// vspInfo, err := vspInfo(host) +// if err != nil { +// // User saved this VSP. Log an error message. +// log.Errorf("get vsp info error for %s: %v", host, err) +// } else { +// vspList[host] = vspInfo +// } +// if ctx.Err() != nil { +// return // context canceled, abort +// } +// } + +// otherVSPHosts, err := defaultVSPs(mw.NetType()) +// if err != nil { +// log.Debugf("get default vsp list error: %v", err) +// } +// for _, host := range otherVSPHosts { +// if _, wasAdded := vspList[host]; wasAdded { +// continue +// } +// vspInfo, err := vspInfo(host) +// if err != nil { +// log.Debugf("vsp info error for %s: %v\n", host, err) // debug only, user didn't request this VSP +// } else { +// vspList[host] = vspInfo +// } +// if ctx.Err() != nil { +// return // context canceled, abort +// } +// } + +// mw.vspMu.Lock() +// mw.vsps = make([]*VSP, 0, len(vspList)) +// for host, info := range vspList { +// mw.vsps = append(mw.vsps, &VSP{Host: host, VspInfoResponse: info}) +// } +// mw.vspMu.Unlock() +// } + +func vspInfo(vspHost string) (*VspInfoResponse, error) { + vspInfoResponse := new(VspInfoResponse) + resp, respBytes, err := HttpGet(vspHost+"/api/v3/vspinfo", vspInfoResponse) + if err != nil { + return nil, err + } + + // Validate server response. 
+ sigStr := resp.Header.Get("VSP-Server-Signature") + sig, err := base64.StdEncoding.DecodeString(sigStr) + if err != nil { + return nil, fmt.Errorf("error validating VSP signature: %v", err) + } + if !ed25519.Verify(vspInfoResponse.PubKey, respBytes, sig) { + return nil, errors.New("bad signature from VSP") + } + + return vspInfoResponse, nil +} + +// defaultVSPs returns a list of known VSPs. +func defaultVSPs(network string) ([]string, error) { + var vspInfoResponse map[string]*VspInfoResponse + _, _, err := HttpGet("https://api.decred.org/?c=vsp", &vspInfoResponse) + if err != nil { + return nil, err + } + + // The above API does not return the pubKeys for the + // VSPs. Only return the host since we'll still need + // to make another API call to get the VSP pubKeys. + vsps := make([]string, 0) + for url, vspInfo := range vspInfoResponse { + if strings.Contains(network, vspInfo.Network) { + vsps = append(vsps, "https://"+url) + } + } + return vsps, nil +} diff --git a/wallet.go b/wallets/dcr/wallet.go similarity index 76% rename from wallet.go rename to wallets/dcr/wallet.go index 593b92a1e..1e4ddd582 100644 --- a/wallet.go +++ b/wallets/dcr/wallet.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "context" @@ -12,34 +12,38 @@ import ( "decred.org/dcrwallet/v2/errors" w "decred.org/dcrwallet/v2/wallet" "decred.org/dcrwallet/v2/walletseed" + "github.com/asdine/storm" "github.com/decred/dcrd/chaincfg/v3" "github.com/planetdecred/dcrlibwallet/internal/loader" "github.com/planetdecred/dcrlibwallet/internal/vsp" - "github.com/planetdecred/dcrlibwallet/walletdata" + "github.com/planetdecred/dcrlibwallet/wallets/dcr/walletdata" ) type Wallet struct { - ID int `storm:"id,increment"` - Name string `storm:"unique"` - CreatedAt time.Time `storm:"index"` - DbDriver string + ID int `storm:"id,increment"` + Name string `storm:"unique"` + CreatedAt time.Time `storm:"index"` + DbDriver string + rootDir string + db *storm.DB + EncryptedSeed []byte IsRestored bool HasDiscoveredAccounts bool PrivatePassphraseType int32 chainParams *chaincfg.Params - dataDir string + DataDir string loader *loader.Loader - walletDataDB *walletdata.DB + WalletDataDB *walletdata.DB - synced bool - syncing bool - waitingForHeaders bool + Synced bool + Syncing bool + WaitingForHeaders bool shuttingDown chan bool cancelFuncs []context.CancelFunc - cancelAccountMixer context.CancelFunc + CancelAccountMixer context.CancelFunc cancelAutoTicketBuyerMu sync.Mutex cancelAutoTicketBuyer context.CancelFunc @@ -57,34 +61,38 @@ type Wallet struct { // This function is ideally assigned when the `wallet.prepare` method is // called from a MultiWallet instance. readUserConfigValue configReadFn + + notificationListenersMu sync.RWMutex + syncData *SyncData + accountMixerNotificationListener map[string]AccountMixerNotificationListener } // prepare gets a wallet ready for use by opening the transactions index database // and initializing the wallet loader which can be used subsequently to create, // load and unload the wallet. 
-func (wallet *Wallet) prepare(rootDir string, chainParams *chaincfg.Params, +func (wallet *Wallet) Prepare(rootDir string, chainParams *chaincfg.Params, setUserConfigValueFn configSaveFn, readUserConfigValueFn configReadFn) (err error) { wallet.chainParams = chainParams - wallet.dataDir = filepath.Join(rootDir, strconv.Itoa(wallet.ID)) + wallet.DataDir = filepath.Join(rootDir, strconv.Itoa(wallet.ID)) wallet.vspClients = make(map[string]*vsp.Client) wallet.setUserConfigValue = setUserConfigValueFn wallet.readUserConfigValue = readUserConfigValueFn // open database for indexing transactions for faster loading - walletDataDBPath := filepath.Join(wallet.dataDir, walletdata.DbName) - oldTxDBPath := filepath.Join(wallet.dataDir, walletdata.OldDbName) + walletDataDBPath := filepath.Join(wallet.DataDir, walletdata.DbName) + oldTxDBPath := filepath.Join(wallet.DataDir, walletdata.OldDbName) if exists, _ := fileExists(oldTxDBPath); exists { moveFile(oldTxDBPath, walletDataDBPath) } - wallet.walletDataDB, err = walletdata.Initialize(walletDataDBPath, chainParams, &Transaction{}) + wallet.WalletDataDB, err = walletdata.Initialize(walletDataDBPath, chainParams, &Transaction{}) if err != nil { log.Error(err.Error()) return err } // init loader - wallet.loader = initWalletLoader(wallet.chainParams, wallet.dataDir, wallet.DbDriver) + wallet.loader = initWalletLoader(wallet.chainParams, wallet.DataDir, wallet.DbDriver) // init cancelFuncs slice to hold cancel functions for long running // operations and start go routine to listen for shutdown signal @@ -102,7 +110,7 @@ func (wallet *Wallet) prepare(rootDir string, chainParams *chaincfg.Params, func (wallet *Wallet) Shutdown() { // Trigger shuttingDown signal to cancel all contexts created with - // `wallet.shutdownContext()` or `wallet.shutdownContextWithCancel()`. + // `wallet.ShutdownContext()` or `wallet.shutdownContextWithCancel()`. 
wallet.shuttingDown <- true if _, loaded := wallet.loader.LoadedWallet(); loaded { @@ -114,8 +122,8 @@ func (wallet *Wallet) Shutdown() { } } - if wallet.walletDataDB != nil { - err := wallet.walletDataDB.Close() + if wallet.WalletDataDB != nil { + err := wallet.WalletDataDB.Close() if err != nil { log.Errorf("tx db closed with error: %v", err) } else { @@ -147,7 +155,7 @@ func (wallet *Wallet) WalletExists() (bool, error) { return wallet.loader.WalletExists() } -func (wallet *Wallet) createWallet(privatePassphrase, seedMnemonic string) error { +func (wallet *Wallet) CreateWallet(privatePassphrase, seedMnemonic string) error { log.Info("Creating Wallet") if len(seedMnemonic) == 0 { return errors.New(ErrEmptySeed) @@ -161,7 +169,7 @@ func (wallet *Wallet) createWallet(privatePassphrase, seedMnemonic string) error return err } - _, err = wallet.loader.CreateNewWallet(wallet.shutdownContext(), pubPass, privPass, seed) + _, err = wallet.loader.CreateNewWallet(wallet.ShutdownContext(), pubPass, privPass, seed) if err != nil { log.Error(err) return err @@ -171,10 +179,10 @@ func (wallet *Wallet) createWallet(privatePassphrase, seedMnemonic string) error return nil } -func (wallet *Wallet) createWatchingOnlyWallet(extendedPublicKey string) error { +func (wallet *Wallet) CreateWatchingOnlyWallet(extendedPublicKey string) error { pubPass := []byte(w.InsecurePubPassphrase) - _, err := wallet.loader.CreateWatchingOnlyWallet(wallet.shutdownContext(), extendedPublicKey, pubPass) + _, err := wallet.loader.CreateWatchingOnlyWallet(wallet.ShutdownContext(), extendedPublicKey, pubPass) if err != nil { log.Error(err) return err @@ -192,10 +200,10 @@ func (wallet *Wallet) IsWatchingOnlyWallet() bool { return false } -func (wallet *Wallet) openWallet() error { +func (wallet *Wallet) OpenWallet() error { pubPass := []byte(w.InsecurePubPassphrase) - _, err := wallet.loader.OpenExistingWallet(wallet.shutdownContext(), pubPass) + _, err := wallet.loader.OpenExistingWallet(wallet.ShutdownContext(), pubPass) if err != nil { log.Error(err) return translateError(err) @@ -214,7 +222,7 @@ func (wallet *Wallet) UnlockWallet(privPass []byte) error { return fmt.Errorf("wallet has not been loaded") } - ctx, _ := wallet.shutdownContextWithCancel() + ctx, _ := wallet.ShutdownContextWithCancel() err := loadedWallet.Unlock(ctx, privPass, nil) if err != nil { return translateError(err) @@ -238,7 +246,7 @@ func (wallet *Wallet) IsLocked() bool { return wallet.Internal().Locked() } -func (wallet *Wallet) changePrivatePassphrase(oldPass []byte, newPass []byte) error { +func (wallet *Wallet) ChangePrivatePassphrase(oldPass []byte, newPass []byte) error { defer func() { for i := range oldPass { oldPass[i] = 0 @@ -249,14 +257,14 @@ func (wallet *Wallet) changePrivatePassphrase(oldPass []byte, newPass []byte) er } }() - err := wallet.Internal().ChangePrivatePassphrase(wallet.shutdownContext(), oldPass, newPass) + err := wallet.Internal().ChangePrivatePassphrase(wallet.ShutdownContext(), oldPass, newPass) if err != nil { return translateError(err) } return nil } -func (wallet *Wallet) deleteWallet(privatePassphrase []byte) error { +func (wallet *Wallet) DeleteWallet(privatePassphrase []byte) error { defer func() { for i := range privatePassphrase { privatePassphrase[i] = 0 @@ -268,7 +276,7 @@ func (wallet *Wallet) deleteWallet(privatePassphrase []byte) error { } if !wallet.IsWatchingOnlyWallet() { - err := wallet.Internal().Unlock(wallet.shutdownContext(), privatePassphrase, nil) + err := 
wallet.Internal().Unlock(wallet.ShutdownContext(), privatePassphrase, nil) if err != nil { return translateError(err) } @@ -278,24 +286,24 @@ func (wallet *Wallet) deleteWallet(privatePassphrase []byte) error { wallet.Shutdown() log.Info("Deleting Wallet") - return os.RemoveAll(wallet.dataDir) + return os.RemoveAll(wallet.DataDir) } // DecryptSeed decrypts wallet.EncryptedSeed using privatePassphrase -func (wallet *Wallet) DecryptSeed(privatePassphrase []byte) (string, error) { - if wallet.EncryptedSeed == nil { - return "", errors.New(ErrInvalid) - } +// func (wallet *Wallet) DecryptSeed(privatePassphrase []byte) (string, error) { +// if wallet.EncryptedSeed == nil { +// return "", errors.New(ErrInvalid) +// } - return decryptWalletSeed(privatePassphrase, wallet.EncryptedSeed) -} +// return decryptWalletSeed(privatePassphrase, wallet.EncryptedSeed) +// } // AccountXPubMatches checks if the xpub of the provided account matches the // provided legacy or SLIP0044 xpub. While both the legacy and SLIP0044 xpubs // will be checked for watch-only wallets, other wallets will only check the // xpub that matches the coin type key used by the wallet. func (wallet *Wallet) AccountXPubMatches(account uint32, legacyXPub, slip044XPub string) (bool, error) { - ctx := wallet.shutdownContext() + ctx := wallet.ShutdownContext() acctXPubKey, err := wallet.Internal().AccountXpub(ctx, account) if err != nil { diff --git a/wallet_config.go b/wallets/dcr/wallet_config.go similarity index 99% rename from wallet_config.go rename to wallets/dcr/wallet_config.go index 8f6ba8ab8..29636c2cc 100644 --- a/wallet_config.go +++ b/wallets/dcr/wallet_config.go @@ -1,4 +1,4 @@ -package dcrlibwallet +package dcr import ( "decred.org/dcrwallet/v2/errors" diff --git a/walletdata/db.go b/wallets/dcr/walletdata/db.go similarity index 100% rename from walletdata/db.go rename to wallets/dcr/walletdata/db.go diff --git a/walletdata/filter.go b/wallets/dcr/walletdata/filter.go similarity index 100% rename from walletdata/filter.go rename to wallets/dcr/walletdata/filter.go diff --git a/walletdata/read.go b/wallets/dcr/walletdata/read.go similarity index 100% rename from walletdata/read.go rename to wallets/dcr/walletdata/read.go diff --git a/walletdata/save.go b/wallets/dcr/walletdata/save.go similarity index 100% rename from walletdata/save.go rename to wallets/dcr/walletdata/save.go diff --git a/wordlist.go b/wallets/dcr/wordlist.go similarity index 99% rename from wordlist.go rename to wallets/dcr/wordlist.go index 91a819dda..64d30862a 100644 --- a/wordlist.go +++ b/wallets/dcr/wordlist.go @@ -14,7 +14,7 @@ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -package dcrlibwallet +package dcr import "strings"
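The wallet.go changes above export the wallet lifecycle methods (Prepare, CreateWallet, OpenWallet, UnlockWallet, ChangePrivatePassphrase, DeleteWallet) so they can be driven from outside the new wallets/dcr package. The following is only an illustrative sketch of that flow, not part of the patch: the rootDir and passphrase are placeholders, and the nil arguments stand in for the config save/read callbacks normally supplied by the embedding MultiWallet.

package main

import (
	"log"

	"github.com/decred/dcrd/chaincfg/v3"
	"github.com/planetdecred/dcrlibwallet/wallets/dcr"
)

func main() {
	// Placeholder wallet value; in dcrlibwallet this is normally loaded from
	// the multiwallet database rather than constructed directly.
	wallet := &dcr.Wallet{}

	// Prepare opens the wallet's transaction index database and initializes
	// the loader. The nil arguments are stand-ins for the MultiWallet-backed
	// config save/read callbacks.
	err := wallet.Prepare("/tmp/dcr-example", chaincfg.TestNet3Params(), nil, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer wallet.Shutdown()

	// Open and unlock an existing wallet (placeholder passphrase).
	if err := wallet.OpenWallet(); err != nil {
		log.Fatal(err)
	}
	if err := wallet.UnlockWallet([]byte("placeholder passphrase")); err != nil {
		log.Fatal(err)
	}
}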
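Similarly, the vspInfo helper in wallets/dcr/vsp.go validates each /api/v3/vspinfo response by base64-decoding the VSP-Server-Signature header and verifying it over the raw response bytes with the VSP's ed25519 public key. The sketch below reproduces only that verification step in isolation; the generated key pair and byte slice are placeholders for values obtained from a real HTTP response.

package main

import (
	"crypto/ed25519"
	"encoding/base64"
	"fmt"
)

// verifyVSPResponse mirrors the check performed in vspInfo: the signature
// header is base64-decoded and verified over the raw response bytes using the
// VSP's ed25519 public key.
func verifyVSPResponse(pubKey ed25519.PublicKey, respBytes []byte, sigHeader string) error {
	sig, err := base64.StdEncoding.DecodeString(sigHeader)
	if err != nil {
		return fmt.Errorf("error validating VSP signature: %v", err)
	}
	if !ed25519.Verify(pubKey, respBytes, sig) {
		return fmt.Errorf("bad signature from VSP")
	}
	return nil
}

func main() {
	// Placeholder key pair and response body; a real caller would use the
	// PubKey from the decoded vspinfo response and the raw response bytes.
	pub, priv, _ := ed25519.GenerateKey(nil)
	body := []byte(`{"network":"testnet3"}`)
	sigHeader := base64.StdEncoding.EncodeToString(ed25519.Sign(priv, body))

	fmt.Println(verifyVSPResponse(pub, body, sigHeader)) // prints <nil> on success
}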