btc-crawl/btc-crawl.go

// TODO: Namespace packages properly (outside of `main`)
// TODO: Apply peer-age filter to results?
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"time"

	"github.com/alexcesaro/log"
	"github.com/alexcesaro/log/golog"
	"github.com/jessevdk/go-flags"
)
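
// Note: logger is assigned in main() below but declared elsewhere in this
// package, alongside GetSeedsFromDNS, NewClient, and NewCrawler, which live
// in sibling files.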

// Taken from: https://github.com/bitcoin/bitcoin/blob/89d72f3d9b6e7ef051ad1439f266b809f348229b/src/chainparams.cpp#L143
var defaultDnsSeeds = []string{
	"seed.bitcoin.sipa.be",
	"dnsseed.bluematt.me",
	"dnsseed.bitcoin.dashjr.org",
	"seed.bitcoinstats.com",
	"seed.bitnodes.io",
	"bitseed.xf2.org",
}
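
// Options describes the command-line flags parsed with go-flags. A
// hypothetical invocation (flag names follow the struct tags below):
//
//	btc-crawl -vv --concurrency=50 --output=-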
type Options struct {
	Verbose     []bool        `short:"v" long:"verbose" description:"Show verbose logging."`
	Output      string        `short:"o" long:"output" description:"File to write result to." default:"btc-crawl.json"`
	Seed        []string      `short:"s" long:"seed" description:"Override which seeds to use." default-mask:"<bitcoin-core DNS seeds>"`
	Concurrency int           `short:"c" long:"concurrency" description:"Maximum number of concurrent connections to open." default:"10"`
	UserAgent   string        `short:"A" long:"user-agent" description:"Client name to advertise while crawling. Should be in format of '/name:x.y.z/'." default:"/btc-crawl:0.1.1/"`
	PeerAge     time.Duration `long:"peer-age" description:"Ignore discovered peers older than this." default:"24h"`
	StopAfter   int           `long:"stop-after" description:"Stop crawling after this many results." default:"0"`
}
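
// logLevels maps the number of -v flags to a verbosity level: no -v logs
// only warnings, -v adds info, and -vv (or more) adds debug output.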
var logLevels = []log.Level{
	log.Warning,
	log.Info,
	log.Debug,
}
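
// main parses the flags, resolves seed nodes (querying the default DNS seeds
// when none are given), runs the crawler, and writes the results as JSON.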
func main() {
	now := time.Now()
	options := Options{}
	parser := flags.NewParser(&options, flags.Default)

	p, err := parser.Parse()
	if err != nil {
		if p == nil {
			fmt.Print(err)
		}
		return
	}

	// Figure out the log level: clamp the -v count to the highest level.
	numVerbose := len(options.Verbose)
	if numVerbose >= len(logLevels) { // lol math.Min, you floaty bugger.
		numVerbose = len(logLevels) - 1
	}

	logLevel := logLevels[numVerbose]
	logger = golog.New(os.Stderr, logLevel)

	seedNodes := options.Seed
	if len(seedNodes) == 0 {
		seedNodes = GetSeedsFromDNS(defaultDnsSeeds)
	}
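
	// Crawl outward from the seed nodes, respecting the concurrency and
	// stop-after limits.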
	client := NewClient(options.UserAgent)
	crawler := NewCrawler(client, seedNodes, options.PeerAge)
	results := crawler.Run(options.Concurrency, options.StopAfter)
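
	// Serialize the collected results to JSON.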
	b, err := json.Marshal(results)
	if err != nil {
		logger.Errorf("Failed to export JSON: %v", err)
		return
	}
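
	// An output of "-" writes the results to stdout instead of a file.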
	if options.Output == "-" {
		os.Stdout.Write(b)
		return
	}

	err = ioutil.WriteFile(options.Output, b, 0644)
	if err != nil {
		logger.Errorf("Failed to write to %s: %v", options.Output, err)
		return
	}

	logger.Infof("Wrote %d results in %s: %s", len(*results), time.Since(now), options.Output)
}