Skip to content

Commit

Permalink
PR changes
Browse files Browse the repository at this point in the history
  • Loading branch information
sisuresh committed Jan 29, 2020
1 parent 1ef9b45 commit ce60500
Show file tree
Hide file tree
Showing 2 changed files with 184 additions and 165 deletions.
330 changes: 174 additions & 156 deletions scripts/OverlaySurvey.py
Original file line number Diff line number Diff line change
@@ -1,194 +1,212 @@
# importing the requests library
import requests
import json
import time
import sys
import os
#!/usr/bin/env python

import argparse
import networkx as nx
from networkx import Graph
from collections import defaultdict
import json
import networkx as nx
import requests
import sys
import time

def add_new_node(label):
if G.has_node(label):

def add_new_node(graph, label):
if graph.has_node(label):
return
G.add_node(label, label=label)
graph.add_node(label, label=label)


def add_new_edge(graph, u, v):
    """Add the edge (u, v) to *graph*, unless it is already present."""
    if graph.has_edge(u, v):
        return
    graph.add_edge(u, v)


def next_peer(direction_tag, node_info):
    """Yield each peer recorded under *direction_tag* in *node_info*.

    Yields nothing when the tag is missing, None, or an empty list.
    """
    peers = node_info.get(direction_tag)
    if not peers:
        return
    for peer in peers:
        yield peer


def get_next_peers(topology):
    """Return the nodeIds of every peer reported in a survey *topology* map.

    Entries whose value is None (no response yet) are skipped. The result
    may contain duplicates; callers filter against already-surveyed nodes.
    """
    results = []
    for key in topology:
        curr = topology[key]
        if curr is None:
            continue

        for peer in next_peer("inboundPeers", curr):
            results.append(peer["nodeId"])

        for peer in next_peer("outboundPeers", curr):
            results.append(peer["nodeId"])

    return results

def send_requests(peer_list):

def update_results(graph, parent_info, parent_key, results, is_inbound):
    """Merge one direction of *parent_info*'s peer list into *results*.

    Records each peer under results["inboundPeers"] or
    results["outboundPeers"], adds both endpoints and the connecting edge
    to *graph*, and copies the reported peer totals when present.
    """
    tag = "inboundPeers" if is_inbound else "outboundPeers"
    for peer in next_peer(tag, parent_info):
        peer_key = peer["nodeId"]
        results[tag][peer_key] = peer
        add_new_node(graph, parent_key)
        add_new_node(graph, peer_key)
        add_new_edge(graph, peer_key, parent_key)

    if "numTotalInboundPeers" in parent_info:
        results["totalInbound"] = parent_info["numTotalInboundPeers"]
    if "numTotalOutboundPeers" in parent_info:
        results["totalOutbound"] = parent_info["numTotalOutboundPeers"]


def send_requests(peer_list, params, requestUrl):
    """Issue one survey request per node in *peer_list*.

    NOTE: *params* is mutated — its "node" entry is overwritten for each
    request. Responses are intentionally ignored; results are collected
    later via the survey-result endpoint.
    """
    for key in peer_list:
        params["node"] = key
        requests.get(url=requestUrl, params=params)

def check_results(graph, merged_results, resultUrl):
    """Fetch accumulated survey results and merge them into *merged_results*.

    Updates *graph* with any newly seen nodes/edges, marks responding nodes
    with "responseSeen", and returns the nodeIds of peers discovered in this
    batch (candidates for the next round of requests).

    Raises ValueError when the response carries no "topology" key, which
    stellar-core reports when the surveyed public keys are unknown.
    """
    r = requests.get(url=resultUrl)
    data = r.json()

    if "topology" not in data:
        raise ValueError("stellar-core is missing survey nodes. Are the public keys surveyed valid?")

    topology = data["topology"]

    for key in topology:
        curr = topology[key]
        if curr is None:
            # No response from this node yet.
            continue

        merged = merged_results[key]

        update_results(graph, curr, key, merged, True)
        update_results(graph, curr, key, merged, False)

        merged["responseSeen"] = True

    return get_next_peers(topology)

def write_graph_stats(graph, outputFile):
    """Compute basic networkx statistics for *graph* and dump them as JSON
    to *outputFile*.

    NOTE(review): average_shortest_path_length raises on a disconnected
    graph — presumably the survey graph is expected to be connected; verify.
    """
    stats = {
        "average_shortest_path_length": nx.average_shortest_path_length(graph),
        "average_clustering": nx.average_clustering(graph),
        "clustering": nx.clustering(graph),
        "degree": dict(nx.degree(graph)),
    }
    with open(outputFile, 'w') as outfile:
        json.dump(stats, outfile)

# Not used yet.
def get_node_aliases():
    """Return a mapping of public key -> alias from the stellarbeat API,
    covering only nodes that advertise an alias."""
    r = requests.get(url="https://api.stellarbeat.io/v1/nodes")
    nodeList = r.json()
    return {node["publicKey"]: node["alias"] for node in nodeList if "alias" in node}


# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-n", "--node", help="address of initial survey node")
ap.add_argument("-d", "--duration", help="duration of survey in seconds")
ap.add_argument("-nl", "--nodeList", help="optional list of seed nodes")
ap.add_argument("-gml", "--graphml", help="print stats for the graphml input graph")

try:
args = ap.parse_args()
except:
print("Error with input arguments!")
ap.print_help()
sys.exit(0)

#we just want to print stats for an existing graph
if args.graphml:
G = nx.read_graphml(args.graphml)
write_graph_stats()
def analyze(args):
    """Handler for the `analyze` subcommand: load an existing graphml file
    and write its statistics, then exit."""
    graph = nx.read_graphml(args.graphmlAnalyze)
    write_graph_stats(graph, args.graphStats)
    sys.exit(0)

if not args.node or not args.duration:
print("--node and --duration are both required to run the survey!")
sys.exit(0)

G = nx.Graph()
merged_results = defaultdict(lambda:{
"totalInbound":0,
"totalOutbound":0,
"inboundPeers":{},
"outboundPeers":{},
"responseSeen":False
})

URL = args.node

PEERS = URL + "/peers"
SURVEY_REQUEST = URL + "/surveytopology"
SURVEY_RESULT = URL + "/getsurveyresult"
STOP_SURVEY = URL + "/stopsurvey"

duration = int(args.duration)
PARAMS = {'duration':duration}

#reset survey
r = requests.get(url = STOP_SURVEY)

peer_list = []
if args.nodeList:
#include nodes from file
f = open(args.nodeList, "r")
for node in f:
peer_list.append(node.rstrip('\n'))

PEERS_PARAMS = {'fullkeys':"true"}
r = requests.get(url = PEERS, params = PEERS_PARAMS)

data = r.json()
peers = data["authenticated_peers"]

#seed initial peers off of /peers endpoint
if peers["inbound"]:
for peer in peers["inbound"]:
peer_list.append(peer["id"])
if peers["outbound"]:
for peer in peers["outbound"]:
peer_list.append(peer["id"])

t_end = time.time() + duration
while time.time() < t_end:
send_requests(peer_list)
def run_survey(args):
    """Handler for the `survey` subcommand.

    Walks the overlay network for args.duration seconds, starting from
    args.node's /peers list (plus optional seed nodes from args.nodeList),
    then writes graph stats, a graphml dump, and the merged survey results
    to the files named in args.
    """
    G = nx.Graph()
    # Per-node accumulator; defaultdict so first contact creates the entry.
    merged_results = defaultdict(lambda: {
        "totalInbound": 0,
        "totalOutbound": 0,
        "inboundPeers": {},
        "outboundPeers": {},
        "responseSeen": False
    })

    URL = args.node

    PEERS = URL + "/peers"
    SURVEY_REQUEST = URL + "/surveytopology"
    SURVEY_RESULT = URL + "/getsurveyresult"
    STOP_SURVEY = URL + "/stopsurvey"

    duration = int(args.duration)
    PARAMS = {'duration': duration}

    # reset survey
    requests.get(url=STOP_SURVEY)

    peer_list = []
    if args.nodeList:
        # include nodes from file (one public key per line)
        with open(args.nodeList, "r") as f:
            for node in f:
                peer_list.append(node.rstrip('\n'))

    PEERS_PARAMS = {'fullkeys': "true"}
    r = requests.get(url=PEERS, params=PEERS_PARAMS)

    data = r.json()
    peers = data["authenticated_peers"]

    # seed initial peers off of /peers endpoint
    if peers["inbound"]:
        for peer in peers["inbound"]:
            peer_list.append(peer["id"])
    if peers["outbound"]:
        for peer in peers["outbound"]:
            peer_list.append(peer["id"])

    t_end = time.time() + duration
    while time.time() < t_end:
        send_requests(peer_list, PARAMS, SURVEY_REQUEST)
        peer_list = []

        # allow time for results
        time.sleep(1)
        result_node_list = check_results(G, merged_results, SURVEY_RESULT)

        # try new nodes
        for key in result_node_list:
            if key not in merged_results:
                peer_list.append(key)

        # retry for incomplete nodes (fewer peers seen than the node reports)
        for key in merged_results:
            node = merged_results[key]
            if node["totalInbound"] > len(node["inboundPeers"]):
                peer_list.append(key)
            if node["totalOutbound"] > len(node["outboundPeers"]):
                peer_list.append(key)

    if nx.is_empty(G):
        print("Graph is empty!")
        sys.exit(0)

    write_graph_stats(G, args.graphStats)

    nx.write_graphml(G, args.graphmlWrite)

    with open(args.surveyResult, 'w') as outfile:
        json.dump(merged_results, outfile)


def main():
    """Parse command-line arguments and dispatch to the chosen subcommand
    (`survey` or `analyze`)."""
    # construct the argument parser and parse the arguments
    ap = argparse.ArgumentParser()
    ap.add_argument("-gs", "--graphStats", required=True, help="output file for graph stats")

    subparsers = ap.add_subparsers()

    parser_survey = subparsers.add_parser('survey', help="run survey and analyze results")
    parser_survey.add_argument("-n", "--node", required=True, help="address of initial survey node")
    parser_survey.add_argument("-d", "--duration", required=True, help="duration of survey in seconds")
    parser_survey.add_argument("-sr", "--surveyResult", required=True, help="output file for survey results")
    parser_survey.add_argument("-gmlw", "--graphmlWrite", required=True, help="output file for graphml file")
    parser_survey.add_argument("-nl", "--nodeList", help="optional list of seed nodes")
    parser_survey.set_defaults(func=run_survey)

    parser_analyze = subparsers.add_parser('analyze', help="write stats for the graphml input graph")
    # required: analyze dereferences graphmlAnalyze unconditionally
    parser_analyze.add_argument("-gmla", "--graphmlAnalyze", required=True, help="input graphml file")
    parser_analyze.set_defaults(func=analyze)

    args = ap.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()
19 changes: 10 additions & 9 deletions scripts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,13 @@ This folder is for storing any scripts that may be helpful for using stellar-cor
### Overlay survey
- Name - `OverlaySurvey.py`
- Description - A Python script that will walk the network using the Overlay survey mechanism to gather connection information. See [admin](./../docs/software/admin.md#overlay-topology-survey) for more information on the overlay survey. The survey will use the peers of the initial node to seed the survey.
- Usage - Ex. `python OverlaySurvey.py -n http://127.0.0.1:8080 -d 45 -nl temp.txt` to run the survey or `python OverlaySurvey.py -gml survey.graphml` to analyze an existing graph.
- `-n NODE`, `--node NODE` - address of initial survey node (required if not using -gml)
- `-d DURATION`, `--duration DURATION` - duration of survey in seconds (required if not using -gml)
- `-nl NODELIST`, `--nodeList NODELIST` - list of seed nodes. One node per line. (Optional)
- `-gml GRAPHML`, `--graphml GRAPHML` - print stats for the graphml input graph. Does not run survey.
- Output files
- `graphStats.json` - networkx stats about graph
- `surveyResults.json` - connection information gathered from survey
- `survey.graphml` - networkx graph in graphml format
- Usage - Ex. `python OverlaySurvey.py -gs gs.json survey -n http://127.0.0.1:11626 -d 50 -sr sr.json -gmlw gmlw.graphml` to run the survey or `python OverlaySurvey.py -gs gs.json analyze -gmla gmla.graphml` to analyze an existing graph.
- `-gs GRAPHSTATS`, `--graphStats GRAPHSTATS` - output file for graph stats
  - subcommand `survey` - run the survey and analyze the results
- `-n NODE`, `--node NODE` - address of initial survey node
- `-d DURATION`, `--duration DURATION` - duration of survey in seconds
- `-nl NODELIST`, `--nodeList NODELIST` - list of seed nodes. One node per line. (Optional)
- `-gmlw GRAPHMLWRITE`, `--graphmlWrite GRAPHMLWRITE` - output file for graphml file
- `-sr SURVEYRESULT`, `--surveyResult SURVEYRESULT` - output file for survey results
  - subcommand `analyze` - analyze an existing graph
- `-gmla GRAPHMLANALYZE`, `--graphmlAnalyze GRAPHMLANALYZE` - input graphml file

5 comments on commit ce60500

@latobarita
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

saw approval from graydon
at sisuresh@ce60500

@latobarita
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

merging sisuresh/stellar-core/addSurveyScript = ce60500 into auto

@latobarita
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

sisuresh/stellar-core/addSurveyScript = ce60500 merged ok, testing candidate = 5a8f8b1

@latobarita
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@latobarita
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fast-forwarding master to auto = 5a8f8b1

Please sign in to comment.