Partially made Gumby Python 3 compatible #415

Merged · 3 commits · May 23, 2019
2 changes: 1 addition & 1 deletion experiments/dht/dht_module.py
@@ -29,7 +29,7 @@ def on_ipv8_available(self, _):
 
     @experiment_callback
     def introduce_peers_dht(self):
-        for peer_id in self.all_vars.iterkeys():
+        for peer_id in self.all_vars.keys():
             if int(peer_id) != self.my_id:
                 self.overlay.walk_to(self.experiment.get_peer_ip_port_by_id(peer_id))

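Note on this pattern (it recurs across the modules below): Python 3 removes `dict.iterkeys()`, `itervalues()` and `iteritems()`; the plain `keys()`, `values()` and `items()` methods return lazy view objects there, so switching to them keeps the iterator-like behaviour on Python 3 while still working on Python 2. A minimal sketch of the portable spellings, using a made-up `all_vars` table standing in for the experiment's peer variables:

```python
all_vars = {'1': {'host': '10.0.0.1', 'port': 5000}}  # hypothetical peer table

# Portable: on Python 3, keys() is a lazy view rather than a copied
# list, so this behaves like Python 2's iterkeys().
for peer_id in all_vars.keys():
    print(peer_id)

# Iterating the dict directly is the idiomatic spelling on both versions.
for peer_id in all_vars:
    print(peer_id)
```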
4 changes: 2 additions & 2 deletions experiments/dht/parse_dht_statistics.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 import os
 import sys

@@ -13,7 +13,7 @@ class DHTStatisticsParser(StatisticsParser):
     """
 
     def aggregate_dht_response_times(self):
-        with open('dht_response_times.csv', 'w', 0) as csv_fp:
+        with open('dht_response_times.csv', 'w') as csv_fp:
             csv_fp.write('peer time operation response_time\n')
             for peer_nr, filename, dir in self.yield_files('dht.log'):
                 with open(filename) as log_fp:
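Dropping the third argument to `open()` here is more than cosmetic: `open(path, 'w', 0)` requested an unbuffered file on Python 2, while Python 3 raises `ValueError: can't have unbuffered text I/O` (`buffering=0` is only valid in binary mode), so the code now falls back to default block buffering. If the unbuffered behaviour mattered for inspecting half-finished runs, a sketch of alternatives that run under both interpreters (an assumption about intent, not what this PR does):

```python
# Line buffering (buffering=1): writes are flushed at each newline.
with open('dht_response_times.csv', 'w', 1) as csv_fp:
    csv_fp.write('peer time operation response_time\n')

# Or keep default buffering and flush explicitly at checkpoints.
with open('dht_response_times.csv', 'w') as csv_fp:
    csv_fp.write('peer time operation response_time\n')
    csv_fp.flush()
```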
2 changes: 1 addition & 1 deletion experiments/dummy/dummy_experiment_client.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 # experiment_client.py ---
 #
 # Filename: experiment_client.py
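The shebang change is the same across all touched scripts: `/usr/bin/env python` resolves to whichever interpreter is first on the PATH, so the scripts follow the experiment environment (e.g. a Python 3 virtualenv) instead of being pinned to a `python2` binary.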
2 changes: 1 addition & 1 deletion experiments/gigachannel/gigachannel_module.py
@@ -36,7 +36,7 @@ def on_ipv8_available(self, _):
 
     @experiment_callback
     def introduce_peers_gigachannels(self):
-        for peer_id in self.all_vars.iterkeys():
+        for peer_id in self.all_vars.keys():
             if int(peer_id) != self.my_id:
                 self.overlay.walk_to(self.experiment.get_peer_ip_port_by_id(peer_id))

20 changes: 10 additions & 10 deletions experiments/ipv8/parse_ipv8_statistics.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 import json
 import os
 import sys
@@ -39,8 +39,8 @@ def aggregate_messages(self):
             # Aggregate statistics for all available overlays
             for stat_time, stat_dict in stat_dicts:
                 collapsed_stat_dict = {}
-                for _, msg_stats_dict in stat_dict.iteritems():
-                    for msg_id, specific_msg_stats_dict in msg_stats_dict.iteritems():
+                for _, msg_stats_dict in stat_dict.items():
+                    for msg_id, specific_msg_stats_dict in msg_stats_dict.items():
                         if msg_id not in collapsed_stat_dict:
                             collapsed_stat_dict[msg_id] = {'num_up': 0, 'num_down': 0, 'bytes_up': 0, 'bytes_down': 0}
                         collapsed_stat_dict[msg_id]['num_up'] += specific_msg_stats_dict['num_up']
@@ -54,11 +54,11 @@ def aggregate_messages(self):
         # Find the largest time across all the files + different messages we have
         msg_ids = set()
         largest_time = 0
-        for stat_lists in stats_per_peer.itervalues():
+        for stat_lists in stats_per_peer.values():
             for stat_time, stat_dict in stat_lists:
                 if stat_time > largest_time:
                     largest_time = stat_time
-                for msg_id in stat_dict.iterkeys():
+                for msg_id in stat_dict.keys():
                     msg_ids.add(msg_id)
 
         if not msg_ids:
@@ -78,14 +78,14 @@ def aggregate_messages(self):
         # We now actually fill in the results
         for ind in xrange(1, largest_time / 5 + 1):
             cur_time = ind * 5
-            for stats_list in stats_per_peer.itervalues():
+            for stats_list in stats_per_peer.values():
                 filtered_dicts = [stat_dict for stat_time, stat_dict in stats_list if stat_time <= cur_time]
                 if not filtered_dicts:
                     continue
                 required_dict = filtered_dicts[-1]
 
                 # We have to merge the information now
-                for msg_id, msg_stats in required_dict.iteritems():
+                for msg_id, msg_stats in required_dict.items():
                     results[ind][msg_id]['num_up'] += msg_stats['num_up']
                     results[ind][msg_id]['num_down'] += msg_stats['num_down']
                     results[ind][msg_id]['bytes_up'] += msg_stats['bytes_up']
@@ -96,7 +96,7 @@ def aggregate_messages(self):
             output_file.write("time,msg_id,num_up,num_down,bytes_up,bytes_down\n")
             for ind, stats in enumerate(results):
                 cur_time = ind * 5
-                for msg_id, msg_stats in stats.iteritems():
+                for msg_id, msg_stats in stats.items():
                     output_file.write("%d,%s,%d,%d,%d,%d\n" % (cur_time, msg_id, msg_stats['num_up'],
                                                                msg_stats['num_down'], msg_stats['bytes_up'],
                                                                msg_stats['bytes_down']))
@@ -109,7 +109,7 @@ def aggregate_peer_connections(self):
                 for peer_connection in peer_connections:
                     peers_connections.add((peer_nr, int(peer_connection)))
 
-        with open('peer_connections.log', 'w', 0) as connections_file:
+        with open('peer_connections.log', 'w') as connections_file:
             connections_file.write("peer_a,peer_b\n")
             for peer_a, peer_b in peers_connections:
                 connections_file.write("%d,%d\n" % (peer_a, peer_b))
@@ -122,7 +122,7 @@ def aggregate_bandwidth(self):
                 total_up += int(parts[0])
                 total_down += int(parts[1])
 
-        with open('total_bandwidth.log', 'w', 0) as output_file:
+        with open('total_bandwidth.log', 'w') as output_file:
             output_file.write("%s,%s,%s\n" % (total_up, total_down, (total_up + total_down) / 2))
 
     def aggregate_autoplot(self):
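Note that unchanged context in this file is still Python 2 only: `for ind in xrange(1, largest_time / 5 + 1)` survives the port. Under Python 3, `xrange` raises `NameError` and `largest_time / 5` is true division yielding a float, which `range()` rejects — consistent with the "Partially" in the PR title. A sketch of the portable spelling (not part of this diff):

```python
largest_time = 120  # made-up value; the parser derives it from the log files

# range() exists on both versions, and floor division (//) keeps the
# bound an int, which range() requires under Python 3.
for ind in range(1, largest_time // 5 + 1):
    cur_time = ind * 5
```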
2 changes: 1 addition & 1 deletion experiments/market/market_experiment.conf
@@ -25,7 +25,7 @@ das4_node_timeout = 120
 das4_instances_to_run = 1
 
 # What command do we want to run?
-das4_node_command = "launch_scenario.py"
+das4_node_command = "launch_scenario_py3.py"
 scenario_file = "market_experiment_100.scenario"
 
 messages_to_plot = 'ask,bid'
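The config change points `das4_node_command` at `launch_scenario_py3.py`, presumably a Python 3 entry point that exists alongside the original `launch_scenario.py`; the scenario file itself is untouched.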
21 changes: 11 additions & 10 deletions experiments/market/market_module.py
@@ -1,6 +1,7 @@
 import json
 import os
 import random
+from base64 import b64decode
 
 from anydex.core.community import MarketCommunity
 from anydex.core.assetamount import AssetAmount
@@ -80,14 +81,14 @@ def init_trader_lookup_table(self):
         Initialize the lookup table for all traders so we do not have to use the DHT.
         """
         num_total_matchmakers = int(os.environ['NUM_MATCHMAKERS'])
-        for peer_id in self.all_vars.iterkeys():
+        for peer_id in self.all_vars.keys():
             target = self.all_vars[peer_id]
             address = (str(target['host']), target['port'])
 
             if 'public_key' not in self.all_vars[peer_id]:
                 self._logger.error("Could not find public key of peer %s!", peer_id)
             else:
-                peer = Peer(self.all_vars[peer_id]['public_key'].decode("base64"), address=address)
+                peer = Peer(b64decode(self.all_vars[peer_id]['public_key']), address=address)
                 self.overlay.update_ip(TraderId(peer.mid), address)
 
             if int(peer_id) <= num_total_matchmakers:
@@ -132,12 +133,12 @@ def write_stats(self):
                                       len(transaction.payments), scenario_runner._peernumber, partner_peer_id))
 
         # Write transactions
-        with open('transactions.log', 'w', 0) as transactions_file:
+        with open('transactions.log', 'w') as transactions_file:
             for transaction in transactions:
                 transactions_file.write("%s,%s,%s,%s,%s,%s\n" % transaction)
 
         # Write orders
-        with open('orders.log', 'w', 0) as orders_file:
+        with open('orders.log', 'w') as orders_file:
             for order in self.overlay.order_manager.order_repository.find_all():
                 order_data = (int(order.timestamp) / 1000.0, order.order_id, scenario_runner._peernumber,
                               'ask' if order.is_ask() else 'bid',
@@ -148,11 +149,11 @@ def write_stats(self):
                 orders_file.write("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n" % order_data)
 
         # Write ticks in order book
-        with open('orderbook.txt', 'w', 0) as orderbook_file:
+        with open('orderbook.txt', 'w') as orderbook_file:
             orderbook_file.write(str(self.overlay.order_book))
 
         # Write known matchmakers
-        with open('matchmakers.txt', 'w', 0) as matchmakers_file:
+        with open('matchmakers.txt', 'w') as matchmakers_file:
             for matchmaker in self.overlay.matchmakers:
                 matchmakers_file.write("%s,%d\n" % (matchmaker.address[0], matchmaker.address[1]))

@@ -166,18 +167,18 @@ def write_stats(self):
             else:
                 fulfilled_bids += 1
 
-        with open('market_stats.log', 'w', 0) as stats_file:
+        with open('market_stats.log', 'w') as stats_file:
             stats_dict = {'asks': self.num_asks, 'bids': self.num_bids,
                           'fulfilled_asks': fulfilled_asks, 'fulfilled_bids': fulfilled_bids}
             stats_file.write(json.dumps(stats_dict))
 
         # Write mid register
-        with open('mid_register.log', 'w', 0) as mid_file:
-            for trader_id, host in self.overlay.mid_register.iteritems():
+        with open('mid_register.log', 'w') as mid_file:
+            for trader_id, host in self.overlay.mid_register.items():
                 mid_file.write("%s,%s\n" % (trader_id.as_hex(), "%s:%d" % host))
 
         # Write items in the matching queue
-        with open('match_queue.txt', 'w', 0) as queue_file:
+        with open('match_queue.txt', 'w') as queue_file:
             for match_cache in self.overlay.get_match_caches():
                 for retries, price, other_order_id in match_cache.queue.queue:
                     queue_file.write(
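The `b64decode` import added at the top of this file supports the key-decoding change: Python 2's codec trick `some_str.decode("base64")` has no Python 3 equivalent (`str.decode` is gone, and `'base64'` is not a text encoding), while `base64.b64decode` exists on both versions and returns the raw bytes a key constructor expects. A minimal sketch with a made-up payload:

```python
from base64 import b64decode

encoded = 'aGVsbG8='  # made-up payload; the real keys come from all_vars

# Python 2 only: encoded.decode("base64")
# Portable: b64decode accepts str or bytes and returns bytes on Python 3.
raw = b64decode(encoded)  # b'hello'
```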
8 changes: 4 additions & 4 deletions experiments/market/parse_market_statistics.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 import json
 import os
 import sys
@@ -42,7 +42,7 @@ def aggregate_transaction_data(self):
             total_transactions += 1
             transactions_cumulative_str += str(transaction_time) + "," + str(total_transactions) + "\n"
 
-        with open('transactions.log', 'w', 0) as transactions_file:
+        with open('transactions.log', 'w') as transactions_file:
             transactions_file.write("time,price,quantity,payments,peer1,peer2\n")
             transactions_file.write(transactions_str)

@@ -63,7 +63,7 @@ def aggregate_order_data(self):
             orders_str += orders_data
             orders_data_all += orders_data
 
-        with open('orders.log', 'w', 0) as orders_file:
+        with open('orders.log', 'w') as orders_file:
             orders_file.write(orders_str)
 
         # Calculate the average order latency
@@ -104,7 +104,7 @@ def aggregate_general_stats(self):
                 fulfilled_asks += stats_dict['fulfilled_asks']
                 fulfilled_bids += stats_dict['fulfilled_bids']
 
-        with open('aggregated_market_stats.log', 'w', 0) as stats_file:
+        with open('aggregated_market_stats.log', 'w') as stats_file:
             stats_dict = {'asks': total_asks, 'bids': total_bids,
                           'fulfilled_asks': fulfilled_asks, 'fulfilled_bids': fulfilled_bids,
                           'total_quantity_traded': self.total_quantity_traded,
2 changes: 1 addition & 1 deletion experiments/popularity/popularity_module.py
@@ -81,7 +81,7 @@ def set_fake_dht_health_manager(self):
 
     @experiment_callback
     def introduce_peers_popularity(self):
-        for peer_id in self.all_vars.iterkeys():
+        for peer_id in self.all_vars.keys():
             if int(peer_id) != self.my_id:
                 self.overlay.walk_to(self.experiment.get_peer_ip_port_by_id(peer_id))

2 changes: 1 addition & 1 deletion experiments/tribler_idle_run/tribler_idle_run.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 # tribler_idle_run.py ---
 #
 # Filename: tribler_idle_run.py
4 changes: 2 additions & 2 deletions experiments/trustchain/post_process_trustchain.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 from __future__ import print_function
 import json
 import os
@@ -96,7 +96,7 @@ def write_blocks_to_file(self):
                 trustchain_interactions_file.write("%d,%d\n" % (peer_a, peer_b))
 
     def aggregate_trustchain_balances(self):
-        with open('trustchain_balances.csv', 'w', 0) as balances_file:
+        with open('trustchain_balances.csv', 'w') as balances_file:
             balances_file.write('peer,total_up,total_down,balance\n')
             for peer_nr, filename, dir in self.yield_files('trustchain.txt'):
                 with open(filename) as tc_file:
6 changes: 3 additions & 3 deletions experiments/trustchain/trustchain_mem_db.py
@@ -49,7 +49,7 @@ def get_all_blocks(self):
 
     def get_number_of_known_blocks(self, public_key=None):
         if public_key:
-            return len([pk for pk, _ in self.block_cache.iterkeys() if pk == public_key])
+            return len([pk for pk, _ in self.block_cache.keys() if pk == public_key])
         return len(self.block_cache.keys())
 
     def contains(self, block):
@@ -83,7 +83,7 @@ def get_lowest_sequence_number_unknown(self, public_key):
 
     def get_lowest_range_unknown(self, public_key):
         lowest_unknown = self.get_lowest_sequence_number_unknown(public_key)
-        known_block_nums = [seq_num for pk, seq_num in self.block_cache.iterkeys() if pk == public_key]
+        known_block_nums = [seq_num for pk, seq_num in self.block_cache.keys() if pk == public_key]
         filtered_block_nums = [seq_num for seq_num in known_block_nums if seq_num > lowest_unknown]
         if filtered_block_nums:
            return lowest_unknown, filtered_block_nums[0] - 1
@@ -120,7 +120,7 @@ def commit(self, my_pub_key):
         Commit all information to the original database.
         """
         if self.original_db:
-            my_blocks = [block for block in self.block_cache.itervalues() if block.public_key == my_pub_key]
+            my_blocks = [block for block in self.block_cache.values() if block.public_key == my_pub_key]
             for block in my_blocks:
                 self.original_db.add_block(block)

2 changes: 1 addition & 1 deletion experiments/trustchain/trustchain_module.py
@@ -186,6 +186,6 @@ def commit_blocks_to_db(self):
     @experiment_callback
     def write_trustchain_statistics(self):
         from Tribler.Core.Modules.wallet.tc_wallet import TrustchainWallet
-        with open('trustchain.txt', 'w', 0) as trustchain_file:
+        with open('trustchain.txt', 'w') as trustchain_file:
             wallet = TrustchainWallet(self.overlay)
             trustchain_file.write(json.dumps(wallet.get_statistics()))
8 changes: 4 additions & 4 deletions experiments/tunnels/hidden_tunnel_module.py
@@ -10,10 +10,10 @@ class HiddenTunnelModule(TunnelModule):
     def write_tunnels_info(self):
         super(HiddenTunnelModule, self).write_tunnels_info()
 
-        with open('introduction_points.txt', 'w', 0) as ips_file:
-            for infohash in self.overlay.intro_point_for.iterkeys():
+        with open('introduction_points.txt', 'w') as ips_file:
+            for infohash in self.overlay.intro_point_for.keys():
                 ips_file.write("%s,%s\n" % (self.my_id, infohash.encode('hex')))
 
-        with open('rendezvous_points.txt', 'w', 0) as rps_file:
-            for cookie in self.overlay.rendezvous_point_for.iterkeys():
+        with open('rendezvous_points.txt', 'w') as rps_file:
+            for cookie in self.overlay.rendezvous_point_for.keys():
                 rps_file.write("%s,%s\n" % (self.my_id, cookie.encode('hex')))
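As with the `xrange` left in the ipv8 parser, the unchanged `infohash.encode('hex')` and `cookie.encode('hex')` calls remain Python 2 only: on Python 3, `bytes` has no `encode()` method and `'hex'` is not a text encoding for `str`. A sketch of portable spellings (not part of this diff):

```python
from binascii import hexlify

infohash = b'\xde\xad\xbe\xef'  # made-up stand-in for a 20-byte infohash

# Portable: hexlify works on both interpreters; decode() turns the
# result into text for the "%s" formatting in the log line.
hex_str = hexlify(infohash).decode('ascii')  # 'deadbeef'

# Python 3 only alternative: infohash.hex()
```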
14 changes: 7 additions & 7 deletions experiments/tunnels/parse_tunnel_statistics.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 import json
 import os
 import sys
@@ -18,21 +18,21 @@ def __init__(self, node_directory):
         self.relays_info = []
 
     def aggregate_introduction_points(self):
-        with open('introduction_points.csv', 'w', 0) as ips_file:
+        with open('introduction_points.csv', 'w') as ips_file:
             ips_file.write("peer,infohash\n")
             for peer_nr, filename, dir in self.yield_files('introduction_points.txt'):
                 with open(filename) as ip_file:
                     ips_file.write(ip_file.read())
 
     def aggregate_rendezvous_points(self):
-        with open('rendezvous_points.csv', 'w', 0) as rps_file:
+        with open('rendezvous_points.csv', 'w') as rps_file:
             rps_file.write("peer,cookie\n")
             for peer_nr, filename, dir in self.yield_files('rendezvous_points.txt'):
                 with open(filename) as rp_file:
                     rps_file.write(rp_file.read())
 
     def aggregate_downloads_history(self):
-        with open('downloads_history.csv', 'w', 0) as downloads_file:
+        with open('downloads_history.csv', 'w') as downloads_file:
             downloads_file.write('peer,time,infohash,progress,status,total_up,total_down,speed_up,speed_down\n')
             for peer_nr, filename, dir in self.yield_files('downloads_history.txt'):
                 with open(filename) as individual_downloads_file:
@@ -41,7 +41,7 @@ def aggregate_downloads_history(self):
                     downloads_file.write('%s,%s' % (peer_nr, line))
 
     def aggregate_circuits(self):
-        with open('circuits.csv', 'w', 0) as circuits_file:
+        with open('circuits.csv', 'w') as circuits_file:
             circuits_file.write('peer,circuit_id,state,hops,bytes_up,bytes_down,creation_time,type,first_hop\n')
             for peer_nr, filename, dir in self.yield_files('circuits.txt'):
                 with open(filename) as individual_circuits_file:
@@ -58,7 +58,7 @@ def aggregate_circuits(self):
                     self.circuits_info.append((peer_nr, circuit_id, circuit_type, first_hop, bytes_transferred))
 
     def aggregate_relays(self):
-        with open('relays.csv', 'w', 0) as relays_file:
+        with open('relays.csv', 'w') as relays_file:
             relays_file.write('peer,circuit_id_1,circuit_id_2,destination,bytes_up\n')
             for peer_nr, filename, dir in self.yield_files('relays.txt'):
                 with open(filename) as individual_relays_file:
@@ -111,7 +111,7 @@ def build_circuits_graph(self):
             cur_circuit_num += 1
 
         # Write circuits to file
-        with open('circuits_graph.csv', 'w', 0) as circuits_graph_file:
+        with open('circuits_graph.csv', 'w') as circuits_graph_file:
             circuits_graph_file.write('from,to,circuit_num,type,bytes_transferred\n')
             for from_peer, to_peer, circuit_num, type, bytes_transferred in edges:
                 circuits_graph_file.write('%s,%s,%s,%s,%d\n' % (from_peer, to_peer, circuit_num, type, bytes_transferred))