-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathcsv_preprocessing.py
More file actions
157 lines (140 loc) · 5.55 KB
/
csv_preprocessing.py
File metadata and controls
157 lines (140 loc) · 5.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
import numpy as np
import pandas as pd
from tqdm import tqdm
import glob
import matplotlib.pyplot as plt
from datetime import datetime
from datetime import timedelta
import os
from pathlib import Path
import argparse
class Node:
    """A minimal graph vertex that holds nothing but its identifier."""

    def __init__(self, n):
        # The vertex identifier — in this pipeline, an on-chain address string.
        self.name = n
class Graph:
    """Directed weighted graph stored as a dense adjacency matrix.

    `nodes` maps name -> Node, `edge_indices` maps name -> row/column index
    into `edges`, and `edges` is a square list-of-lists of accumulated edge
    weights.
    """

    def __init__(self):
        # BUGFIX: these were class-level mutable attributes, so every Graph
        # instance shared one matrix and callers had to reset
        # Graph.nodes/edges/edge_indices by hand. Instance attributes give
        # each Graph() its own fresh state; any leftover class-level resets
        # in callers become harmless no-ops.
        self.nodes = {}
        self.edges = []
        self.edge_indices = {}

    def add_node(self, node):
        """Register `node` and grow the matrix by one row and one column.

        Returns True if the node was added, False when `node` is not a Node
        instance or a node with that name is already present.
        """
        if isinstance(node, Node) and node.name not in self.nodes:
            self.nodes[node.name] = node
            # Widen every existing row, then append the new all-zero row.
            for row in self.edges:
                row.append(0)
            self.edges.append([0] * (len(self.edges) + 1))
            self.edge_indices[node.name] = len(self.edge_indices)
            return True
        else:
            return False

    def add_edge(self, u, v, weight):
        """Accumulate `weight` onto the directed edge u -> v.

        Returns False (and changes nothing) unless both endpoints are
        already registered nodes.
        """
        if u in self.nodes and v in self.nodes:
            self.edges[self.edge_indices[u]][self.edge_indices[v]] += weight
            return True
        else:
            return False

    def get_adj_martix(self):
        """Return the adjacency matrix as a numpy array.

        NOTE: the "martix" misspelling is kept so existing callers keep
        working.
        """
        return np.array(self.edges)
# Command-line selection of which slice of transactions_csv_list to process.
# NOTE(review): with the defaults (from_item=1, to_item=1) the slice [1:1] is
# empty and the script processes nothing — confirm that is intended.
parser = argparse.ArgumentParser()
parser.add_argument("--from_item", help="from which number of item iterate",
                    type=int, default=1)
parser.add_argument("--to_item", help="till which number of item iterate",
                    type=int, default=1)
args = parser.parse_args()
# Hourly price files, one per token (e.g. FTTBTC-1h-data.csv).
# NOTE(review): not referenced anywhere below — confirm before removing.
tokens_price_list = sorted(glob.glob("ERC20-1h-data/*"))
# Per-token transaction dumps; --from_item/--to_item index into this list.
transactions_csv_list = ['data/ftt.csv',
                         'data/vibe.csv',
                         'data/dlt.csv',
                         'data/lto.csv',
                         'data/oax.csv',
                         'data/celr.csv',
                         'data/hot.csv',
                         'data/sub.csv',
                         'data/nkn.csv',
                         'data/key.csv',
                         'data/fet.csv',
                         'data/appc.csv',
                         'data/sngls.csv',
                         'data/mda.csv',
                         'data/mith.csv',
                         'data/ren.csv',
                         'data/cdt.csv',
                         'data/dock.csv',
                         'data/blz.csv',
                         'data/data.csv',
                         'data/tusd.csv',
                         'data/link.csv']
print("iterating from ", args.from_item)
print("iterating to ", args.to_item)
print(transactions_csv_list[args.from_item:args.to_item])
# NOTE(review): loaded but never referenced below in this file — confirm this
# read is still needed (e.g. by other tooling) before removing it.
contract_hashes = pd.read_csv("erc20_contracts_with_max_page.csv")
# For each selected token: build a transaction graph day by day and join the
# hourly graph statistics (new transaction volume, new addresses) onto the
# token's hourly price series.
for path_to_csv in transactions_csv_list[args.from_item:args.to_item]:
    # Matching hourly price file, e.g. data/ftt.csv -> FTTBTC-1h-data.csv.
    price_data_file = path_to_csv.split("/")[1].split(".")[0].upper() + "BTC-1h-data.csv"
    ftt = pd.read_csv("ERC20-1h-data/" + price_data_file)
    ftt['timestamp'] = pd.to_datetime(ftt.timestamp)

    frame = pd.read_csv(path_to_csv)
    my_groups = frame.groupby('new_date')
    my_keys = my_groups.groups.keys()

    # One row of 24 hourly buckets per calendar day.
    transactions_new = []  # new edge-weight (transaction volume) per hour
    verts_new = []         # newly seen addresses per hour
    from_nods = []         # newly seen sender addresses per hour
    transaction_prev = 0   # running total of matrix weight, for hourly deltas
    len_gg = 0             # running node count, for hourly deltas

    # Reset class-level graph state and start a fresh graph per token.
    # NOTE(review): the resets are kept for compatibility with a Graph that
    # stores state on the class; they are harmless no-ops if Graph keeps
    # state on the instance.
    Graph.nodes = {}
    Graph.edges = []
    Graph.edge_indices = {}
    g = Graph()

    n = len(my_keys)
    previous_key = list(my_keys)[0]
    # Cycle over days.
    for i, k in tqdm(enumerate(my_keys), total=n):
        # Insert all-zero rows for calendar days with no transactions so the
        # flattened series stays aligned with the hourly price index.
        for _ in range((pd.to_datetime(k) - pd.to_datetime(previous_key)).days - 1):
            verts_new.append([0] * 24)
            transactions_new.append([0] * 24)
            from_nods.append([0] * 24)

        day_group = my_groups.get_group(k)
        times = pd.to_datetime(day_group.timestamp)
        hour_groups = day_group.groupby(times.dt.hour)

        # Row for the current day.
        verts_new.append([0] * 24)
        transactions_new.append([0] * 24)
        from_nods.append([0] * 24)

        # Cycle over the hours present in this day.
        for key, indexes in hour_groups.groups.items():
            from_nod = 0
            # Register both endpoints of every transfer in this hour; count a
            # sender as "new" if it was not seen earlier in this day's group.
            for index in indexes:
                a = Node(day_group['from_address'][index])
                if g.add_node(a) or (
                        day_group['from_address'][index] not in day_group['from_address'][0:index]):
                    from_nod += 1
                g.add_node(Node(day_group['to_address'][index]))
            # Accumulate edge weights for this hour's transfers.
            hour_group = hour_groups.get_group(key)
            for fromm, tooo, value in zip(hour_group['from_address'],
                                          hour_group['to_address'],
                                          hour_group['token_qty_values']):
                g.add_edge(fromm, tooo, weight=int(value))

            # Hoisted: the adjacency-matrix sum was computed twice per hour.
            total_weight = g.get_adj_martix().sum()
            # BUGFIX: write into the row appended for *this* day ([-1]), not
            # row [i] — once zero-filled gap rows have been inserted, index i
            # lags behind and the stats landed in an earlier row. Identical
            # to the original whenever no gap days exist.
            transactions_new[-1][key] = total_weight - transaction_prev
            transaction_prev = total_weight
            verts_new[-1][key] = len(g.nodes) - len_gg
            len_gg = len(g.nodes)
            from_nods[-1][key] = from_nod
        previous_key = k

    # Flatten day x hour tables into hourly series and zero-pad up to the
    # price-series length (trailing hours with no transactions).
    flat_list1 = [item for sublist in transactions_new for item in sublist]
    flat_list1.extend([0] * (len(ftt) - len(flat_list1)))
    flat_list2 = [item for sublist in from_nods for item in sublist]
    flat_list2.extend([0] * (len(ftt) - len(flat_list2)))
    # NOTE(review): if the graph series is *longer* than the price series the
    # pad is a no-op and the column assignments below raise — confirm the
    # price files always cover the full transaction date range.
    ftt['Transactions_amount'] = flat_list1
    ftt['New_nodes'] = flat_list2

    # BUGFIX: the original wrote "preprocessed/<token>.csv.csv" by appending
    # '.csv' to a name that already carried the extension. Also ensure the
    # output directory exists.
    os.makedirs("preprocessed", exist_ok=True)
    name = "preprocessed/" + path_to_csv.split("/")[1].split(".")[0]
    ftt.to_csv(name + '.csv', index=False)