-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathapp.js
More file actions
243 lines (231 loc) · 7.14 KB
/
app.js
File metadata and controls
243 lines (231 loc) · 7.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
// GeoFabric cloning script: copies collections, indexes, data, graphs and
// RESTQLs from a source tenant to a freshly created fabric on a destination
// tenant, using the jsc8 (Macrometa C8) driver.
const jsc8 = require("jsc8");
///////////////////////////////////// User configuration
const fabricName = "_system";
const url = ""; //federation URL
const apiKey1 = ""; //API key of source tenant
const apiKey2 = ""; //API key of destination tenant
const apiKey2ID = ""; //ID of APIkey2/ name of API key
//////////////////////////////////////////
// Name of the fabric created by createGF(); filled in at runtime and read
// later by cloneData's final log line.
let newGFname = "";
//Connection to source and destination tenant
const client1 = new jsc8({
  url,
  apiKey: apiKey1,
  fabricName,
});
const client2 = new jsc8({
  url,
  apiKey: apiKey2,
  fabricName,
});
//Timeout function, safety measure for slowing down cloning process
const sleep = (milliseconds) => {
  console.log(`${milliseconds / 1000} seconds timeout`);
  // Resolve after the requested delay; callers await the returned promise.
  const delay = new Promise((resolve) => {
    setTimeout(resolve, milliseconds);
  });
  return delay;
};
//Creating GeoFabric and giving APIkey permission that it can access it
const createGF = async function () {
  try {
    // Collect every edge-location key so the new fabric spans all DCs.
    const data = await client2.getAllEdgeLocations();
    // Capture the timestamp ONCE so the readable date prefix and the epoch
    // suffix cannot disagree across a millisecond boundary (the original
    // called Date.now() twice). The "_<epoch-ms>" suffix is later parsed by
    // deletingOldVersionOfGF to decide when a clone has expired.
    const now = Date.now();
    newGFname =
      new Date(now).toString().replace(/\s/g, "-").slice(4, 15) +
      "_" +
      now.toString();
    const dcList = data.map((location) => location._key).join(",");
    await client2.createFabric(newGFname, ["root"], { dcList });
    // Grant the destination API key read/write on the new fabric.
    await client2.setDatabaseAccessLevel(apiKey2ID, newGFname, "rw");
    client2.useFabric(newGFname);
    console.log("GeoFabric is created!");
    await sleep(2000);
  } catch (e) {
    console.log(e);
    console.log(
      "Something went wrong in GF creating process, check the error log."
    );
  }
};
//Cloning index configuration on backup GF. It only save index configuration.
const cloneIndexes = async function () {
  try {
    const collections = await client2.listCollections(true);
    const indexesToClone = [];
    for (const collection of collections) {
      const collectionIndexes = await client1.getCollectionIndexes(
        collection.name
      );
      for (const index of collectionIndexes) {
        // Skip server-managed indexes: primary/edge indexes are created
        // automatically, and "expireAt" indexes belong to TTL handling.
        if (
          index.type !== "primary" &&
          index.type !== "edge" &&
          index.fields[0] !== "expireAt"
        ) {
          indexesToClone.push(index);
        }
      }
    }
    // BUG FIX: createCollection was fired without await, so the insert
    // below could race against the collection actually existing.
    await client2.createCollection("indexes");
    await client2.insertDocumentMany("indexes", indexesToClone);
    console.log("Index cloning process is DONE!");
  } catch (e) {
    console.log("Something went wrong with index cloning process");
    // e.response is only present on HTTP errors; guard before drilling in.
    console.log(e.response ? e.response.body : e);
  }
};
//This function will clone the collections to destination fabric; This function do not clone data
const cloneCollections = async function () {
  const collections = await client1.listCollections(true);
  try {
    for (const collection of collections) {
      if (collection.collectionModel === "DOC") {
        if (collection.type === 2) {
          // type 2 is a plain document collection.
          await client2.createCollection(collection.name, {
            stream: collection.hasStream ? true : false,
          });
        } else {
          // Any other type is treated as an edge collection.
          await client2.createCollection(
            collection.name,
            { stream: collection.hasStream ? true : false },
            { isEdge: true }
          );
        }
      } else if (collection.collectionModel === "KV") {
        await client2.createKVCollection(collection.name, {
          expiration: true,
          stream: collection.hasStream ? true : false,
        });
      } else if (collection.collectionModel === "DYNAMO") {
        console.log(
          `"${collection.name}" DYNAMO collection is not cloned, there is no driver support for this type of collection`
        );
      } else {
        console.log("I dont know this type");
      }
    }
    // Typo fixed: "process id DONE" -> "process is DONE".
    console.log("Collection cloning process is DONE");
  } catch (e) {
    console.log(
      "Something went wrong with collection cloning request, please make sure that destination fabric is empty"
    );
    // e.response is only present on HTTP errors; guard before drilling in.
    console.log(e.response ? e.response.body : e);
  }
};
//Cloning data
const cloneData = async function () {
  const batchSize = 1000;
  let collections = await client1.listCollections(true);
  // DYNAMO collections have no driver support, so exclude them.
  collections = collections.filter((item) => item.collectionModel !== "DYNAMO");
  try {
    for (const collection of collections) {
      const name = collection.name;
      const { count } = await client1.collection(name).count();
      const numBatches = Math.ceil(count / batchSize);
      // BUG FIX: the original inner loop was "for (i = 0; ...)" with no
      // declaration, clobbering the outer for...of binding; use a
      // dedicated batch index instead.
      for (let batch = 0; batch < numBatches; batch++) {
        //We change the offset for each iteration
        const offset = batch * batchSize;
        await sleep(100);
        const cursor = await client1.exportDataByCollectionName(name, {
          offset: offset,
          limit: batchSize,
        });
        console.log(
          `Data pulled from source fabric, collection ${name}, ${
            batch + 1
          } of ${numBatches}, server code: ${cursor.code}`
        );
        await sleep(200);
        const insert = await client2.importDocuments(
          name,
          cursor.result,
          true,
          "_key",
          true
        );
        console.log(
          `Data inserted in destination fabric, collection ${name}, ${
            batch + 1
          } of ${numBatches} >>> created: ${insert.result.created}`
        );
      }
    }
  } catch (e) {
    console.log(e);
    console.log("There is error in data cloning process");
  }
  console.log("Cloning process is finished", newGFname);
};
//Cloning graphs configuration on backup GF. It only save graphs configuration into graphs collection.
const cloneGraph = async function () {
  const graphList = await client1.getGraphs();
  const graphConfigs = [];
  try {
    // Pull the full definition of every graph from the source fabric.
    for (const entry of graphList) {
      graphConfigs.push(await client1.getGraph(entry.name));
    }
    // Persist the raw configurations as documents in a "graphs" collection.
    await client2.createCollection("graphs");
    await client2.insertDocumentMany("graphs", graphConfigs);
    console.log(`All Graphs are cloned!`);
  } catch (e) {
    console.log("Graph cant be created", e);
    console.log(e);
  }
};
//This function will clone restqls
const cloneRestqls = async function () {
  try {
    const savedQueries = await client1.getRestqls();
    // Re-create each saved query on the destination tenant, one by one.
    for (const query of savedQueries.result) {
      await client2.createRestql(query.name, query.value);
    }
    console.log("All RESTqls are cloned");
  } catch (e) {
    console.log(e);
    console.log("Something went wrong, RESTqls cant be cloned");
  }
};
// 10 days is ~900 000 000 ms, this function will deleted all GF that are older than 10 days.
const deletingOldVersionOfGF = async function () {
  const timeToDelete = 900000000; //in ms = ~10 days
  try {
    // Fabric management runs against the system fabric.
    client2.useFabric(fabricName);
    const fabrics = await client2.listUserFabrics();
    for (const fabric of fabrics) {
      if (fabric.isSystem === false) {
        // Clone fabric names are "<date>_<epoch-ms>" (see createGF); the
        // suffix records when the clone was created.
        const createdAt = Number(fabric.name.split("_")[1]);
        // BUG FIX: the original subtracted two strings and relied on
        // implicit numeric coercion; compare numbers explicitly. Names
        // without a numeric suffix yield NaN and are skipped, matching
        // the original's NaN-comparison behavior.
        if (Number.isFinite(createdAt) && Date.now() - createdAt > timeToDelete) {
          await client2.dropFabric(fabric.name);
          console.log(`"${fabric.name}" GeoFabric is deleted`);
        }
      }
    }
  } catch (e) {
    console.log(e);
    console.log("Something went wrong with GeoFabric deleting process");
  }
};
// Drive the whole cloning pipeline strictly in order, then report duration.
const runInSeries = async () => {
  const steps = [
    createGF,
    cloneCollections,
    cloneIndexes,
    cloneRestqls,
    cloneGraph,
    cloneData,
    deletingOldVersionOfGF,
  ];
  const start = Date.now();
  for (const step of steps) {
    // Each step must finish before the next begins.
    await step();
  }
  const elapsedHours = (Date.now() - start) / 1000 / 60 / 60;
  console.log(`${elapsedHours} hours`);
};
runInSeries();