This repository was archived by the owner on May 3, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 35
Expand file tree
/
Copy pathipfs_storage.py
More file actions
263 lines (214 loc) · 8.44 KB
/
ipfs_storage.py
File metadata and controls
263 lines (214 loc) · 8.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
from abc import ABC
from io import IOBase
import re
import json
from requests import get, Response, post
from typing import Any, Dict, List, Optional, Sequence, TextIO, BinaryIO, Union, cast
from thirdweb.common.error import (
DuplicateFileNameException,
FetchException,
UploadException,
)
from thirdweb.common.keys import derive_client_id_from_secret_key
from thirdweb.constants.urls import (
DEFAULT_IPFS_GATEWAY,
TW_STORAGE_SERVER_URL,
)
from thirdweb.core.helpers.storage import (
replace_file_properties_with_hashes,
replace_gateway_url_with_hash,
replace_hash_with_gateway_url,
resolve_gateway_url,
)
from thirdweb.types.storage import CidWithFileName, UriWithMetadata
class IpfsStorage(ABC):
    """
    Upload and download files from IPFS.

    ```python
    from thirdweb import ThirdwebSDK

    # You can customize this to a supported network or your own RPC URL
    network = "mumbai"
    sdk = ThirdwebSDK(network)

    # Now all the IPFS functions will be available on the sdk.storage name space
    sdk.storage.get("<IPFS_HASH>")
    ```
    """

    # Gateway URL used to resolve "ipfs://" URIs; always ends with "/".
    _gateway_url: str
    # Optional thirdweb secret key used to authenticate uploads and CDN reads.
    _secret_key: Optional[str]

    def __init__(self, secret_key: Optional[str], gateway_url: Optional[str] = None):
        self._secret_key = secret_key
        if gateway_url is not None:
            # Normalize the caller-supplied gateway to exactly one trailing slash.
            self._gateway_url = re.sub(r"\/$", "", gateway_url) + "/"
        elif secret_key is not None:
            # Derive a dedicated thirdweb CDN gateway from the secret key.
            client_id = derive_client_id_from_secret_key(secret_key)
            self._gateway_url = f"https://{client_id}.ipfscdn.io/ipfs/"
        else:
            self._gateway_url = DEFAULT_IPFS_GATEWAY

    def get(self, hash: str) -> Any:
        """
        Gets IPFS data at a given hash and returns the data.

        :param hash: hash of the data to get.
        :returns: parsed JSON (with "ipfs://" URIs rewritten to gateway URLs)
            when the response body is JSON, otherwise the raw response text.
        """
        res = self._get(hash)
        try:
            data = res.json()
        except ValueError:
            # Body is not JSON (Response.json() raises a ValueError subclass);
            # fall back to the raw text instead of swallowing all exceptions.
            return res.text
        if isinstance(data, (dict, list, str)):
            return replace_hash_with_gateway_url(data, "ipfs://", self._gateway_url)
        return data

    def upload(
        self,
        data: Union[TextIO, BinaryIO, str],
    ) -> str:
        """
        Uploads data to IPFS and returns the hash of the data.

        :param data: data to upload (file-like object or string).
        :returns: "ipfs://" URI of the uploaded data.
        """
        cid = self.upload_batch([data], 0)
        # upload_batch returns "ipfs://<cid>"; the single file is named "0".
        return f"{cid}/0"

    def upload_batch(
        self,
        files: Sequence[Union[TextIO, BinaryIO, str, Dict[str, Any]]],
        file_start_number: int = 0,
    ) -> str:
        """
        Uploads a list of files to IPFS and returns the hash.

        :param files: list of files to upload.
        :param file_start_number: optional number to start the file names with.
        :returns: "ipfs://" URI of the uploaded directory.
        """
        cid_with_filename = self._upload_batch_with_cid(
            files,
            file_start_number,
        )
        return f"ipfs://{cid_with_filename.cid}"

    def upload_metadata(
        self,
        metadata: Dict[str, Any],
    ) -> str:
        """
        Uploads metadata to IPFS and returns the hash of the metadata.

        :param metadata: metadata to upload.
        :returns: "ipfs://" URI of the uploaded metadata.
        """
        uri_with_metadata = self.upload_metadata_batch([metadata], 0)
        return uri_with_metadata.metadata_uris[0]

    def upload_metadata_batch(
        self,
        metadatas: Sequence[Dict[str, Any]],
        file_start_number: int = 0,
    ) -> UriWithMetadata:
        """
        Uploads a list of metadata to IPFS and returns the hash.

        :param metadatas: list of metadata to upload.
        :param file_start_number: optional number to start the file names with.
        :returns: base URI of the directory plus one URI per metadata object.
        """
        metadata_to_upload = self._batch_upload_properties(metadatas)
        cid_with_filename = self._upload_batch_with_cid(
            metadata_to_upload, file_start_number
        )

        base_uri = f"ipfs://{cid_with_filename.cid}/"
        # FIX: each metadata URI is the base URI plus that file's own name
        # (the original interpolated a garbled placeholder instead of
        # {filename}, producing identical, meaningless URIs).
        metadata_uris = [
            f"{base_uri}{filename}" for filename in cid_with_filename.filenames
        ]

        return UriWithMetadata(base_uri, metadata_uris)

    """
    INTERNAL FUNCTIONS
    """

    def _get(self, hash: str) -> Response:
        # Resolve "ipfs://" URIs through the configured gateway.
        hash = resolve_gateway_url(hash, "ipfs://", self._gateway_url)
        # Only send the secret key to thirdweb's own CDN gateways.
        headers = (
            {"x-secret-key": self._secret_key}
            if ".ipfscdn.io" in self._gateway_url
            else {}
        )
        res = get(hash, headers=headers)
        if not res.ok:
            raise FetchException(f"Could not get {hash}")
        return res

    def _batch_upload_properties(self, metadatas: Sequence[Dict[str, Any]]):
        # Rewrite any gateway URLs back to "ipfs://" hashes before upload.
        # (Renamed the loop variable - the original shadowed `metadatas`.)
        sanitized_metadatas = [
            replace_gateway_url_with_hash(metadata, "ipfs://", self._gateway_url)
            for metadata in metadatas
        ]

        # Collect every file-like property nested anywhere in each metadata.
        file_lists = [
            self._build_file_properties_map(metadata, [])
            for metadata in sanitized_metadatas
        ]
        files_to_upload = [file for file_list in file_lists for file in file_list]

        # No embedded files - the sanitized metadata can be uploaded directly.
        if len(files_to_upload) == 0:
            return sanitized_metadatas

        cid_with_filename = self._upload_batch_with_cid(
            cast(List[Union[TextIO, BinaryIO, str, Dict[str, Any]]], files_to_upload)
        )

        # FIX: each uploaded file resolves to "<cid>/<filename>" (the original
        # interpolated a garbled placeholder instead of {filename}).
        cids = [
            f"{cid_with_filename.cid}/{filename}"
            for filename in cid_with_filename.filenames
        ]
        # NOTE(review): this passes the *unsanitized* `metadatas`, as the
        # original did - confirm whether `sanitized_metadatas` was intended.
        final_metadata = replace_file_properties_with_hashes(metadatas, cids)

        return final_metadata

    def _build_file_properties_map(
        self,
        object: Union[Dict[str, Any], List[Any]],
        files: List[IOBase],
    ) -> List[IOBase]:
        """
        Recursively collects every file-like (IOBase) value nested in
        ``object`` into ``files`` and returns the accumulated list.
        """
        if isinstance(object, list):
            # Plain loop: the original used a list comprehension purely for
            # side effects, discarding the built list.
            for item in object:
                self._build_file_properties_map(item, files)
        else:
            for val in object.values():
                if isinstance(val, IOBase):
                    files.append(cast(IOBase, val))
                elif isinstance(val, (dict, list)):
                    self._build_file_properties_map(val, files)

        return files

    def _upload_batch_with_cid(
        self,
        files: Sequence[Union[TextIO, BinaryIO, str, Dict[str, Any]]],
        file_start_number: int = 0,
    ) -> CidWithFileName:
        """
        Uploads the given files to the thirdweb storage server and returns
        the resulting CID together with the uploaded file names.

        :param files: files (file-like, str, or {"name", "data"} dicts) to upload.
        :param file_start_number: number used to name the first file.
        :raises DuplicateFileNameException: if two files resolve to the same name.
        :raises UploadException: if the storage server rejects the upload.
        """
        form: List[Any] = []
        file_names: List[str] = []
        for i, file in enumerate(files):
            file_name = f"{file_start_number + i}"
            file_data = cast(Union[str, Dict[str, Any]], file)

            if not isinstance(file, (str, dict)):
                # File-like object: preserve its extension in the uploaded name.
                if file.name:
                    extension = file.name.split(".")[-1]
                    file_name = f"{file_start_number + i}.{extension}"
            elif (
                isinstance(file, dict)
                and file.get("name") is not None
                and file.get("data") is not None
            ):
                # Explicitly named payload.
                file_name = file["name"]
                file_data = file["data"]
            else:
                # Plain string or dict payload - serialize to JSON.
                file_data = json.dumps(file)

            if file_name in file_names:
                raise DuplicateFileNameException(file_name)

            file_names.append(file_name)
            form.append(("file", (f"files/{file_name}", file_data)))

        res = post(
            f"{TW_STORAGE_SERVER_URL}/ipfs/upload",
            files=form,
            headers={
                "x-secret-key": self._secret_key,
            },
        )

        # FIX: check the status BEFORE parsing the body - a failed upload may
        # return a non-JSON body, and the original called res.json()
        # unconditionally, raising JSONDecodeError instead of UploadException.
        if not res.ok:
            raise UploadException(f"Failed to upload files to IPFS. {res.text}")

        body = res.json()
        return CidWithFileName(body["IpfsHash"], file_names)