-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy patheval_sword.py
More file actions
44 lines (39 loc) · 1.03 KB
/
eval_sword.py
File metadata and controls
44 lines (39 loc) · 1.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
import json
import gzip
import sys
from tqdm import tqdm
from process import (
MockGPT3, GPT3, read_cache, write_cache, get_key
)
# Parse command-line arguments.
#   1 positional arg  -> dry run: no API calls, canned responses.
#   2 positional args -> input path + OpenAI API key for real queries.
if len(sys.argv) == 2:
    json_fp = sys.argv[1]
    dry = True
elif len(sys.argv) == 3:
    json_fp, api_key = sys.argv[1:]
    # Imported lazily so a dry run does not require the openai package.
    import openai
    openai.api_key = api_key.strip()
    dry = False
    # Reuse previously cached responses so reruns avoid repeat queries.
    cache = read_cache('cache_chris_GPT3.jsonl')
    gpt3 = GPT3(cache)
else:
    # Same exception type as before, but now with a usage hint.
    raise ValueError(
        'usage: eval_sword.py INPUTS.json.gz [OPENAI_API_KEY]')
# Load the task-id -> query-inputs mapping from the gzipped JSON file.
with gzip.open(json_fp, 'rt') as f:
    tid_to_inputs = json.load(f)['inputs']
count = 0
# Retry whole passes until every task id has a successful response.
# NOTE(review): write_cache is imported but never called here — verify that
# GPT3 persists its cache itself, otherwise responses are lost on exit.
while count < len(tid_to_inputs):
    count = 0
    tid_to_outputs = {}
    for tid, inputs in tqdm(tid_to_inputs.items(), total=len(tid_to_inputs)):
        if dry:
            # Canned placeholder response. BUGFIX: count it as a success —
            # previously `count` stayed 0 in dry mode, so the while loop
            # above never terminated.
            response = {'choices': ['foo']}
            count += 1
        else:
            try:
                response = gpt3.make_query(**inputs)
                count += 1
            except Exception as e:
                # Best-effort: log, record None, and retry on the next pass.
                print(e)
                response = None
        tid_to_outputs[tid] = response
    print('%d/%d' % (count, len(tid_to_inputs)))
    # Rewrite the outputs after every pass so partial progress survives
    # an interruption.
    with gzip.open(json_fp.replace('.json.gz', '.out.json.gz'), 'wt') as f:
        f.write(json.dumps(tid_to_outputs, indent=2))