-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathestimate_usage.py
More file actions
64 lines (54 loc) · 1.35 KB
/
estimate_usage.py
File metadata and controls
64 lines (54 loc) · 1.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import sys, os
from collections import OrderedDict
# import exrex
import numpy as np
import pandas as pd
import random
import scipy
import scipy.special
from termcolor import colored
# from text_histogram import histogram
from datetime import datetime
from process import (
GPT3,
MockGPT3,
read_cache,
)
from util import escape_ansi, set_seed
from transformers import GPT2TokenizerFast
# Shared GPT-2 tokenizer instance used for every token-count estimate below.
tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")


def count_tokens(s):
    """Return how many GPT-2 BPE tokens the string *s* encodes to."""
    token_ids = tokenizer.encode(s)
    return len(token_ids)
if __name__ == '__main__':
    # GPT = GPT3 if 'submit' in argv else MockGPT3
    # print('Using ' + GPT.__name__)

    # Load the cached GPT-3 API responses and estimate token usage from them.
    cache_fname = f'cache_GPT3.jsonl'
    cache = read_cache(cache_fname)

    # Maps (year, month, day) -> tokens consumed that day, in insertion order.
    usage = OrderedDict()
    total = 0
    for key, response in cache.items():
        # Skip the bookkeeping entry that records the cache's own filename.
        if key == '__filename__':
            continue
        try:
            # Cache keys are request parameters; recover the prompt text,
            # and append every returned completion to count billed tokens.
            prompt = dict(key)['prompt']
            completion = ''.join(choice['text'] for choice in response['choices'])
            n_tokens = count_tokens(prompt + completion)

            # Bucket the token count by the calendar day of the API call.
            created = datetime.fromtimestamp(response['created'])
            day = (created.year, created.month, created.day)
            usage[day] = usage.get(day, 0) + n_tokens
            total += n_tokens
        except Exception as e:
            # Best-effort: a malformed cache entry is reported, not fatal.
            print(e)
    print(total)

    # Print per-day usage alongside a running cumulative total.
    cumu = 0
    for day, n_tokens in usage.items():
        cumu += n_tokens
        print(day, n_tokens, cumu)