更新 evaluation.py
This commit is contained in:
parent
47f269a047
commit
ea532373ec
|
|
@ -3,7 +3,6 @@ import openai
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
import argparse
|
import argparse
|
||||||
import tiktoken
|
|
||||||
from openai import OpenAI
|
from openai import OpenAI
|
||||||
from openai import OpenAIError
|
from openai import OpenAIError
|
||||||
|
|
||||||
|
|
@ -61,19 +60,9 @@ def get_qa_response(model, question, answer):
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def num_tokens_from_message(message, model="davinci"):
    """Count the tokens that *message* encodes to under *model*'s tokenizer.

    Args:
        message: Text to tokenize.
        model: OpenAI model name used to pick the tiktoken encoding.

    Returns:
        int: number of tokens produced by encoding *message*.
    """
    return len(tiktoken.encoding_for_model(model).encode(message))
|
|
||||||
|
|
||||||
|
|
||||||
def truncate_message(prompt1, prompt2, model="davinci", max_tokens=2033):
    """Truncate *prompt1* so that prompt1 + prompt2 fits within the token budget.

    Words are dropped from the end of *prompt1* (whitespace-split) until the
    combined message no longer exceeds *max_tokens*; *prompt2* is never cut.

    Args:
        prompt1: Truncatable part of the message (e.g. retrieved context).
        prompt2: Fixed part that must be kept intact (e.g. the question).
        model: OpenAI model name used to pick the tokenizer.
        max_tokens: Token budget for the combined prompt (default 2033,
            preserving the original hard-coded limit).

    Returns:
        str: the concatenation prompt1 + prompt2, with prompt1 possibly shortened.
    """
    if num_tokens_from_message(prompt1 + prompt2, model) > max_tokens:
        # BUG FIX: the original omitted `model` in the two calls below, so the
        # budget was always computed with the default "davinci" tokenizer even
        # when the caller measured the combined length with a different model.
        truncation_length = max_tokens - num_tokens_from_message(prompt2, model)
        while num_tokens_from_message(prompt1, model) > truncation_length:
            # Drop the last word and re-measure; token counts are not linear
            # in words, so we re-check after each removal.
            prompt1 = " ".join(prompt1.split()[:-1])
    return prompt1 + prompt2
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue