GPT-2 custom chatbot, single interaction. I am not using any threading in this implementation, yet it still prints "multiprocessing_chunksize". Here we try to build a chatbot with GPT-2 and interact with a custom model trained on a custom dataset (type: persona).
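For context, the pickled model that this script downloads and loads could have been produced roughly as in the sketch below. This is only an assumption about the training step: the dataset path, training args, and output filename are placeholders, and the only thing taken from the script itself is that the model is a simpletransformers ConvAIModel serialized with pickle.

# Hypothetical sketch: how a pickled ConvAIModel could be produced (not part of the original script)
import pickle
from simpletransformers.conv_ai import ConvAIModel

# Placeholder training args; not the ones actually used for the downloaded model
train_args = {
    "num_train_epochs": 3,
    "overwrite_output_dir": True,
}

# Fine-tune a GPT-2 ConvAI model on a persona-style (PERSONA-CHAT format) JSON dataset
model = ConvAIModel("gpt2", "gpt2", use_cuda=False, args=train_args)
model.train_model("data/custom_persona_dataset.json")  # placeholder path

# Serialize the whole model object so it can later be restored with pickle.load
with open("for_load.tar.gz", "wb") as f:
    pickle.dump(model, f)

The single-interaction script itself follows.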
#from bot_make.utils import helper
import shutil
from simpletransformers.conv_ai import ConvAIModel
import os
import pickle
from termcolor import colored
import requests
def cache_select(cache_name):
    # Unpack a pre-downloaded cache archive from base_model/ into the cache directory
    shutil.unpack_archive('base_model/' + cache_name, './single_interact/cache_dir', 'zip')
    print('done')
def cache_down(url):
    # Download the cache_dir archive, unpack it into single_interact/cache_dir, then remove the zip
    try:
        r = requests.get(url, allow_redirects=True)
        open('single_interact/cache_dir.zip', 'wb').write(r.content)
        shutil.unpack_archive('single_interact/cache_dir.zip', './single_interact/cache_dir', 'zip')
        os.remove("single_interact/cache_dir.zip")
        return True
    except Exception:
        return False
def model_down(url):
    # Download the pickled model file into single_interact/
    try:
        r = requests.get(url, allow_redirects=True)
        open('single_interact/for_load.tar.gz', 'wb').write(r.content)
        return True
    except Exception:
        return False
print('Model and Cache down')
cache_ck = cache_down('https://bucket.s3.amazonaws.com/generated_data_1_model_2_cache_dir.zip')
model_ck = model_down('https://bucket.s3.amazonaws.com/generated_data_1_model_2.tar.gz')
print('Start BOT')
def model_load(model_name):
    # Load the pickled ConvAIModel from single_interact/
    return pickle.load(open('single_interact/' + model_name, 'rb'))
#cache_select('base_model_small_cache_dir.zip')
my_model = model_load('for_load.tar.gz')  # the downloaded file is a pickle, despite the .tar.gz extension
#my_model.interact()
os.chdir('single_interact/')
a = ['hello']  # running conversation history
def single_response(user_input):
    # Get a single reply from the model and append the returned history to the running list
    reply, history = my_model.interact_single(message=user_input, history=a)
    for i in history:
        a.append(i)
    return reply
print("\n===============================================")
print("================= Conv AI.V ===================")
print("===============================================\n")
while 1:
    talk = input(colored("You: ", 'green'))
    if talk == 'exit':
        break
    response = single_response(talk)
    # Deduplicate the history; note that converting to a set does not preserve turn order
    myset = set(a)
    a = list(myset)
    #print('Bot : ', response)
    print(colored("Bot:", 'red'), response)
    #print('History of re : ', a)
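One thing to note about the loop above: rebuilding the history with set() removes duplicate turns but also scrambles their order. If the model should see the history in conversational order, an order-preserving deduplication such as the sketch below could be used instead; this is only an illustration, not part of the original script.

# Possible order-preserving replacement for the set-based dedup inside the loop:
# dict.fromkeys keeps the first occurrence of each turn and preserves insertion order (Python 3.7+)
a = list(dict.fromkeys(a))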