# -*- coding: utf-8 -*-
"""
Created on 10.09.2021

@author: Nikita
"""
from gpt_3.preprocessing import (encoding_text,
                                 decoding_text,
                                 get_text_gpt3)

from utils.misc.throttling import rate_limit
from aiogram.dispatcher import FSMContext
from aiogram import types
from loader import dp
import asyncio


async def zero_output(message: types.Message) -> None:
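    """Apologize to the user when the generated reply comes back empty."""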
    await message.answer(text="Упс...")  # "Oops..."
    await asyncio.sleep(1)
    # "Somehow I couldn't formulate an answer, try rephrasing your message"
    await message.answer(text="Что-то я не смог сформулировать ответ, "
                              "попробуйте перефразировать сообщение 🙁")


@rate_limit(3, 'message')
@dp.message_handler()
async def processing_message(message: types.Message, state: FSMContext) -> None:
    """
    Receive a message in Russian and generate a response.
    """
    await types.ChatActions.typing()

    data_storage = await state.get_data()
    text = message.text.lower()
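
    # Remember the current message and chat metadata for the next turn.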
    await state.update_data(history_text=text)
    await state.update_data(chat_id=message.chat.id)
    await state.update_data(first_name=message.from_user.first_name)

    input_text, check_question = encoding_text(text_encode=text)
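
    # `data_storage` was captured before `history_text` was updated, so this
    # detects when the user repeats the previous message.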
    if text == data_storage.get('history_text'):
        await message.answer(text="Ой... Где-то я уже это видел! 🥱")  # "Oops... I've seen this somewhere before!"

        # input_text = torch.cat([context.chat_data['output'][-1], input_text[0]], dim=0)
        # input_text = input_text.unsqueeze(0)

        return

    text_gpt3 = get_text_gpt3(text_gpt=input_text, check_question=check_question)
    output_text = decoding_text(text_decode=text_gpt3)
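
    # Fall back to an apology when the model returns an empty string.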
    if len(output_text.split()) < 1:
        await zero_output(message)
        return

    await message.answer(text=output_text)
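
    # If a dialogue history already exists, append the new turn to it...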
    if 'input' in data_storage:
        await state.update_data(input=data_storage['input'] + [input_text],
                                output=data_storage['output'] + [text_gpt3])
        return
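
    # ...otherwise create the history with the first exchange.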
    data_history = {'input': [input_text],
                    'output': [text_gpt3]}

    await state.update_data(data_history)