-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtasks.py
More file actions
144 lines (106 loc) · 4.86 KB
/
tasks.py
File metadata and controls
144 lines (106 loc) · 4.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
import os
import gc
import traceback
import shutil
import requests
from django.db import IntegrityError
from rest_framework.exceptions import ValidationError
from celery import shared_task, states
from .models import TaskStatus, Document
from .services import FetchDataService, DataConsolidationService, CreateFaissTreeService, GetResponseFromGeminiService
from core.models import LogSystem
@shared_task
def set_database_and_train_data(faiss_save_dir: str):
    """Celery task: rebuild the Document table and the FAISS index from scratch.

    Pipeline: wipe all existing ``Document`` rows, download the
    stackexchange-overflow Python dataset from Hugging Face in QID batches,
    consolidate each batch into ``Document`` records, then build the FAISS
    embedding index under ``faiss_save_dir``. Progress is reported throughout
    via the ``TaskStatus`` row keyed by this task's id.

    Args:
        faiss_save_dir: directory where the FAISS index is written.

    Side effects:
        Deletes all Document rows; creates/updates a TaskStatus row; writes
        (or, on failure, removes) ``faiss_save_dir``; logs unexpected errors
        to LogSystem. Never raises — every failure is recorded in TaskStatus.
    """
    task_id = set_database_and_train_data.request.id

    def _update_status(status: str, result: str) -> None:
        # Single place for progress reporting; keeps the pipeline readable.
        TaskStatus.objects.filter(task_id=task_id).update(status=status, result=result)

    def _cleanup_faiss_dir() -> None:
        # Remove a partially written index so a failed run never leaves a
        # stale/corrupt FAISS directory behind for the next attempt.
        if os.path.exists(faiss_save_dir):
            shutil.rmtree(faiss_save_dir)

    try:
        TaskStatus.objects.get_or_create(
            task_id=task_id,
            defaults={'status': states.PENDING, 'result': 'Iniciando processamento'}
        )
        # Full rebuild: drop every existing document before re-importing.
        Document.objects.all().delete()
        fetch_data = FetchDataService(
            "https://huggingface.co/api/datasets/tyson0420/stackexchange-overflow-fil-python/parquet/default/train",
            "https://huggingface.co/api/datasets/tyson0420/stackexchange-overflow-fil-python/parquet/default/test"
        )

        _update_status(states.PENDING, 'Analisando dados e criando lotes de QIDs')
        qid_batches = fetch_data.get_qid_batches()
        total_batches = len(qid_batches)
        total_documents_saved = 0
        _update_status(states.PENDING, f'Processando {total_batches} lotes de dados')

        for batch_idx, qid_batch in enumerate(qid_batches, 1):
            _update_status(
                states.PENDING,
                f'Processando lote {batch_idx}/{total_batches} - Baixando dados'
            )
            batch_df = fetch_data.fetch_data_by_qids(qid_batch)

            _update_status(
                states.PENDING,
                f'Processando lote {batch_idx}/{total_batches} - Consolidando dados'
            )
            data_consolidation = DataConsolidationService(batch_df)
            docs_saved = data_consolidation.consolidate_batch()
            total_documents_saved += docs_saved

            # Free the batch DataFrame eagerly — batches can be large and the
            # loop runs many times inside one worker process.
            del batch_df, data_consolidation
            gc.collect()
            _update_status(
                states.PENDING,
                f'Lote {batch_idx}/{total_batches} concluído - {docs_saved} documentos salvos'
            )

        _update_status(
            states.PENDING,
            f'Dados consolidados: {total_documents_saved} documentos. Criando embeddings e índice FAISS'
        )
        create_faiss_index = CreateFaissTreeService()
        create_faiss_index.create_faiss_index(
            batch_size=512,
            faiss_save_path=faiss_save_dir,
            task_id=task_id
        )

        _update_status(
            states.SUCCESS,
            f'Processamento concluído! {total_documents_saved} documentos processados e índice FAISS criado'
        )
        fetch_data.clear_cache()
        gc.collect()
    except requests.HTTPError as e:
        # Fix: previously this branch left any partial index directory behind.
        _cleanup_faiss_dir()
        _update_status(states.FAILURE, f'Erro ao baixar dados: {str(e)}')
    except ValidationError as e:
        _cleanup_faiss_dir()
        _update_status(states.FAILURE, f'Erro de validação: {str(e)}')
    except IntegrityError as e:
        LogSystem.objects.create(error=str(e), stacktrace=traceback.format_exc())
        # Fix: previously this branch left any partial index directory behind.
        _cleanup_faiss_dir()
        _update_status(states.FAILURE, f'Erro de integridade no banco: {str(e)}')
    except Exception as e:
        LogSystem.objects.create(error=str(e), stacktrace=traceback.format_exc())
        _update_status(states.FAILURE, f'Erro inesperado: {str(e)}')
        _cleanup_faiss_dir()
def get_response_from_vector_base(question: str, faiss_path: str) -> str:
    """Answer a user question using the FAISS-backed Gemini service.

    The question is first run through a greeting classifier; anything not
    labelled ``'other'`` is treated as a greeting and the classifier's reply
    is returned directly, skipping the vector-base lookup.

    Args:
        question: the user's question text.
        faiss_path: filesystem path of the FAISS index to load.

    Returns:
        The greeting reply, or the answer generated from the vector base.

    Raises:
        ValidationError: if ``question`` is empty.
    """
    if not question:
        raise ValidationError('no sentence provided for search.')
    # Fixed misspelled local name ('get_reponse...') and the empty docstring.
    gemini_service = GetResponseFromGeminiService(faiss_path=faiss_path)
    greeting_reply = gemini_service.classify_greeting(question)
    if greeting_reply != 'other':
        return greeting_reply
    return gemini_service.get_answer(question)