
Commit 8b0f014

feat(log): cleanup
1 parent: 4666072

2 files changed: 0 additions & 40 deletions


ai/analyzer.py

Lines changed: 0 additions & 36 deletions
@@ -215,7 +215,6 @@ def _process_file_sync(
 
         # Log batch processing start
         batch_start_time = time.time()
-        logger.info(f"Generating embeddings for {rel_path}: batch {batch_num}/{num_batches} ({len(batch)} chunks)")
 
         embedding_futures = []
 
@@ -231,11 +230,6 @@ def _process_file_sync(
         for idx, chunk_doc, future, embedding_start_time in embedding_futures:
             try:
                 emb = future.result(timeout=EMBEDDING_TIMEOUT) # Add timeout to prevent hanging indefinitely
-                embedding_duration = time.time() - embedding_start_time
-
-                # Log slow embedding generation (> 5 seconds)
-                if embedding_duration > 5.0:
-                    logger.warning(f"Slow embedding API response for {rel_path} chunk {idx}: {embedding_duration:.2f}s total")
             except concurrent.futures.TimeoutError:
                 elapsed = time.time() - embedding_start_time
 
@@ -268,26 +262,6 @@ def _process_file_sync(
                 else:
                     diagnostic_info.append(f" - Future status: Pending/Unknown")
 
-                # Generate curl command for debugging
-                try:
-                    payload = {
-                        "model": _embedding_client.model,
-                        "input": chunk_doc.text,
-                    }
-                    curl_command = _embedding_client._generate_curl_command(payload)
-                except Exception as e:
-                    curl_command = f"Failed to generate curl command: {e}"
-
-                diagnostic_info.extend([
-                    f" - The future.result() call timed out after {EMBEDDING_TIMEOUT}s",
-                    f" - Embedding API state:",
-                    f" - API timeout: {_embedding_client.timeout}s",
-                    f" - Max retries: {_embedding_client.max_retries}",
-                    f" - Curl command to reproduce:",
-                    f"{curl_command}"
-                ])
-
-                logger.error("\n".join(diagnostic_info))
                 emb = None
                 failed_count += 1
             except Exception as e:
@@ -303,11 +277,6 @@ def _process_file_sync(
                     _load_sqlite_vector_extension(conn2)
                     _insert_chunk_vector_with_retry(conn2, fid, rel_path, idx, emb)
                     saved_count += 1
-                    db_duration = time.time() - db_start_time
-
-                    # Log slow database operations (> 2 seconds)
-                    if db_duration > 2.0:
-                        logger.warning(f"Slow database insert for {rel_path} chunk {idx}: {db_duration:.2f}s")
                 finally:
                     conn2.close()
                 embedded_any = True
@@ -320,11 +289,6 @@ def _process_file_sync(
                     logger.exception("Failed to write chunk-insert error to disk for %s chunk %d", rel_path, idx)
             else:
                 logger.debug(f"Skipping chunk {idx} for {rel_path} - no embedding vector available")
-
-        # Log batch completion with timing and status
-        batch_duration = time.time() - batch_start_time
-        batch_status = "FAILED" if failed_count > 0 and saved_count == 0 else ("PARTIAL" if failed_count > 0 else "SUCCESS")
-        logger.info(f"Batch {batch_num}/{num_batches} for {rel_path} - Status: {batch_status} - {saved_count} saved, {failed_count} failed, {batch_duration:.2f}s elapsed")
 
         return {"stored": True, "embedded": embedded_any, "skipped": False}
     except Exception:
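Most of the removed lines are inline timing and status logs (batch start and summary, slow-embedding warnings above 5 s, slow-insert warnings above 2 s), plus the curl-based timeout diagnostics. If that visibility is ever wanted back without re-scattering timing code through the loop, both warning sites could be covered by one small context manager. The sketch below is not part of this commit: it assumes the module-level logger already used in analyzer.py, and the helper name log_if_slow and its call sites are purely illustrative.

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def log_if_slow(label: str, threshold: float):
    # Warn when the wrapped block takes longer than `threshold` seconds.
    start = time.time()
    try:
        yield
    finally:
        duration = time.time() - start
        if duration > threshold:
            logger.warning(f"Slow operation ({label}): {duration:.2f}s")

# Hypothetical call sites mirroring the removed warnings:
# with log_if_slow(f"embedding {rel_path} chunk {idx}", 5.0):
#     emb = future.result(timeout=EMBEDDING_TIMEOUT)
# with log_if_slow(f"db insert {rel_path} chunk {idx}", 2.0):
#     _insert_chunk_vector_with_retry(conn2, fid, rel_path, idx, emb)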

ai/openai.py

Lines changed: 0 additions & 4 deletions
@@ -263,10 +263,6 @@ def embed_text(self, text: str, file_path: str = "<unknown>", chunk_index: int =
         if resp and hasattr(resp, 'data') and len(resp.data) > 0:
             embedding = resp.data[0].embedding
             if embedding and isinstance(embedding, list):
-                _embedding_logger.info(
-                    "Embedding succeeded",
-                    extra={"request_id": request_id, "file": file_path, "chunk_index": chunk_index},
-                )
                 return embedding
             else:
                 raise EmbeddingError(f"Invalid embedding format in response")
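The only change here drops the per-chunk success log. If per-request traceability is still useful while debugging, one option (again, not part of this commit) is to keep the same structured fields but emit them at DEBUG level so they only appear when that logger is explicitly enabled. The field names request_id, file, and chunk_index come from the removed lines; the logger name and helper below are assumptions for illustration.

import logging

_embedding_logger = logging.getLogger("embeddings")  # logger name is an assumption

def _log_embedding_success(request_id: str, file_path: str, chunk_index: int) -> None:
    # Same `extra` payload as the removed info-level call, emitted at DEBUG instead.
    _embedding_logger.debug(
        "Embedding succeeded",
        extra={"request_id": request_id, "file": file_path, "chunk_index": chunk_index},
    )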
