-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapi_prompting_chatgpt.py
More file actions
67 lines (60 loc) · 2.54 KB
/
api_prompting_chatgpt.py
File metadata and controls
67 lines (60 loc) · 2.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
#!/usr/bin/env python3
"""
Iterative search-query script for the ChatGPT gpt-5 API.

Reads search queries from an Excel sheet, sends each one to the OpenAI
chat-completions API together with a fixed instruction preamble and a
formatting suffix, and appends the responses to a text file.
"""
import os
import re
from datetime import datetime  # fix: used at the end of the run but was never imported

import pandas as pd

from api_keys import ChatGPT_key
from openai import OpenAI, RateLimitError

# Expose the API key via the environment so the client can pick it up.
os.environ["OPENAI_API_KEY"] = ChatGPT_key

# Initialise the OpenAI client (no arguments — it reads OPENAI_API_KEY).
client = OpenAI()

llm_model = "gpt-5.2-chat-latest"

# Working directory and input files for this run.
file_path = r"C:\Users\andre\OneDrive\Desktop\Marketing\KI-Performance\KI-Performance Schuhe"
source_file = "KI-Performance Schuhe_2026-01-20" + ".xlsx"
modify_response_filename = "normalize_response.txt"

# Instruction prepended to every prompt (sent to the model — kept verbatim).
introduction = "Beantworte zuerst ausschließlich inhaltlich die folgende Frage so, wie du sie auch beantworten würdest, wenn es keine zusätzlichen Format- oder Analyseanforderungen gäbe:"

# Dependencies:
#   pip install openai
#   pip install openpyxl
#   pip install tabulate
# API keys: https://platform.openai.com/api-keys
########################################################################################################################
def send_prompt(llm_model, prompt):
    """Send one user prompt to the given chat model and return its reply.

    On a rate-limit error the error text is returned as the result string
    instead of raising, so the calling loop can simply continue.
    """
    messages = [{"role": "user", "content": prompt}]
    try:
        completion = client.chat.completions.create(model=llm_model, messages=messages)
    except RateLimitError as e:
        return f"Rate Limit überschritten: {e}"
    return completion.choices[0].message.content.strip()
def main(row, number_name, prompt_name):
    """Build the full prompt for one spreadsheet row, query the model and
    return the response prefixed with the row's number.

    Relies on the module-level globals ``introduction``, ``modify_response``
    and ``llm_model``.
    """
    number, prompt = row[number_name], row[prompt_name]
    full_prompt = "\n".join([introduction, prompt, modify_response])
    print(f"{number}: {prompt}")
    answer = send_prompt(llm_model, full_prompt)
    # Collapse double blank lines and prefix the row number.
    return f"{number}:" + "\n" + answer.replace("\n\n", "\n")
########################################################################################################################
if __name__ == '__main__':
    # Local import so this fix is self-contained: `datetime` is used below
    # for the final timestamp but was never imported (NameError at end of run).
    from datetime import datetime

    os.chdir(file_path)

    # Formatting/analysis instructions appended to every prompt.
    with open(modify_response_filename, "r", encoding="utf-8") as f:
        modify_response = f.read()

    # Source file containing the 50 search queries.
    df_source_file = pd.read_excel(source_file, sheet_name="Suchanfragen")

    # Locate the number and query columns by partial header name.
    number_name = None
    prompt_name = None
    for n in df_source_file.columns:
        if 'Nr' in n:
            number_name = n
        if 'Such' in n:
            prompt_name = n
    if number_name is None or prompt_name is None:
        # Fail early with a clear message instead of a NameError further down.
        raise ValueError(
            "Sheet 'Suchanfragen' must contain a 'Nr' column and a 'Such' column; "
            f"found: {list(df_source_file.columns)}"
        )

    # Open the output file once instead of re-opening it for every row.
    with open("full_responses_" + llm_model + "_.txt", "a", encoding="utf-8") as out:
        for ID, row in df_source_file.iterrows():
            response = main(row, number_name, prompt_name)
            out.write(response + "\n")

    # Timestamp marking the end of the run.
    dt_str_now = datetime.now().strftime("%Y-%m-%d_%H_%M_%S")
    print(dt_str_now)