Skip to content

Commit 0ea4cbb

Browse files
author
calvin
committed
v0.2.5- release
1 parent db0ae4e commit 0ea4cbb

51 files changed

Lines changed: 1517 additions & 676 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

admin/app/config/config_private.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
app.config['LOG_LEVEL'] = "INFO"
1515
app.config['LOG_MAX_BYTES'] = 10*1024*1024
1616
app.config['LOG_BACKUP_COUNT'] = 10
17-
app.config['SECRET_KEY'] = '146c827-a8ac-4a28-8fd5-c13e5df11529'
17+
app.config['SECRET_KEY'] = '1e5df11529'
1818
app.config['download_cool_time'] = 7200
1919
app.config['download_max_count'] = 100
2020

418 Bytes
Binary file not shown.
50 Bytes
Binary file not shown.

admin/app/services/app_center/app_manage_service.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ def search_all_apps(params):
5656
if not check_has_role(admin_user.user_id, "next_console_admin"):
5757
# 非平台管理员
5858
target_apps = target_apps.filter(
59-
UserInfo.user_company_id == admin_user.user_company_id
59+
UserInfo.user_company_id == admin_user.user_company_id,
6060
)
6161
target_apps = target_apps.order_by(
6262
AppMetaInfo.create_time.desc()
@@ -152,7 +152,11 @@ def delete_app(data):
152152
"""
153153
app_code = data.get("app_code")
154154
user_id = int(data.get("user_id"))
155-
app_info = AppMetaInfo.query.filter_by(app_code=app_code).first()
155+
app_info = AppMetaInfo.query.filter(
156+
AppMetaInfo.app_code == app_code,
157+
AppMetaInfo.environment == '开发',
158+
AppMetaInfo.app_status != '已删除'
159+
).first()
156160
if not app_info:
157161
return next_console_response(error_status=True, error_message="应用不存在!", error_code=1002)
158162
if app_info.app_type != '个人应用':

admin/app/services/app_center/app_run_service.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1316,11 +1316,10 @@ def transform_to_message(task_record, global_params):
13161316
result = message_item
13171317
# 保存消息
13181318
answer_msg.msg_content = json.dumps(message_item)
1319-
answer_msg.msg_format = "workflow"
1319+
answer_msg.msg_format = "customize"
13201320
db.session.add(answer_msg)
13211321
db.session.commit()
13221322
global_params["message_queue"].put(result)
1323-
13241323
else:
13251324
continue
13261325

admin/app/services/app_center/expermental_features.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def parallel_llm_node_execute(params, task_record, global_params):
3535
for item in params[parallel_attr]:
3636
new_sub_params = {k: params[k] for k in params if k not in params[parallel_attr]}
3737
new_sub_params[parallel_attr] = [item]
38-
workflow_node_llm_params = load_llm_prams(new_sub_params, task_record, global_params)
38+
workflow_node_llm_params = load_llm_prams(new_sub_params, task_record, global_params, imgUrl='base64')
3939
future = global_params["executor"].submit(single_llm_sub_node_execute,
4040
llm_client, workflow_node_llm_params,
4141
task_record.to_dict(), global_params)
@@ -119,7 +119,7 @@ def single_llm_sub_node_execute(llm_client, workflow_node_llm_params, task_recor
119119
except GeneratorExit:
120120
pass
121121
except Exception as e3:
122-
app.logger.error(f"调用基模型异常:{str(e3)}")
122+
app.logger.error(f"调用基模型异常:{str(e3)}, {workflow_node_llm_params}")
123123
msg_content += "\n\n **对不起,模型服务正忙,请稍等片刻后重试,或者可以试试切换其他模型~**"
124124
if task_record.get("workflow_node_enable_message") and global_params["stream"]:
125125
if task_record.get("workflow_node_message_schema_type") == "messageFlow":

admin/app/services/app_center/file_reader.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,11 @@ def file_reader_node_execute(task_params, task_record, global_params):
4949
if transform_engine == "PyMuPDF":
5050
for k in config:
5151
task_params[k] = config[k]
52-
output_resources.append(pymupdf_reader(resource, task_params))
52+
res = pymupdf_reader(resource, task_params)
53+
if isinstance(res, list):
54+
output_resources.extend(res)
55+
else:
56+
output_resources.append(res)
5357
elif src_format in ("docx", "doc") and resource.resource_format in ("docx", "doc"):
5458
if transform_engine == "python-docx":
5559
pass
@@ -66,10 +70,9 @@ def file_reader_node_execute(task_params, task_record, global_params):
6670
output_resources.append(html2text_reader(resource, config))
6771
elif src_format == "未知":
6872
output_resources.append(text_reader(resource, config))
69-
if mode == 'list':
73+
if mode == 'list' or (src_format == 'pdf'
74+
and transform_engine == 'PyMuPDF' and tgt_format in ('jpg', 'png', 'webp')):
7075
node_results = {"output_resources": [resource for resource in output_resources]}
71-
elif src_format == 'pdf' and transform_engine == 'PyMuPDF' and tgt_format in ('jpg', 'png', 'webp'):
72-
node_results = {"output_resources": [resource for resource in output_resources[0]]}
7376
else:
7477
node_results = {"output_resource": output_resources[0] if output_resources else {}}
7578
task_record.task_result = json.dumps(node_results)

admin/app/services/app_center/llm_node_service.py

Lines changed: 46 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import json
22
from pathlib import Path
3-
3+
import base64
44
from openai import OpenAI
55
from app.models.next_console.next_console_model import NextConsoleMessage
66
from app.services.next_console.llm import NextConsoleLLMClient, LLMInstance
@@ -43,6 +43,8 @@ def llm_node_execute(params, task_record, global_params):
4343
msg_reasoning_content = ""
4444
msg_token_used = 0
4545
answer_msg = None
46+
task_status = '执行中'
47+
task_trace_log = ''
4648
if workflow_node_llm_params.get("stream", False):
4749
all_message_format = [msg_schema.get("schema_type") for msg_schema in task_record.workflow_node_message_schema]
4850
output_flag = task_record.workflow_node_enable_message and global_params["stream"] and 'messageFlow' in all_message_format
@@ -101,30 +103,33 @@ def llm_node_execute(params, task_record, global_params):
101103
pass
102104
except Exception as e3:
103105
app.logger.error(f"调用基模型异常:{str(e3)}")
106+
task_status = "异常"
107+
task_trace_log = str(e3)
104108
msg_content += "\n\n **对不起,模型服务正忙,请稍等片刻后重试,或者可以试试切换其他模型~**"
105-
if task_record.workflow_node_enable_message and global_params["stream"]:
106-
if task_record.workflow_node_message_schema_type == "messageFlow":
107-
except_result = {
108-
"id": "",
109-
"session_id": task_record.session_id,
110-
"qa_id": task_record.qa_id,
111-
"msg_parent_id": task_record.msg_id,
112-
"created": 0,
113-
"model": '',
114-
"object": "chat.completion",
115-
"choices": [
116-
{
117-
"finish_reason": "error",
118-
"index": 0,
119-
"delta": {
120-
"content": msg_content,
121-
"role": "assistant"
122-
},
109+
if task_record.workflow_node_enable_message and global_params["stream"] and output_flag:
110+
except_result = {
111+
"id": "",
112+
"session_id": task_record.session_id,
113+
"qa_id": task_record.qa_id,
114+
"msg_parent_id": task_record.msg_id,
115+
'msg_id': answer_msg.msg_id,
116+
"created": 0,
117+
"model": '',
118+
"object": "chat.completion",
119+
"choices": [
120+
{
121+
"finish_reason": "error",
122+
"index": 0,
123+
"delta": {
124+
"reasoning_content": "",
125+
"content": msg_content,
126+
"role": "assistant"
127+
},
123128

124-
}
125-
]
126-
}
127-
global_params["message_queue"].put(except_result)
129+
}
130+
]
131+
}
132+
global_params["message_queue"].put(except_result)
128133
finally:
129134
# 更新llm节点记录
130135
# 如果有RAG引用,则添加到消息中
@@ -155,7 +160,10 @@ def llm_node_execute(params, task_record, global_params):
155160
"reasoning_content": msg_reasoning_content,
156161
})
157162
task_record.end_time = datetime.now()
158-
task_record.task_status = "已完成"
163+
if task_status == '执行中':
164+
task_status = "已完成"
165+
task_record.task_status = task_status
166+
task_record.task_trace_log = task_trace_log
159167
task_record.task_token_used = msg_token_used
160168
db.session.add(task_record)
161169
db.session.commit()
@@ -178,24 +186,24 @@ def llm_node_execute(params, task_record, global_params):
178186
task_record.task_token_used = msg_token_used
179187
except Exception as e:
180188
task_record.task_status = "异常"
189+
task_record.task_trace_log = str(e)
181190
app.logger.error(f"workflow_chat error: {e}")
182191
msg_content = '对不起,模型服务正忙,请稍等片刻后重试,或者可以试试切换其他模型~'
183-
task_record.task_trace_log = str(e)
192+
184193
if task_record.workflow_node_rag_ref_show:
185194
reference_md = add_reference_md(task_record, global_params)
186195
msg_content += reference_md
187196
task_record.task_result = json.dumps({
188197
"content": msg_content,
189198
"reasoning_content": msg_reasoning_content,
190199
})
191-
task_record.task_status = "已完成"
192200
task_record.end_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
193201
db.session.add(task_record)
194202
db.session.commit()
195203
return True
196204

197205

198-
def load_llm_prams(task_params, task_record, global_params):
206+
def load_llm_prams(task_params, task_record, global_params, imgUrl='url'):
199207
"""
200208
载入大模型的所有参数
201209
"""
@@ -244,10 +252,20 @@ def load_llm_prams(task_params, task_record, global_params):
244252
).all()
245253
for resource in target_attachments:
246254
if resource.resource_type in ("image", "media") and resource.resource_download_url:
255+
url = app.config.get("domain") + "/next_console/knowledge_center/resource_images?resource_id={}".format(resource.id)
256+
if imgUrl == 'base64':
257+
# 如果是base64编码的图片
258+
try:
259+
with open(resource.resource_path, "rb") as image_file:
260+
encoded_string = base64.b64encode(image_file.read()).decode('utf-8')
261+
url = "data:image/png;base64,{}".format(encoded_string)
262+
except Exception as e:
263+
app.logger.error(f"读取图片文件失败:{e}")
264+
continue
247265
image_list.append({
248266
"type": "image_url",
249267
"image_url": {
250-
"url": app.config.get("domain") + "/next_console/knowledge_center/resource_images?resource_id={}".format(resource.id),
268+
"url": url,
251269
"detail": "auto"
252270
}
253271
})

admin/app/services/app_center/node_params_service.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ def load_task_result(task_record):
7979
validate(exec_result, task_record.workflow_node_rpjs)
8080
except Exception as e:
8181
print(e)
82-
task_record.task_trace_log = str(e)
82+
task_record.task_trace_log = f"{exec_result}, 验证失败: {str(e)}"
8383
db.session.add(task_record)
8484
db.session.commit()
8585
return
@@ -225,15 +225,15 @@ def load_properties(properties, global_params, isStart=False):
225225
if properties.get(k).get("value"):
226226
data[k] = properties.get(k).get("value")
227227
continue
228-
elif ref:
228+
elif ref != '' and ref is not None:
229229
# 固定值
230230
if isinstance(ref, (str, int)):
231231
if required and ref == '':
232232
raise ValueError(f"Required field '{k}' is missing in global parameters.")
233233
if properties.get(k).get("type") == "boolean":
234-
if ref == 'false':
234+
if ref == 'false' or ref is False:
235235
ref = False
236-
elif ref == 'true':
236+
elif ref == 'true' or ref is True:
237237
ref = True
238238
else:
239239
ref = None

admin/app/services/app_center/workflow_service.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1385,7 +1385,7 @@ def init_file_reader_node(new_node):
13851385
"showSubArea": False,
13861386
"type": "string",
13871387
"typeName": "string",
1388-
"description": "文件名称",
1388+
"description": "文件格式",
13891389
"value": ""
13901390
},
13911391
"size": {

0 commit comments

Comments (0)