@@ -75,6 +75,25 @@ export class DeepSeekApi implements LLMApi {
7575 }
7676 }
7777
78+ // Detect and fix message ordering: ensure the first non-system message is a user message
79+ const filteredMessages : ChatOptions [ "messages" ] = [ ] ;
80+ let hasFoundFirstUser = false ;
81+
82+ for ( const msg of messages ) {
83+ if ( msg . role === "system" ) {
84+ // Keep all system messages
85+ filteredMessages . push ( msg ) ;
86+ } else if ( msg . role === "user" ) {
87+ // User message directly added
88+ filteredMessages . push ( msg ) ;
89+ hasFoundFirstUser = true ;
90+ } else if ( hasFoundFirstUser ) {
91+ // After finding the first user message, all subsequent non-system messages are retained.
92+ filteredMessages . push ( msg ) ;
93+ }
94+ // If hasFoundFirstUser is false and it is not a system message, it will be skipped.
95+ }
96+
7897 const modelConfig = {
7998 ...useAppConfig . getState ( ) . modelConfig ,
8099 ...useChatStore . getState ( ) . currentSession ( ) . mask . modelConfig ,
@@ -85,7 +104,7 @@ export class DeepSeekApi implements LLMApi {
85104 } ;
86105
87106 const requestPayload : RequestPayload = {
88- messages,
107+ messages : filteredMessages ,
89108 stream : options . config . stream ,
90109 model : modelConfig . model ,
91110 temperature : modelConfig . temperature ,
0 commit comments