1313
1414import requests
1515
16- from .auth import get_fireworks_account_id , get_fireworks_api_base , get_fireworks_api_key
17- from .common_utils import get_user_agent
16+ from .auth import get_fireworks_account_id , get_fireworks_api_base , get_fireworks_api_key , get_platform_headers
1817
1918
2019def _map_api_host_to_app_host (api_base : str ) -> str :
@@ -142,11 +141,17 @@ def create_dataset_from_jsonl(
142141 display_name : Optional [str ],
143142 jsonl_path : str ,
144143) -> Tuple [str , Dict [str , Any ]]:
145- headers = {
146- "Authorization" : f"Bearer { api_key } " ,
147- "Content-Type" : "application/json" ,
148- "User-Agent" : get_user_agent (),
149- }
144+ import os
145+
146+ # DEBUG: Check environment variable
147+ extra_headers_env = os .environ .get ("FIREWORKS_EXTRA_HEADERS" , "<NOT SET>" )
148+ print (f"[DEBUG] FIREWORKS_EXTRA_HEADERS env: { extra_headers_env } " )
149+
150+ headers = get_platform_headers (api_key = api_key , content_type = "application/json" )
151+
152+ # DEBUG: Print headers (mask auth token)
153+ debug_headers = {k : (v [:20 ] + "..." if k == "Authorization" else v ) for k , v in headers .items ()}
154+ print (f"[DEBUG] Headers being sent: { debug_headers } " )
150155 # Count examples quickly
151156 example_count = 0
152157 with open (jsonl_path , "r" , encoding = "utf-8" ) as f :
@@ -171,10 +176,8 @@ def create_dataset_from_jsonl(
171176 upload_url = f"{ api_base .rstrip ('/' )} /v1/accounts/{ account_id } /datasets/{ dataset_id } :upload"
172177 with open (jsonl_path , "rb" ) as f :
173178 files = {"file" : f }
174- up_headers = {
175- "Authorization" : f"Bearer { api_key } " ,
176- "User-Agent" : get_user_agent (),
177- }
179+ # For file uploads, omit Content-Type (let requests set multipart boundary)
180+ up_headers = get_platform_headers (api_key = api_key , content_type = None )
178181 up_resp = requests .post (upload_url , files = files , headers = up_headers , timeout = 600 )
179182 if up_resp .status_code not in (200 , 201 ):
180183 raise RuntimeError (f"Dataset upload failed: { up_resp .status_code } { up_resp .text } " )
@@ -196,12 +199,8 @@ def create_reinforcement_fine_tuning_job(
196199 # Remove from body and append as query param
197200 body .pop ("jobId" , None )
198201 url = f"{ url } ?{ urlencode ({'reinforcementFineTuningJobId' : job_id })} "
199- headers = {
200- "Authorization" : f"Bearer { api_key } " ,
201- "Content-Type" : "application/json" ,
202- "Accept" : "application/json" ,
203- "User-Agent" : get_user_agent (),
204- }
202+ headers = get_platform_headers (api_key = api_key , content_type = "application/json" )
203+ headers ["Accept" ] = "application/json"
205204 resp = requests .post (url , json = body , headers = headers , timeout = 60 )
206205 if resp .status_code not in (200 , 201 ):
207206 raise RuntimeError (f"RFT job creation failed: { resp .status_code } { resp .text } " )
@@ -217,22 +216,22 @@ def build_default_dataset_id(evaluator_id: str) -> str:
def build_default_output_model(evaluator_id: str) -> str:
    """Derive a default output-model name from an evaluator id.

    The id is lowercased and underscores become hyphens, then a
    "-rft-{4 uuid chars}" suffix is appended. When the combined name
    would exceed the 63-character limit, the base is truncated and a
    6-hex-char sha256 digest of the full base is inserted so that long
    ids sharing a prefix still yield distinct names.
    """
    max_len = 63
    suffix_len = 9  # "-rft-" plus 4 uuid characters
    hash_part_len = 7  # "-" plus 6 hex digits

    slug = evaluator_id.lower().replace("_", "-")
    tail = f"-rft-{str(uuid.uuid4())[:4]}"

    if len(slug) + suffix_len <= max_len:
        return slug + tail

    # Too long: keep a truncated head plus a digest of the untruncated
    # slug to preserve uniqueness across truncation collisions.
    digest = hashlib.sha256(slug.encode("utf-8")).hexdigest()[:6]
    keep = max_len - suffix_len - hash_part_len
    head = slug[:keep].strip("-")
    return f"{head}-{digest}{tail}"
0 commit comments