@@ -457,26 +457,30 @@ async def call_anthropic(prompt, temperature, model, max_tokens=2048, top_p=1, t
457
457
await asyncio .sleep (0.5 )
458
458
459
459
async def call_claude(messages, temperature, model, max_tokens, top_p, top_k):
    """Send a chat-completion request to the Anthropic Claude API.

    Anthropic's Messages API takes the system prompt as a top-level
    ``system`` kwarg rather than as a ``{"role": "system"}`` entry inside
    ``messages``, so any system message is extracted before the call.

    Args:
        messages: List of ``{"role": ..., "content": ...}`` chat messages.
        temperature: Sampling temperature, forwarded to the API.
        model: Anthropic model identifier (e.g. a ``claude-*`` name).
        max_tokens: Maximum number of tokens to generate.
        top_p: Nucleus-sampling parameter, forwarded to the API.
        top_k: Top-k sampling parameter, forwarded to the API.

    Returns:
        The text of the first content block of the response, or ``None``
        if the API call fails (errors are logged, not raised).
    """
    try:
        bt.logging.info(f"calling claude for {messages} with temperature: {temperature}, model: {model}, max_tokens: {max_tokens}, top_p: {top_p}, top_k: {top_k}")

        # The API rejects role="system" inside `messages`; pull it out
        # and pass it via the dedicated `system` kwarg instead.
        system_prompt = None
        filtered_messages = []
        for message in messages:
            if message["role"] == "system":
                system_prompt = message["content"]
            else:
                filtered_messages.append(message)

        kwargs = {
            "max_tokens": max_tokens,
            "messages": filtered_messages,
            "model": model,
            # BUGFIX: these sampling parameters were accepted and logged
            # but never forwarded, so callers' settings were silently
            # ignored. The Anthropic Messages API supports all three.
            "temperature": temperature,
            "top_p": top_p,
            "top_k": top_k,
        }

        if system_prompt:
            kwargs["system"] = system_prompt

        message = await claude_client.messages.create(**kwargs)
        bt.logging.debug(f"validator response is {message.content[0].text}")
        return message.content[0].text
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt, SystemExit
        # and asyncio.CancelledError still propagate. API/parse errors are
        # logged and the function falls through to return None, preserving
        # the original best-effort contract.
        bt.logging.error(f"error in call_claude {traceback.format_exc()}")
480
484
481
485
async def call_stability (prompt , seed , steps , cfg_scale , width , height , samples , sampler ):
482
486
# bt.logging.info(f"calling stability for {prompt, seed, steps, cfg_scale, width, height, samples, sampler}")
0 commit comments