1
1
import asyncio
2
2
import concurrent
3
3
import random
4
- import traceback
5
4
import threading
6
5
7
6
import torch
8
7
import time
8
+ import requests
9
9
10
10
from black .trans import defaultdict
11
11
from substrateinterface import SubstrateInterface
12
12
from functools import partial
13
- from typing import Tuple , List
13
+ from typing import Tuple
14
14
import bittensor as bt
15
15
from bittensor import StreamingSynapse
16
16
import cortext
17
-
17
+ import json
18
18
from starlette .types import Send
19
19
20
20
from cortext .protocol import IsAlive , StreamPrompting , ImageResponse , Embeddings
@@ -93,6 +93,7 @@ def __init__(self, config, cache: QueryResponseCache, loop=None):
93
93
self .loop .create_task (self .process_queries_from_database ())
94
94
95
95
self .saving_datas = []
96
+ self .url = None
96
97
daemon_thread = threading .Thread (target = self .saving_resp_answers_from_miners )
97
98
daemon_thread .start ()
98
99
@@ -104,11 +105,28 @@ def saving_resp_answers_from_miners(self):
104
105
time .sleep (1 )
105
106
else :
106
107
bt .logging .info (f"saving responses..." )
108
+ start_time = time .time ()
107
109
self .cache .set_cache_in_batch ([item .get ('synapse' ) for item in self .saving_datas ],
108
110
block_num = self .current_block ,
109
111
cycle_num = self .current_block // 36 , epoch_num = self .current_block // 360 )
110
112
bt .logging .info (f"total saved responses is { len (self .saving_datas )} " )
111
113
self .saving_datas .clear ()
114
+ if not self .url :
115
+ return
116
+ bt .logging .info ("sending datas to central server." )
117
+ json_data = [item .get ('synapse' ).dict () for item in self .saving_datas ]
118
+ headers = {
119
+ 'Content-Type' : 'application/json' # Specify that we're sending JSON
120
+ }
121
+ response = requests .post (self .url , data = json .dumps (json_data ), headers = headers )
122
+ # Check the response
123
+ if response .status_code == 200 :
124
+ bt .logging .info (
125
+ f"Successfully sent data to central server. { time .time () - start_time } sec total elapsed for sending to central server." )
126
+ else :
127
+ bt .logging .info (
128
+ f"Failed to send data. Status code: { response .status_code } { time .time () - start_time } sec total elapsed for sending to central server." )
129
+ bt .logging .info (f"Response:{ response .text } " )
112
130
113
131
async def run_sync_in_async(self, fn):
    """Offload the blocking callable *fn* to the event loop's default
    thread-pool executor and await its result, keeping the loop responsive."""
    executor_future = self.loop.run_in_executor(None, fn)
    return await executor_future
@@ -162,7 +180,7 @@ async def update_and_refresh(self):
162
180
await self .initialize_uids_and_capacities ()
163
181
bt .logging .info ("Metagraph refreshed." )
164
182
165
- async def query_miner (self , uid , query_syn : cortext .ALL_SYNAPSE_TYPE ):
183
+ async def query_miner (self , uid , query_syn : cortext .ALL_SYNAPSE_TYPE , organic = True ):
166
184
query_syn .uid = uid
167
185
if query_syn .streaming :
168
186
if uid is None :
@@ -195,6 +213,7 @@ async def handle_response(resp):
195
213
target_axon = axon ,
196
214
synapse = query_syn ,
197
215
timeout = query_syn .timeout ,
216
+ organic = organic
198
217
)
199
218
await handle_response (response )
200
219
else :
@@ -256,7 +275,7 @@ async def perform_synthetic_queries(self):
256
275
uid = self .task_mgr .assign_task (query_syn )
257
276
if uid is None :
258
277
bt .logging .debug (f"No available uids for synthetic query process." )
259
- synthetic_tasks .append ((uid , self .query_miner (uid , query_syn )))
278
+ synthetic_tasks .append ((uid , self .query_miner (uid , query_syn , organic = False )))
260
279
261
280
bt .logging .debug (f"{ time .time () - start_time } elapsed for creating and submitting synthetic queries." )
262
281
@@ -283,7 +302,7 @@ async def perform_synthetic_queries(self):
283
302
f"synthetic queries and answers has been processed in cache successfully. total times { time .time () - start_time } " )
284
303
285
304
def pop_synthetic_tasks_max_100_per_miner (self , synthetic_tasks ):
286
- batch_size = 10000
305
+ batch_size = 3000
287
306
max_query_cnt_per_miner = 50
288
307
batch_tasks = []
289
308
remain_tasks = []
0 commit comments