
Commit cfa1026

Merge pull request #109 from Datura-ai/upgrade-version
Upgrade version
2 parents c0315b3 + 66ed153 commit cfa1026

File tree

11 files changed: +87 -392 lines changed


cortext/axon.py (+1 -1)

@@ -1,7 +1,7 @@
 import bittensor
 import bittensor as bt
 from substrateinterface import Keypair
-from bittensor.errors import SynapseDendriteNoneException
+from bittensor.core.errors import SynapseDendriteNoneException


 class CortexAxon(bt.axon):
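
The only change here tracks bittensor's package reorganization: in 8.x the exception types moved from the top-level bittensor.errors module into bittensor.core.errors. If a codebase needed to run against both major versions (which this commit does not attempt), a guarded import is one option; a sketch under that assumption:

try:
    # bittensor >= 8.x: error types live under bittensor.core
    from bittensor.core.errors import SynapseDendriteNoneException
except ImportError:
    # older bittensor releases exposed them at the top level
    from bittensor.errors import SynapseDendriteNoneException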

cortext/dendrite.py (+1 -1)

@@ -56,7 +56,7 @@ async def call_stream(
         async with session.post(
             url,
             headers=synapse.to_headers(),
-            json=synapse.dict(),
+            json=synapse.model_dump(),
         ) as response:
             # Use synapse subclass' process_streaming_response method to yield the response chunks
             try:
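
bittensor 8.x brings pydantic v2 with it, and v2 replaced BaseModel.dict() with model_dump(), so the request-body serialization changes accordingly. A self-contained sketch of the v2 call (the Synapse model below is a made-up stand-in, not the repo's class):

from typing import Optional
from pydantic import BaseModel

class Synapse(BaseModel):                       # hypothetical stand-in for the real synapse
    prompt: str
    seed: Optional[int] = None

payload = Synapse(prompt="hello").model_dump()  # pydantic v2; replaces the removed v1 .dict()
print(payload)                                  # {'prompt': 'hello', 'seed': None}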

cortext/protocol.py (+6 -6)

@@ -232,7 +232,7 @@ class StreamPrompting(bt.StreamingSynapse):
         "This attribute is immutable and cannot be updated.",
     )

-    completion: str = pydantic.Field(
+    completion: Optional[str] = pydantic.Field(
         None,
         title="Completion",
         description="Completion status of the current StreamPrompting object. "
@@ -347,15 +347,15 @@ def to_headers(self) -> dict:
             headers.update(
                 {
                     f"bt_header_axon_{k}": str(v)
-                    for k, v in self.axon.dict().items()
+                    for k, v in self.axon.model_dump().items()
                     if v is not None
                 }
             )
         if self.dendrite:
             headers.update(
                 {
                     f"bt_header_dendrite_{k}": str(v)
-                    for k, v in self.dendrite.dict().items()
+                    for k, v in self.dendrite.model_dump().items()
                     if v is not None
                 }
             )
@@ -379,9 +379,9 @@ async def process_streaming_response(self, response: StreamingResponse, organic=
                 self.completion += tokens
                 yield tokens
         except asyncio.TimeoutError as err:
-            self.completion += remain_chunk
-            yield remain_chunk
-
+            # self.completion += remain_chunk
+            # yield remain_chunk
+            pass

     def extract_response_json(self, response: StreamingResponse) -> dict:
         headers = {
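
Two things happen in this file. First, the pydantic v2 migration: v2 no longer turns a None default into an implicitly optional field, so completion must be annotated Optional[str] outright, and the .dict() calls in to_headers become model_dump() as elsewhere. Second, the asyncio.TimeoutError handler now silently drops the pending chunk instead of appending and yielding it. A minimal illustration of the annotation rule (Demo and Implicit are illustrative names, not from the repo):

from typing import Optional
from pydantic import BaseModel, ValidationError

class Demo(BaseModel):
    completion: Optional[str] = None     # v2: allowing None must be explicit

class Implicit(BaseModel):
    completion: str = None               # v1 silently made this Optional; v2 does not

print(Demo(completion=None))             # ok: completion=None
try:
    Implicit(completion=None)            # v2 rejects None against a plain str field
except ValidationError as err:
    print(f"rejected with {err.error_count()} validation error(s)")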

cursor/app/core/query_to_validator.py (+1 -1)

@@ -4,7 +4,7 @@
 from cursor.app.models import ChatRequest
 from cursor.app.core.protocol import StreamPrompting
 from cursor.app.core.config import config
-from cursor.app.core.dendrite import CortexDendrite
+from cortext.dendrite import CortexDendrite
 import traceback

 subtensor = bt.subtensor(network="finney")

requirements.txt (+1 -1)

@@ -1,4 +1,4 @@
-bittensor==6.9.4
+bittensor==8.4.3
 datasets==2.*
 envparse==0.2.0
 openai>=1.3.2, ==1.*
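
The 6.9.4 to 8.4.3 jump spans two major versions and is what drives the rest of this diff: bittensor 8.x reorganized its module layout (bittensor.core) and moved to pydantic v2. A quick sanity check that an environment resolved the intended release (a generic check, not from the repo):

import bittensor
print(bittensor.__version__)   # expect "8.4.3" after installing from requirements.txt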

server/app/database.py (+20 -1)

@@ -1,9 +1,11 @@
+import time
 import psycopg2
 import os
-from contextlib import asynccontextmanager

 DATABASE_URL = os.getenv("DATABASE_URL")
 TABEL_NAME = 'query_resp_data'
+TIME_EXPIRATION = 3600 * 24 * 10  # delete records after 10 days since its creation
+LAST_EXECUTION = time.time()
 # PostgreSQL connection parameters
 conn = psycopg2.connect(DATABASE_URL)
@@ -49,6 +51,7 @@ async def create_table(app):
     CREATE INDEX IF NOT EXISTS miner_id_index ON {TABEL_NAME} (miner_uid);
     CREATE INDEX IF NOT EXISTS miner_hot_key_index ON {TABEL_NAME} (miner_hot_key);
     CREATE INDEX IF NOT EXISTS idx_score_sim_timestamp ON {TABEL_NAME} (score, similarity, timestamp);
+    CREATE INDEX IF NOT EXISTS idx_block__cycle_epoch_num_ ON {TABEL_NAME} (block_num, cycle_num, epoch_num);
     """
     cur.execute(create_index_query)
     conn.commit()
@@ -57,4 +60,20 @@ async def create_table(app):
     except Exception as e:
         print(f"Error creating table: {e}")

+
+def delete_records():
+    global TIME_EXPIRATION, LAST_EXECUTION
+    if (time.time() - LAST_EXECUTION) < TIME_EXPIRATION:
+        return
+    LAST_EXECUTION = time.time()
+    timestamp = time.time() - TIME_EXPIRATION
+    conn = psycopg2.connect(DATABASE_URL)
+    cur = conn.cursor()
+    query_str = f"""
+    delete from query_resp_data where timestamp <= {timestamp}
+    """
+    cur.execute(query_str)
+    conn.commit()
+
+
 create_table(None)
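
The new delete_records() purges rows older than ten days, but note the gate: it returns early unless a full TIME_EXPIRATION window has passed since LAST_EXECUTION, and LAST_EXECUTION is initialized at import time, so the first purge cannot happen before ten days of process uptime regardless of how often the function is invoked. Below is a sketch of the same cleanup using a parameterized query and explicit connection cleanup; this is an alternative shape, not what the commit does, and it assumes the timestamp column stores Unix epoch seconds:

import time
import psycopg2
from psycopg2 import sql

def delete_expired(database_url: str, table: str = "query_resp_data",
                   max_age_seconds: int = 3600 * 24 * 10) -> None:
    cutoff = time.time() - max_age_seconds
    conn = psycopg2.connect(database_url)
    try:
        with conn.cursor() as cur:
            # Identifier quoting for the table name, placeholder binding for the value
            cur.execute(
                sql.SQL("DELETE FROM {} WHERE timestamp <= %s").format(sql.Identifier(table)),
                (cutoff,),
            )
        conn.commit()
    finally:
        conn.close()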

server/app/main.py (+8 -1)

@@ -1,10 +1,17 @@
+from apscheduler.schedulers.background import BackgroundScheduler
+from apscheduler.triggers.cron import CronTrigger
 from contextlib import asynccontextmanager
 from fastapi import FastAPI, Depends, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
+
 from . import curd, models, schemas
-from .database import create_table, conn, cur
+from .database import create_table, conn, cur, delete_records
 from typing import List

+scheduler = BackgroundScheduler()
+scheduler.add_job(delete_records, CronTrigger(day_of_week="mon", hour=10, minute=0))
+scheduler.start()
+

 @asynccontextmanager
 async def lifespan(app: FastAPI):
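
The cleanup job is wired in with APScheduler's BackgroundScheduler, which runs jobs on a background thread alongside the FastAPI app; the CronTrigger fires every Monday at 10:00 (though, as noted above, delete_records may still no-op until its ten-day gate opens). A self-contained sketch of the same pattern with a placeholder job:

import time
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger

def cleanup_job() -> None:
    print("weekly cleanup fired")     # placeholder body for illustration

scheduler = BackgroundScheduler()
scheduler.add_job(cleanup_job, CronTrigger(day_of_week="mon", hour=10, minute=0))
scheduler.start()

try:
    while True:
        time.sleep(60)                # keep the main thread alive; a web server does this for you
finally:
    scheduler.shutdown()              # stop the scheduler thread on exit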

server/requirements.txt (+2 -1)

@@ -2,4 +2,5 @@ fastapi
 uvicorn[standard]
 psycopg2-binary
 sqlalchemy
-pydantic
+pydantic
+apscheduler
