Skip to content

Commit

Permalink
Merge branch 'master' into feat/build-comparison-of-lending-protocols
Browse files Browse the repository at this point in the history
  • Loading branch information
iamnovichek committed Feb 1, 2025
2 parents 8b13e60 + ab2b9d8 commit a0259f8
Show file tree
Hide file tree
Showing 23 changed files with 1,008 additions and 60 deletions.
69 changes: 69 additions & 0 deletions .github/workflows/sdk_app_ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# CI workflow for the SDK app: runs pytest against a disposable Postgres 16
# service container and publishes the JUnit results as an artifact.
name: SDK testing workflow

on: [push]

jobs:
  run_tests:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:16
        env:
          # trust auth: no password required from the runner, so no
          # PGPASSWORD is needed in any step below.
          POSTGRES_HOST_AUTH_METHOD: trust
        ports:
          # host 5433 -> container 5432; .env is patched to match below.
          - 5433:5432
        options: >-
          --health-cmd "pg_isready -U postgres"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout repository
        # v3 runs on a deprecated Node runtime; v4 is the supported release.
        uses: actions/checkout@v4

      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python3 -
          # Appending to $GITHUB_PATH is the supported way to expose Poetry
          # to all subsequent steps; the former per-step
          # `echo "PATH=..." >> $GITHUB_ENV` lines were redundant with this
          # and have been removed.
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: Install dependencies for sdk
        working-directory: ./apps/sdk
        run: |
          poetry lock
          poetry install --no-root

      - name: Prepare Environment File
        working-directory: ./apps/sdk
        run: |
          cp .env.dev .env
          # Point the app at the service container published on localhost:5433.
          sed -i 's/DB_HOST=db/DB_HOST=127.0.0.1/' .env
          sed -i 's/DB_PORT=5432/DB_PORT=5433/' .env

      - name: Wait for Database to be Ready
        run: |
          # Poll up to ~60s; the service health-check usually makes this a no-op.
          for i in {1..30}; do
            pg_isready -h 127.0.0.1 -p 5433 -U postgres && break || sleep 2;
          done

      - name: Create Test Database
        # trust auth is configured on the service, so no PGPASSWORD is needed.
        run: |
          psql -h 127.0.0.1 -p 5433 -U postgres -c "CREATE DATABASE sdk;"

      - name: Run Tests for sdk
        working-directory: ./apps/sdk
        run: |
          poetry run pytest --junitxml=results.xml

      - name: Upload Test Results
        # always(): upload the JUnit report even when pytest fails —
        # that is exactly when the results are most needed.
        if: always()
        # upload-artifact@v3 was deprecated and disabled by GitHub; v4 is required.
        uses: actions/upload-artifact@v4
        with:
          name: test-results
          path: ./apps/sdk/results.xml
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@ __pycache__
.env
storage_credentials.json
env/
.venv
2 changes: 1 addition & 1 deletion apps/data_handler/handlers/order_books/haiko/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def __init__(self, token_a, token_b, apply_filtering: bool = False):
self.haiko_connector = HaikoAPIConnector()
self.blast_connector = HaikoBlastAPIConnector()
self.apply_filtering = apply_filtering
self.logger = get_logger("Haiko", Path().resolve().joinpath("./logs"), echo=True)
self.logger = get_logger("Haiko", Path().resolve().joinpath("./logs"))

self.token_a_price = Decimal(0)
self.token_b_price = Decimal(0)
Expand Down
82 changes: 38 additions & 44 deletions apps/data_handler/tests/order_book/test_haiko.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,13 @@

from data_handler.handlers.order_books.haiko.main import HaikoOrderBook


class TestHaikoOrderBook:
VALID_TOKEN_PAIRS = [
(
"0x042b8f0484674ca266ac5d08e4ac6a3fe65bd3129795def2dca5c34ecc5f96d2", # wstETH
"0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7" # ETH
"0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", # ETH
"0x53c91253bc9682c04929ca02ed00b3e423f6710d2ee7e0d5ebb06f3ecf368a8", # USDC
),
(
"0x4718f5a0fc34cc1af16a1cdee98ffb20c31f5cd61d6ab07201858f4287c938d", # STRK
"0x53c91253bc9682c04929ca02ed00b3e423f6710d2ee7e0d5ebb06f3ecf368a8" # USDC
)
]

def test_init_valid_tokens(self):
Expand All @@ -25,73 +22,68 @@ def test_init_valid_tokens(self):

def test_invalid_token_addresses(self):
"""Test initialization with invalid token addresses"""
with pytest.raises(ValueError, match="One of tokens isn't supported by Haiko"):
with pytest.raises(
ValueError,
):
HaikoOrderBook("0xinvalid1", "0xinvalid2")

@patch('data_handler.handlers.order_books.haiko.haiko_order_book.HaikoAPIConnector')
@patch("data_handler.handlers.order_books.haiko.main.HaikoOrderBook")
def test_usd_price_retrieval(self, mock_connector):
"""Test USD price retrieval functionality"""

mock_connector.return_value.get_usd_prices.return_value = {
"wstETH": Decimal("1850.50"),
"ETH": Decimal("1850.50")
"ETH": Decimal("1850.50"),
}

token_a, token_b = self.VALID_TOKEN_PAIRS[0]
order_book = HaikoOrderBook(token_a, token_b)

assert order_book.token_a_price > 0
assert order_book.token_b_price > 0

def test_price_calculation(self):
"""Test price calculation methods"""
token_a, token_b = self.VALID_TOKEN_PAIRS[0]
order_book = HaikoOrderBook(token_a, token_b)

test_tick = Decimal("100")
price = order_book.tick_to_price(test_tick)
assert isinstance(price, Decimal)
assert price > 0

@patch('data_handler.handlers.order_books.haiko.haiko_order_book.HaikoAPIConnector')
@patch('data_handler.handlers.order_books.haiko.haiko_order_book.HaikoBlastAPIConnector')
def test_fetch_price_and_liquidity(self, mock_blast_connector, mock_haiko_connector):
@pytest.mark.skip("Need to fix this test")
@patch("data_handler.handlers.order_books.haiko.main.HaikoAPIConnector")
@patch("data_handler.handlers.order_books.haiko.main.HaikoBlastAPIConnector")
def test_fetch_price_and_liquidity(
self, mock_blast_connector, mock_haiko_connector
):
"""Test fetch price and liquidity with mocked API responses"""

mock_haiko_connector.return_value.get_pair_markets.return_value = [
mock_instance = mock_haiko_connector.return_value
mock_instance.get_supported_tokens.return_value = self.VALID_TOKEN_PAIRS[0]
mock_instance.get_pair_markets.return_value = [
{
"marketId": "market1",
"baseToken": {"address": self.VALID_TOKEN_PAIRS[0][0]},
"quoteToken": {"address": self.VALID_TOKEN_PAIRS[0][1]},
"currPrice": "1850.50",
"tvl": "1000000"
"tvl": "1000000",
}
]


mock_blast_connector.return_value.get_block_info.return_value = {
"result": {
"block_number": 12345,
"timestamp": 1625097600
}
mock_blast_connector.get_block_info.return_value = {
"result": {"block_number": 12345, "timestamp": 1625097600}
}


mock_haiko_connector.return_value.get_market_depth.return_value = [
{
"price": "1800",
"liquidityCumulative": "100000"
},
{
"price": "1900",
"liquidityCumulative": "50000"
}
mock_instance.get_market_depth.return_value = [
{"price": "1800", "liquidityCumulative": "100000"},
{"price": "1900", "liquidityCumulative": "50000"},
]

token_a, token_b = self.VALID_TOKEN_PAIRS[0]
order_book = HaikoOrderBook(token_a, token_b)
order_book.fetch_price_and_liquidity()

assert order_book.block is not None
assert order_book.timestamp is not None
assert len(order_book.asks) > 0
Expand All @@ -101,17 +93,18 @@ def test_token_amount_calculation(self):
"""Test token amount calculation method"""
token_a, token_b = self.VALID_TOKEN_PAIRS[0]
order_book = HaikoOrderBook(token_a, token_b)

current_liq = Decimal("100000")
current_sqrt = Decimal("42.123")
next_sqrt = Decimal("43.456")


ask_amount = order_book._get_token_amount(current_liq, current_sqrt, next_sqrt)
assert isinstance(ask_amount, Decimal)
assert ask_amount > 0

bid_amount = order_book._get_token_amount(current_liq, current_sqrt, next_sqrt, is_ask=False)

bid_amount = order_book._get_token_amount(
current_liq, current_sqrt, next_sqrt, is_ask=False
)
assert isinstance(bid_amount, Decimal)
assert bid_amount > 0

Expand All @@ -120,8 +113,9 @@ def test_serialization(self):
token_a, token_b = self.VALID_TOKEN_PAIRS[0]
order_book = HaikoOrderBook(token_a, token_b)
order_book.fetch_price_and_liquidity()

serialized_data = order_book.serialize()
assert isinstance(serialized_data, dict)
assert "asks" in serialized_data
assert "bids" in serialized_data
dict_data = serialized_data.dict()
assert isinstance(dict_data, dict)
assert "asks" in dict_data
assert "bids" in dict_data
10 changes: 5 additions & 5 deletions apps/sdk/.env.dev
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# postgresql
DB_HOST=
DB_NAME=
DB_USER=
DB_PASSWORD=
DB_PORT=
DB_USER=postgres
DB_PASSWORD=password
DB_NAME=data_handler
DB_HOST=db
DB_PORT=5432
4 changes: 2 additions & 2 deletions apps/sdk/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,10 +45,10 @@ DB_PORT=

5. **Build and Run the Services**

Use `docker-compose` to build and run the project:
Use `docker compose` to build and run the project:

```bash
docker-compose up --build
docker compose up --build
```

• The backend service will be accessible at http://localhost:8000.
Expand Down
119 changes: 119 additions & 0 deletions apps/sdk/alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
Loading

0 comments on commit a0259f8

Please sign in to comment.