refactor: 重构 API 接口模块,整合为 api_wrappers 目录结构

- 将独立 API 模块 (daily, stock_basic, trade_cal) 整合至 api_wrappers/
- 重写 sync.py 使用新的 wrapper 结构,支持更多同步功能
- 更新测试文件适配新的模块结构
- 添加 pytest.ini 配置文件
This commit is contained in:
2026-02-21 03:43:30 +08:00
parent e81d39ae0d
commit 9965ce5706
15 changed files with 1042 additions and 952 deletions

View File

@@ -5,29 +5,30 @@ Tests the daily interface implementation against api.md requirements:
- tor 换手率
- vr 量比
"""
import pytest
import pandas as pd
from src.data.daily import get_daily
from src.data.api_wrappers import get_daily
# Expected output fields according to api.md
EXPECTED_BASE_FIELDS = [
'ts_code', # 股票代码
'trade_date', # 交易日期
'open', # 开盘价
'high', # 最高价
'low', # 最低价
'close', # 收盘价
'pre_close', # 昨收价
'change', # 涨跌额
'pct_chg', # 涨跌幅
'vol', # 成交量
'amount', # 成交额
"ts_code", # 股票代码
"trade_date", # 交易日期
"open", # 开盘价
"high", # 最高价
"low", # 最低价
"close", # 收盘价
"pre_close", # 昨收价
"change", # 涨跌额
"pct_chg", # 涨跌幅
"vol", # 成交量
"amount", # 成交额
]
EXPECTED_FACTOR_FIELDS = [
'turnover_rate', # 换手率 (tor)
'volume_ratio', # 量比 (vr)
"turnover_rate", # 换手率 (tor)
"volume_ratio", # 量比 (vr)
]
@@ -36,19 +37,19 @@ class TestGetDaily:
def test_fetch_basic(self):
"""Test basic daily data fetch with real API."""
result = get_daily('000001.SZ', start_date='20240101', end_date='20240131')
result = get_daily("000001.SZ", start_date="20240101", end_date="20240131")
assert isinstance(result, pd.DataFrame)
assert len(result) >= 1
assert result['ts_code'].iloc[0] == '000001.SZ'
assert result["ts_code"].iloc[0] == "000001.SZ"
def test_fetch_with_factors(self):
"""Test fetch with tor and vr factors."""
result = get_daily(
'000001.SZ',
start_date='20240101',
end_date='20240131',
factors=['tor', 'vr'],
"000001.SZ",
start_date="20240101",
end_date="20240131",
factors=["tor", "vr"],
)
assert isinstance(result, pd.DataFrame)
@@ -61,25 +62,26 @@ class TestGetDaily:
def test_output_fields_completeness(self):
"""Verify all required output fields are returned."""
result = get_daily('600000.SH')
result = get_daily("600000.SH")
# Verify all base fields are present
assert set(EXPECTED_BASE_FIELDS).issubset(result.columns.tolist()), \
assert set(EXPECTED_BASE_FIELDS).issubset(result.columns.tolist()), (
f"Missing fields: {set(EXPECTED_BASE_FIELDS) - set(result.columns)}"
)
def test_empty_result(self):
"""Test handling of empty results."""
# 使用真实 API 测试无效股票代码的空结果
result = get_daily('INVALID.SZ')
result = get_daily("INVALID.SZ")
assert isinstance(result, pd.DataFrame)
assert result.empty
def test_date_range_query(self):
"""Test query with date range."""
result = get_daily(
'000001.SZ',
start_date='20240101',
end_date='20240131',
"000001.SZ",
start_date="20240101",
end_date="20240131",
)
assert isinstance(result, pd.DataFrame)
@@ -87,7 +89,7 @@ class TestGetDaily:
def test_with_adj(self):
"""Test fetch with adjustment type."""
result = get_daily('000001.SZ', adj='qfq')
result = get_daily("000001.SZ", adj="qfq")
assert isinstance(result, pd.DataFrame)
@@ -95,11 +97,14 @@ class TestGetDaily:
def test_integration():
"""Integration test with real Tushare API (requires valid token)."""
import os
token = os.environ.get('TUSHARE_TOKEN')
token = os.environ.get("TUSHARE_TOKEN")
if not token:
pytest.skip("TUSHARE_TOKEN not configured")
result = get_daily('000001.SZ', start_date='20240101', end_date='20240131', factors=['tor', 'vr'])
result = get_daily(
"000001.SZ", start_date="20240101", end_date="20240131", factors=["tor", "vr"]
)
# Verify structure
assert isinstance(result, pd.DataFrame)
@@ -112,6 +117,6 @@ def test_integration():
assert field in result.columns, f"Missing factor field: {field}"
if __name__ == '__main__':
if __name__ == "__main__":
# 运行 pytest 单元测试真实API调用
pytest.main([__file__, '-v'])
pytest.main([__file__, "-v"])

View File

@@ -9,7 +9,7 @@ import pytest
import pandas as pd
from pathlib import Path
from src.data.storage import Storage
from src.data.stock_basic import _get_csv_path
from src.data.api_wrappers.api_stock_basic import _get_csv_path
class TestDailyStorageValidation:

View File

@@ -5,6 +5,7 @@ Tests the sync module's full/incremental sync logic for daily data:
- Incremental sync when local data exists (from last_date + 1)
- Data integrity validation
"""
import pytest
import pandas as pd
from unittest.mock import Mock, patch, MagicMock
@@ -17,6 +18,8 @@ from src.data.sync import (
get_next_date,
DEFAULT_START_DATE,
)
from src.data.storage import Storage
from src.data.client import TushareClient
class TestDateUtilities:
@@ -63,30 +66,32 @@ class TestDataSync:
def test_get_all_stock_codes_from_daily(self, mock_storage):
"""Test getting stock codes from daily data."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
mock_storage.load.return_value = pd.DataFrame({
'ts_code': ['000001.SZ', '000001.SZ', '600000.SH'],
})
mock_storage.load.return_value = pd.DataFrame(
{
"ts_code": ["000001.SZ", "000001.SZ", "600000.SH"],
}
)
codes = sync.get_all_stock_codes()
assert len(codes) == 2
assert '000001.SZ' in codes
assert '600000.SH' in codes
assert "000001.SZ" in codes
assert "600000.SH" in codes
def test_get_all_stock_codes_fallback(self, mock_storage):
"""Test fallback to stock_basic when daily is empty."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
# First call (daily) returns empty, second call (stock_basic) returns data
mock_storage.load.side_effect = [
pd.DataFrame(), # daily empty
pd.DataFrame({'ts_code': ['000001.SZ', '600000.SH']}), # stock_basic
pd.DataFrame({"ts_code": ["000001.SZ", "600000.SH"]}), # stock_basic
]
codes = sync.get_all_stock_codes()
@@ -95,21 +100,23 @@ class TestDataSync:
def test_get_global_last_date(self, mock_storage):
"""Test getting global last date."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
mock_storage.load.return_value = pd.DataFrame({
'ts_code': ['000001.SZ', '600000.SH'],
'trade_date': ['20240102', '20240103'],
})
mock_storage.load.return_value = pd.DataFrame(
{
"ts_code": ["000001.SZ", "600000.SH"],
"trade_date": ["20240102", "20240103"],
}
)
last_date = sync.get_global_last_date()
assert last_date == '20240103'
assert last_date == "20240103"
def test_get_global_last_date_empty(self, mock_storage):
"""Test getting last date from empty storage."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
@@ -120,18 +127,23 @@ class TestDataSync:
def test_sync_single_stock(self, mock_storage):
"""Test syncing a single stock."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch('src.data.sync.get_daily', return_value=pd.DataFrame({
'ts_code': ['000001.SZ'],
'trade_date': ['20240102'],
})):
with patch("src.data.sync.Storage", return_value=mock_storage):
with patch(
"src.data.sync.get_daily",
return_value=pd.DataFrame(
{
"ts_code": ["000001.SZ"],
"trade_date": ["20240102"],
}
),
):
sync = DataSync()
sync.storage = mock_storage
result = sync.sync_single_stock(
ts_code='000001.SZ',
start_date='20240101',
end_date='20240102',
ts_code="000001.SZ",
start_date="20240101",
end_date="20240102",
)
assert isinstance(result, pd.DataFrame)
@@ -139,15 +151,15 @@ class TestDataSync:
def test_sync_single_stock_empty(self, mock_storage):
"""Test syncing a stock with no data."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch('src.data.sync.get_daily', return_value=pd.DataFrame()):
with patch("src.data.sync.Storage", return_value=mock_storage):
with patch("src.data.sync.get_daily", return_value=pd.DataFrame()):
sync = DataSync()
sync.storage = mock_storage
result = sync.sync_single_stock(
ts_code='INVALID.SZ',
start_date='20240101',
end_date='20240102',
ts_code="INVALID.SZ",
start_date="20240101",
end_date="20240102",
)
assert result.empty
@@ -158,40 +170,46 @@ class TestSyncAll:
def test_full_sync_mode(self, mock_storage):
"""Test full sync mode when force_full=True."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch('src.data.sync.get_daily', return_value=pd.DataFrame()):
with patch("src.data.sync.Storage", return_value=mock_storage):
with patch("src.data.sync.get_daily", return_value=pd.DataFrame()):
sync = DataSync()
sync.storage = mock_storage
sync.sync_single_stock = Mock(return_value=pd.DataFrame())
mock_storage.load.return_value = pd.DataFrame({
'ts_code': ['000001.SZ'],
})
mock_storage.load.return_value = pd.DataFrame(
{
"ts_code": ["000001.SZ"],
}
)
result = sync.sync_all(force_full=True)
# Verify sync_single_stock was called with default start date
sync.sync_single_stock.assert_called_once()
call_args = sync.sync_single_stock.call_args
assert call_args[1]['start_date'] == DEFAULT_START_DATE
assert call_args[1]["start_date"] == DEFAULT_START_DATE
def test_incremental_sync_mode(self, mock_storage):
"""Test incremental sync mode when data exists."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
sync.sync_single_stock = Mock(return_value=pd.DataFrame())
# Mock existing data with last date
mock_storage.load.side_effect = [
pd.DataFrame({
'ts_code': ['000001.SZ'],
'trade_date': ['20240102'],
}), # get_all_stock_codes
pd.DataFrame({
'ts_code': ['000001.SZ'],
'trade_date': ['20240102'],
}), # get_global_last_date
pd.DataFrame(
{
"ts_code": ["000001.SZ"],
"trade_date": ["20240102"],
}
), # get_all_stock_codes
pd.DataFrame(
{
"ts_code": ["000001.SZ"],
"trade_date": ["20240102"],
}
), # get_global_last_date
]
result = sync.sync_all(force_full=False)
@@ -199,28 +217,30 @@ class TestSyncAll:
# Verify sync_single_stock was called with next date
sync.sync_single_stock.assert_called_once()
call_args = sync.sync_single_stock.call_args
assert call_args[1]['start_date'] == '20240103'
assert call_args[1]["start_date"] == "20240103"
def test_manual_start_date(self, mock_storage):
"""Test sync with manual start date."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
sync.sync_single_stock = Mock(return_value=pd.DataFrame())
mock_storage.load.return_value = pd.DataFrame({
'ts_code': ['000001.SZ'],
})
mock_storage.load.return_value = pd.DataFrame(
{
"ts_code": ["000001.SZ"],
}
)
result = sync.sync_all(force_full=False, start_date='20230601')
result = sync.sync_all(force_full=False, start_date="20230601")
sync.sync_single_stock.assert_called_once()
call_args = sync.sync_single_stock.call_args
assert call_args[1]['start_date'] == '20230601'
assert call_args[1]["start_date"] == "20230601"
def test_no_stocks_found(self, mock_storage):
"""Test sync when no stocks are found."""
with patch('src.data.sync.Storage', return_value=mock_storage):
with patch("src.data.sync.Storage", return_value=mock_storage):
sync = DataSync()
sync.storage = mock_storage
@@ -236,7 +256,7 @@ class TestSyncAllConvenienceFunction:
def test_sync_all_function(self):
"""Test sync_all convenience function."""
with patch('src.data.sync.DataSync') as MockSync:
with patch("src.data.sync.DataSync") as MockSync:
mock_instance = Mock()
mock_instance.sync_all.return_value = {}
MockSync.return_value = mock_instance
@@ -251,5 +271,5 @@ class TestSyncAllConvenienceFunction:
)
if __name__ == '__main__':
pytest.main([__file__, '-v'])
if __name__ == "__main__":
pytest.main([__file__, "-v"])

256
tests/test_sync_real.py Normal file
View File

@@ -0,0 +1,256 @@
"""Tests for data sync with REAL data (read-only).
Tests verify:
1. get_global_last_date() correctly reads local data's max date
2. Incremental sync date calculation (local_last_date + 1)
3. Full sync date calculation (20180101)
4. Multi-stock scenario with real data
⚠️ IMPORTANT: These tests ONLY read data, no write operations.
- NO sync_all() calls (writes daily.h5)
- NO check_sync_needed() calls (writes trade_cal.h5)
"""
import pytest
import pandas as pd
from pathlib import Path
from src.data.sync import (
DataSync,
get_next_date,
DEFAULT_START_DATE,
)
from src.data.storage import Storage
class TestDataSyncReadOnly:
    """Read-only tests for data sync - verify date calculation logic.

    These tests only READ local data (daily.h5); they never trigger a sync,
    so they are safe to run against a live data directory.
    """

    @pytest.fixture
    def storage(self):
        """Create storage instance."""
        return Storage()

    @pytest.fixture
    def data_sync(self):
        """Create DataSync instance."""
        return DataSync()

    @pytest.fixture
    def daily_exists(self, storage):
        """Check if daily.h5 exists (used to skip data-dependent tests)."""
        return storage.exists("daily")

    def test_daily_h5_exists(self, storage):
        """Verify daily.h5 data file exists before running tests."""
        assert storage.exists("daily"), (
            "daily.h5 not found. Please run full sync first: "
            "uv run python -c 'from src.data.sync import sync_all; sync_all(force_full=True)'"
        )

    def test_get_global_last_date(self, data_sync, daily_exists):
        """Test get_global_last_date returns correct max date from local data."""
        if not daily_exists:
            pytest.skip("daily.h5 not found")

        last_date = data_sync.get_global_last_date()

        # Verify it's a valid YYYYMMDD date string
        assert last_date is not None, "get_global_last_date returned None"
        assert isinstance(last_date, str), f"Expected str, got {type(last_date)}"
        assert len(last_date) == 8, f"Expected 8-digit date, got {last_date}"
        assert last_date.isdigit(), f"Expected numeric date, got {last_date}"

        # Cross-check against the raw storage contents
        daily_data = data_sync.storage.load("daily")
        expected_max = str(daily_data["trade_date"].max())
        assert last_date == expected_max, (
            f"get_global_last_date returned {last_date}, "
            f"but actual max date is {expected_max}"
        )

        print(f"[TEST] Local data last date: {last_date}")

    def test_incremental_sync_date_calculation(self, data_sync, daily_exists):
        """Test incremental sync: start_date = local_last_date + 1.

        This verifies that when local data exists, incremental sync should
        fetch data from (local_last_date + 1), not from 20180101.
        """
        if not daily_exists:
            pytest.skip("daily.h5 not found")

        # Get local last date
        local_last_date = data_sync.get_global_last_date()
        assert local_last_date is not None, "No local data found"

        # Calculate expected incremental start date
        expected_start_date = get_next_date(local_last_date)

        # NOTE: comparing YYYYMMDD integers only works here because
        # get_next_date never crosses a month boundary without also
        # increasing the integer value; the +1 check is a sanity bound,
        # not a calendar computation.
        local_last_int = int(local_last_date)
        expected_int = local_last_int + 1
        actual_int = int(expected_start_date)
        assert actual_int == expected_int, (
            f"Incremental start date calculation error: "
            f"expected {expected_int}, got {actual_int}"
        )

        print(
            f"[TEST] Incremental sync: local_last={local_last_date}, "
            f"start_date should be {expected_start_date}"
        )

        # Verify this is NOT 20180101 (would be full sync)
        assert expected_start_date != DEFAULT_START_DATE, (
            f"Incremental sync should NOT start from {DEFAULT_START_DATE}"
        )

    def test_full_sync_date_calculation(self):
        """Test full sync: start_date = 20180101 when force_full=True.

        This verifies that force_full=True always starts from 20180101.
        """
        # Full sync should always use DEFAULT_START_DATE
        full_sync_start = DEFAULT_START_DATE
        assert full_sync_start == "20180101", (
            f"Full sync should start from 20180101, got {full_sync_start}"
        )
        print(f"[TEST] Full sync start date: {full_sync_start}")

    def test_date_comparison_logic(self, data_sync, daily_exists):
        """Test date comparison: incremental vs full sync selection logic.

        Verify that:
        - If local_last_date < today: incremental sync needed
        - If local_last_date >= today: no sync needed
        """
        if not daily_exists:
            pytest.skip("daily.h5 not found")

        from datetime import datetime

        local_last_date = data_sync.get_global_last_date()
        today = datetime.now().strftime("%Y%m%d")

        # Integer comparison of YYYYMMDD strings preserves date ordering
        local_last_int = int(local_last_date)
        today_int = int(today)

        # Log the comparison
        print(
            f"[TEST] Date comparison: local_last={local_last_date} ({local_last_int}), "
            f"today={today} ({today_int})"
        )

        # This test just verifies the comparison logic works
        if local_last_int < today_int:
            print("[TEST] Local data is older than today - sync needed")
            # Incremental sync should fetch from local_last_date + 1
            sync_start = get_next_date(local_last_date)
            assert int(sync_start) > local_last_int, (
                "Sync start should be after local last"
            )
        else:
            print("[TEST] Local data is up-to-date - no sync needed")

    def test_get_all_stock_codes_real_data(self, data_sync, daily_exists):
        """Test get_all_stock_codes returns multiple real stock codes."""
        if not daily_exists:
            pytest.skip("daily.h5 not found")

        codes = data_sync.get_all_stock_codes()

        # Verify it's a non-empty list
        assert isinstance(codes, list), f"Expected list, got {type(codes)}"
        assert len(codes) > 0, "No stock codes found"

        # Verify multiple stocks
        assert len(codes) >= 10, (
            f"Expected at least 10 stocks for multi-stock test, got {len(codes)}"
        )

        # Verify format (should be like 000001.SZ, 600000.SH)
        sample_codes = codes[:5]
        for code in sample_codes:
            assert "." in code, f"Invalid stock code format: {code}"
            suffix = code.split(".")[-1]
            assert suffix in ["SZ", "SH"], f"Invalid exchange suffix: {suffix}"

        print(f"[TEST] Found {len(codes)} stock codes (sample: {sample_codes})")

    def test_multi_stock_date_range(self, data_sync, daily_exists):
        """Test that multiple stocks share the same date range in local data.

        This verifies that local data has consistent date coverage across stocks.
        """
        if not daily_exists:
            pytest.skip("daily.h5 not found")

        daily_data = data_sync.storage.load("daily")

        # Get date range for each stock
        stock_dates = daily_data.groupby("ts_code")["trade_date"].agg(["min", "max"])

        # Get global min and max
        global_min = str(daily_data["trade_date"].min())
        global_max = str(daily_data["trade_date"].max())

        print(f"[TEST] Global date range: {global_min} to {global_max}")
        print(f"[TEST] Total stocks: {len(stock_dates)}")

        # Verify we have data for multiple stocks
        assert len(stock_dates) >= 10, (
            f"Expected at least 10 stocks, got {len(stock_dates)}"
        )

        # BUG FIX: the original subtracted the raw YYYYMMDD integers and
        # reported the result as a day count, which it is not
        # (e.g. 20240101 - 20230101 == 10000, not 365). Parse the dates
        # and compute a real calendar difference instead.
        from datetime import datetime

        span_days = (
            datetime.strptime(global_max, "%Y%m%d")
            - datetime.strptime(global_min, "%Y%m%d")
        ).days
        assert span_days > 100, (
            f"Date range too small: {span_days} days. "
            f"Expected at least 100 days of data."
        )

        print(f"[TEST] Date span: {span_days} days")
class TestDateUtilities:
    """Unit tests for the date helper functions used by the sync module."""

    def test_get_next_date(self):
        """get_next_date must return the following calendar day as YYYYMMDD."""
        # (input, expected next day) — covers plain, month and year rollovers
        cases = [
            ("20240101", "20240102"),
            ("20240131", "20240201"),  # month boundary
            ("20241231", "20250101"),  # year boundary
        ]
        for current, following in cases:
            assert get_next_date(current) == following

    def test_incremental_vs_full_sync_logic(self):
        """Incremental start = last local date + 1; full start = 20180101.

        Incremental: start_date = local_last_date + 1
        Full: start_date = 20180101
        """
        # Scenario 1: local data present -> resume one day after it
        last_local = "20240115"
        resume_from = get_next_date(last_local)
        assert resume_from == "20240116"
        assert resume_from != DEFAULT_START_DATE

        # Scenario 2: forced full sync always begins at the fixed epoch
        epoch = DEFAULT_START_DATE  # "20180101"
        assert epoch == "20180101"
        assert resume_from != epoch

        print("[TEST] Incremental vs Full sync logic verified")
if __name__ == "__main__":
    # Allow running this file directly; -s keeps the diagnostic prints visible.
    pytest.main([__file__, "-v", "-s"])