This commit is contained in:
sam 2025-09-26 21:08:32 +08:00
parent 6eac6c5f69
commit c6c781cc6b
7 changed files with 554 additions and 69 deletions

View File

@ -4,24 +4,34 @@
- `app/data`:数据库初始化与 Schema 定义。 - `app/data`:数据库初始化与 Schema 定义。
- `app/utils`:配置、数据库连接、日志和交易日历工具。 - `app/utils`:配置、数据库连接、日志和交易日历工具。
- `app/ingest`TuShare 与 RSS 数据拉取骨架 - `app/ingest`TuShare 数据抓取、新闻 RSS、数据覆盖检查器
- `app/features`:指标与信号计算接口。 - `app/features`:指标与信号计算接口。
- `app/agents`:多智能体博弈实现,包括动量、价值、新闻、流动性、宏观与风险代理。 - `app/agents`:多智能体博弈实现,包括动量、价值、新闻、流动性、宏观与风险代理。
- `app/backtest`:日线回测引擎与指标计算的占位实现。 - `app/backtest`:日线回测引擎与指标计算的占位实现。
- `app/llm`:人类可读卡片与摘要生成入口(仅构建提示,不直接交易)。 - `app/llm`:人类可读卡片与摘要生成入口(仅构建提示,不直接交易)。
- `app/ui`Streamlit 三页界面骨架 - `app/ui`Streamlit 四页界面骨架,含“自检测试”页
## 快速开始 ## 快速开始
```bash ```bash
python -m app.main # 初始化数据库 # 初始化数据库结构
python -m app.cli init-db
# 一键开机检查(默认回溯 365 天,缺失数据会自动补齐)
python -m app.cli boot-check --days 365
# 启动界面
streamlit run app/ui/streamlit_app.py streamlit run app/ui/streamlit_app.py
``` ```
Streamlit `自检测试` 页签提供:
- 数据库初始化快捷按钮;
- TuShare 小范围拉取测试;
- 开机检查器(展示当前数据覆盖范围与股票基础信息完整度)。
## 下一步 ## 下一步
1. 在 `app/ingest` 中补充 TuShare 和 RSS 数据抓取逻辑。 1. 在 `app/features``app/backtest` 中完善信号计算、事件驱动撮合与绩效指标输出。
2. 完善 `app/features``app/backtest` 以实现实际的信号计算与事件驱动回测。 2. 将代理效用写入 SQLite 的 `agent_utils``alloc_log` 表,驱动 UI 决策解释。
3. 将代理效用写入 SQLite 的 `agent_utils``alloc_log` 表,驱动 UI 展示。 3. 使用轻量情感分析与热度计算填充 `news`、`heat_daily` 与热点指数。
4. 使用轻量情感分析与热度计算,填充 `news``heat_daily` 4. 接入本地小模型或 API 完成人类可读的策略建议卡片,形成端到端体验。
5. 接入本地小模型或 API 完成 LLM 文本解释,并在 UI 中展示。

77
app/cli.py Normal file
View File

@ -0,0 +1,77 @@
"""Command line entry points for routine tasks."""
from __future__ import annotations
import argparse
from datetime import date
from app.backtest.engine import BtConfig, run_backtest
from app.data.schema import initialize_database
from app.ingest.checker import run_boot_check
def init_db() -> None:
    """Create the SQLite schema and report whether any work was done."""
    outcome = initialize_database()
    message = (
        "Database already initialized; skipping schema creation"
        if outcome.skipped
        else f"Initialized database with {outcome.executed} statements"
    )
    print(message)
def run_sample_backtest() -> None:
    """Run a short demo backtest over a single symbol (Q1 2020, 000001.SZ)."""
    demo_params = {
        "target": 0.035,
        "stop": -0.015,
        "hold_days": 10,
    }
    demo_cfg = BtConfig(
        id="demo",
        name="Demo Strategy",
        start_date=date(2020, 1, 1),
        end_date=date(2020, 3, 31),
        universe=["000001.SZ"],
        params=demo_params,
    )
    run_backtest(demo_cfg)
def run_boot_check_cli(days: int) -> None:
    """Run the startup coverage check and print a plain-text summary."""
    report = run_boot_check(days=days)
    summary = [
        "Boot check summary:",
        f" Period: {report.start} ~ {report.end}",
        f" Expected trading days: {report.expected_trading_days}",
    ]
    for name, info in report.tables.items():
        summary.append(
            f" {name}: min={info.get('min')}, max={info.get('max')}, "
            f"distinct={info.get('distinct_days')}, ok={info.get('meets_expectation')}"
        )
    stock = report.stock_basic
    summary.append(
        f" stock_basic: total={stock.get('total')}, "
        f"SSE listed={stock.get('sse_listed')}, SZSE listed={stock.get('szse_listed')}"
    )
    for line in summary:
        print(line)
def main() -> None:
    """Parse CLI arguments and dispatch to the requested sub-command.

    Running with no sub-command defaults to ``init-db`` so that a bare
    ``python -m app.cli`` still initializes the database.
    """
    parser = argparse.ArgumentParser(description="Investment assistant toolkit")
    sub = parser.add_subparsers(dest="command")
    sub.add_parser("init-db", help="Initialize SQLite schema")
    boot_parser = sub.add_parser("boot-check", help="Run startup data coverage check")
    boot_parser.add_argument("--days", type=int, default=365, help="Lookback window in days")
    sub.add_parser("sample-backtest", help="Execute demo backtest run")
    args = parser.parse_args()

    # argparse already rejects unknown sub-commands with an error before we
    # get here, so only the declared commands (or None) can reach this point;
    # the previous trailing `else: parser.print_help()` branch was unreachable.
    if args.command == "boot-check":
        run_boot_check_cli(days=args.days)
    elif args.command == "sample-backtest":
        run_sample_backtest()
    else:  # None or "init-db"
        init_db()


if __name__ == "__main__":
    main()

View File

@ -5,11 +5,101 @@ import sqlite3
from dataclasses import dataclass from dataclasses import dataclass
from typing import Iterable from typing import Iterable
from app.utils.config import get_config
from app.utils.db import db_session from app.utils.db import db_session
SCHEMA_STATEMENTS: Iterable[str] = ( SCHEMA_STATEMENTS: Iterable[str] = (
"""
CREATE TABLE IF NOT EXISTS stock_basic (
ts_code TEXT PRIMARY KEY,
symbol TEXT,
name TEXT,
area TEXT,
industry TEXT,
market TEXT,
exchange TEXT,
list_status TEXT,
list_date TEXT,
delist_date TEXT
);
""",
"""
CREATE TABLE IF NOT EXISTS daily (
ts_code TEXT,
trade_date TEXT,
open REAL,
high REAL,
low REAL,
close REAL,
pre_close REAL,
change REAL,
pct_chg REAL,
vol REAL,
amount REAL,
PRIMARY KEY (ts_code, trade_date)
);
""",
"""
CREATE TABLE IF NOT EXISTS daily_basic (
ts_code TEXT,
trade_date TEXT,
close REAL,
turnover_rate REAL,
turnover_rate_f REAL,
volume_ratio REAL,
pe REAL,
pe_ttm REAL,
pb REAL,
ps REAL,
ps_ttm REAL,
dv_ratio REAL,
dv_ttm REAL,
total_share REAL,
float_share REAL,
free_share REAL,
total_mv REAL,
circ_mv REAL,
PRIMARY KEY (ts_code, trade_date)
);
""",
"""
CREATE TABLE IF NOT EXISTS adj_factor (
ts_code TEXT,
trade_date TEXT,
adj_factor REAL,
PRIMARY KEY (ts_code, trade_date)
);
""",
"""
CREATE TABLE IF NOT EXISTS suspend (
ts_code TEXT,
suspend_date TEXT,
resume_date TEXT,
suspend_type TEXT,
ann_date TEXT,
suspend_timing TEXT,
resume_timing TEXT,
reason TEXT,
PRIMARY KEY (ts_code, suspend_date)
);
""",
"""
CREATE TABLE IF NOT EXISTS trade_calendar (
exchange TEXT,
cal_date TEXT PRIMARY KEY,
is_open INTEGER,
pretrade_date TEXT
);
""",
"""
CREATE TABLE IF NOT EXISTS stk_limit (
ts_code TEXT,
trade_date TEXT,
up_limit REAL,
down_limit REAL,
PRIMARY KEY (ts_code, trade_date)
);
""",
""" """
CREATE TABLE IF NOT EXISTS news ( CREATE TABLE IF NOT EXISTS news (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,

67
app/ingest/checker.py Normal file
View File

@ -0,0 +1,67 @@
"""数据覆盖开机检查器。"""
from __future__ import annotations
import logging
from dataclasses import dataclass
from datetime import date, timedelta
from typing import Dict
from app.data.schema import initialize_database
from app.ingest.tushare import collect_data_coverage, ensure_data_coverage
LOGGER = logging.getLogger(__name__)
@dataclass
class CoverageReport:
    """Snapshot of database coverage produced by the boot check."""

    # ISO-style date strings bounding the checked window.
    start: str
    end: str
    # Number of open trading days expected inside the window.
    expected_trading_days: int
    # Per-table coverage stats keyed by table name.
    tables: Dict[str, Dict[str, object]]
    # Aggregate listing counts for the stock_basic table.
    stock_basic: Dict[str, object]

    def to_dict(self) -> Dict[str, object]:
        """Return the report as a JSON-serializable mapping."""
        keys = ("start", "end", "expected_trading_days", "tables", "stock_basic")
        return {key: getattr(self, key) for key in keys}
def _default_window(days: int = 365) -> tuple[date, date]:
end = date.today()
start = end - timedelta(days=days)
return start, end
def run_boot_check(days: int = 365, auto_fetch: bool = True) -> CoverageReport:
    """Run the startup self-check, auto-filling missing data when needed.

    Args:
        days: Lookback window size in calendar days, ending today.
        auto_fetch: When True, fetch missing data before measuring coverage.

    Returns:
        A CoverageReport summarizing per-table coverage over the window.
    """
    # Make sure the schema exists before any coverage queries run.
    initialize_database()
    start, end = _default_window(days)
    LOGGER.info("开机检查覆盖窗口:%s%s", start, end)
    if auto_fetch:
        ensure_data_coverage(start, end)
    coverage = collect_data_coverage(start, end)
    report = CoverageReport(
        start=coverage["period"]["start"],
        end=coverage["period"]["end"],
        expected_trading_days=coverage["period"]["expected_trading_days"],
        # Everything except the metadata entries is a per-table stats dict.
        tables={k: v for k, v in coverage.items() if k not in ("period", "stock_basic")},
        stock_basic=coverage["stock_basic"],
    )
    LOGGER.info(
        "数据覆盖情况:日线[%s,%s]Distinct=%s,目标交易日=%s",
        report.tables["daily"].get("min"),
        report.tables["daily"].get("max"),
        report.tables["daily"].get("distinct_days"),
        report.expected_trading_days,
    )
    return report

View File

@ -1,4 +1,4 @@
"""TuShare 数据拉取管线实现""" """TuShare 数据拉取与数据覆盖检查工具"""
from __future__ import annotations from __future__ import annotations
import logging import logging
@ -11,11 +11,12 @@ import pandas as pd
try: try:
import tushare as ts import tushare as ts
except ImportError as exc: # pragma: no cover - dependency error surfaced at runtime except ImportError: # pragma: no cover - 运行时提示
ts = None # type: ignore[assignment] ts = None # type: ignore[assignment]
from app.utils.config import get_config from app.utils.config import get_config
from app.utils.db import db_session from app.utils.db import db_session
from app.data.schema import initialize_database
LOGGER = logging.getLogger(__name__) LOGGER = logging.getLogger(__name__)
@ -30,6 +31,20 @@ class FetchJob:
_TABLE_SCHEMAS: Dict[str, str] = { _TABLE_SCHEMAS: Dict[str, str] = {
"stock_basic": """
CREATE TABLE IF NOT EXISTS stock_basic (
ts_code TEXT PRIMARY KEY,
symbol TEXT,
name TEXT,
area TEXT,
industry TEXT,
market TEXT,
exchange TEXT,
list_status TEXT,
list_date TEXT,
delist_date TEXT
);
""",
"daily": """ "daily": """
CREATE TABLE IF NOT EXISTS daily ( CREATE TABLE IF NOT EXISTS daily (
ts_code TEXT, ts_code TEXT,
@ -46,6 +61,37 @@ _TABLE_SCHEMAS: Dict[str, str] = {
PRIMARY KEY (ts_code, trade_date) PRIMARY KEY (ts_code, trade_date)
); );
""", """,
"daily_basic": """
CREATE TABLE IF NOT EXISTS daily_basic (
ts_code TEXT,
trade_date TEXT,
close REAL,
turnover_rate REAL,
turnover_rate_f REAL,
volume_ratio REAL,
pe REAL,
pe_ttm REAL,
pb REAL,
ps REAL,
ps_ttm REAL,
dv_ratio REAL,
dv_ttm REAL,
total_share REAL,
float_share REAL,
free_share REAL,
total_mv REAL,
circ_mv REAL,
PRIMARY KEY (ts_code, trade_date)
);
""",
"adj_factor": """
CREATE TABLE IF NOT EXISTS adj_factor (
ts_code TEXT,
trade_date TEXT,
adj_factor REAL,
PRIMARY KEY (ts_code, trade_date)
);
""",
"suspend": """ "suspend": """
CREATE TABLE IF NOT EXISTS suspend ( CREATE TABLE IF NOT EXISTS suspend (
ts_code TEXT, ts_code TEXT,
@ -62,9 +108,10 @@ _TABLE_SCHEMAS: Dict[str, str] = {
"trade_calendar": """ "trade_calendar": """
CREATE TABLE IF NOT EXISTS trade_calendar ( CREATE TABLE IF NOT EXISTS trade_calendar (
exchange TEXT, exchange TEXT,
cal_date TEXT PRIMARY KEY, cal_date TEXT,
is_open INTEGER, is_open INTEGER,
pretrade_date TEXT pretrade_date TEXT,
PRIMARY KEY (exchange, cal_date)
); );
""", """,
"stk_limit": """ "stk_limit": """
@ -79,6 +126,18 @@ _TABLE_SCHEMAS: Dict[str, str] = {
} }
_TABLE_COLUMNS: Dict[str, List[str]] = { _TABLE_COLUMNS: Dict[str, List[str]] = {
"stock_basic": [
"ts_code",
"symbol",
"name",
"area",
"industry",
"market",
"exchange",
"list_status",
"list_date",
"delist_date",
],
"daily": [ "daily": [
"ts_code", "ts_code",
"trade_date", "trade_date",
@ -92,6 +151,31 @@ _TABLE_COLUMNS: Dict[str, List[str]] = {
"vol", "vol",
"amount", "amount",
], ],
"daily_basic": [
"ts_code",
"trade_date",
"close",
"turnover_rate",
"turnover_rate_f",
"volume_ratio",
"pe",
"pe_ttm",
"pb",
"ps",
"ps_ttm",
"dv_ratio",
"dv_ttm",
"total_share",
"float_share",
"free_share",
"total_mv",
"circ_mv",
],
"adj_factor": [
"ts_code",
"trade_date",
"adj_factor",
],
"suspend": [ "suspend": [
"ts_code", "ts_code",
"suspend_date", "suspend_date",
@ -137,14 +221,78 @@ def _format_date(value: date) -> str:
def _df_to_records(df: pd.DataFrame, allowed_cols: List[str]) -> List[Dict]: def _df_to_records(df: pd.DataFrame, allowed_cols: List[str]) -> List[Dict]:
if df is None or df.empty: if df is None or df.empty:
return [] return []
# 对缺失列进行补全,防止写库时缺少绑定参数
reindexed = df.reindex(columns=allowed_cols) reindexed = df.reindex(columns=allowed_cols)
return reindexed.where(pd.notnull(reindexed), None).to_dict("records") return reindexed.where(pd.notnull(reindexed), None).to_dict("records")
def fetch_daily_bars(job: FetchJob) -> Iterable[Dict]: def _range_stats(table: str, date_col: str, start_str: str, end_str: str) -> Dict[str, Optional[str]]:
"""拉取日线行情。""" sql = (
f"SELECT MIN({date_col}) AS min_d, MAX({date_col}) AS max_d, "
f"COUNT(DISTINCT {date_col}) AS distinct_days FROM {table} "
f"WHERE {date_col} BETWEEN ? AND ?"
)
with db_session(read_only=True) as conn:
row = conn.execute(sql, (start_str, end_str)).fetchone()
return {
"min": row["min_d"],
"max": row["max_d"],
"distinct": row["distinct_days"] if row else 0,
}
def _range_needs_refresh(
    table: str,
    date_col: str,
    start_str: str,
    end_str: str,
    expected_days: int = 0,
) -> bool:
    """Decide whether *table* must be re-fetched for [start_str, end_str]."""
    stats = _range_stats(table, date_col, start_str, end_str)
    lo, hi = stats["min"], stats["max"]
    if lo is None or hi is None:
        # Nothing stored at all inside the window.
        return True
    if lo > start_str or hi < end_str:
        # Stored range does not fully span the requested window.
        return True
    # Optionally require a minimum number of distinct trading days.
    return bool(expected_days) and (stats["distinct"] or 0) < expected_days
def _calendar_needs_refresh(exchange: str, start_str: str, end_str: str) -> bool:
    """Return True when the stored trade calendar does not span the window."""
    sql = """
    SELECT MIN(cal_date) AS min_d, MAX(cal_date) AS max_d, COUNT(*) AS cnt
    FROM trade_calendar
    WHERE exchange = ? AND cal_date BETWEEN ? AND ?
    """
    with db_session(read_only=True) as conn:
        row = conn.execute(sql, (exchange, start_str, end_str)).fetchone()
    if row is None or row["min_d"] is None:
        return True
    # Calendars are legitimately sparse (holidays), so only the span matters,
    # never the row count.
    return row["min_d"] > start_str or row["max_d"] < end_str
def _expected_trading_days(start_str: str, end_str: str, exchange: str = "SSE") -> int:
    """Count open trading days recorded in the calendar for the window."""
    sql = """
    SELECT COUNT(*) AS cnt
    FROM trade_calendar
    WHERE exchange = ? AND cal_date BETWEEN ? AND ? AND is_open = 1
    """
    with db_session(read_only=True) as conn:
        row = conn.execute(sql, (exchange, start_str, end_str)).fetchone()
    if row is None or row["cnt"] is None:
        return 0
    return int(row["cnt"])
def fetch_stock_basic(exchange: Optional[str] = None, list_status: str = "L") -> Iterable[Dict]:
    """Download the stock master list and normalize it to row dicts."""
    api = _ensure_client()
    LOGGER.info("拉取股票基础信息(交易所:%s,状态:%s", exchange or "全部", list_status)
    wanted = _TABLE_COLUMNS["stock_basic"]
    # Request exactly the columns the local table stores, in the same order.
    frame = api.stock_basic(
        exchange=exchange,
        list_status=list_status,
        fields=",".join(wanted),
    )
    return _df_to_records(frame, wanted)
def fetch_daily_bars(job: FetchJob) -> Iterable[Dict]:
client = _ensure_client() client = _ensure_client()
start_date = _format_date(job.start) start_date = _format_date(job.start)
end_date = _format_date(job.end) end_date = _format_date(job.end)
@ -167,6 +315,24 @@ def fetch_daily_bars(job: FetchJob) -> Iterable[Dict]:
return _df_to_records(df, _TABLE_COLUMNS["daily"]) return _df_to_records(df, _TABLE_COLUMNS["daily"])
def fetch_daily_basic(start: date, end: date, ts_code: Optional[str] = None) -> Iterable[Dict]:
    """Fetch per-day valuation/turnover metrics for the date range."""
    api = _ensure_client()
    begin, finish = _format_date(start), _format_date(end)
    LOGGER.info("拉取日线基础指标(%s-%s,股票:%s", begin, finish, ts_code or "全部")
    frame = api.daily_basic(ts_code=ts_code, start_date=begin, end_date=finish)
    return _df_to_records(frame, _TABLE_COLUMNS["daily_basic"])
def fetch_adj_factor(start: date, end: date, ts_code: Optional[str] = None) -> Iterable[Dict]:
    """Fetch price-adjustment factors for the date range."""
    api = _ensure_client()
    begin, finish = _format_date(start), _format_date(end)
    LOGGER.info("拉取复权因子(%s-%s,股票:%s", begin, finish, ts_code or "全部")
    frame = api.adj_factor(ts_code=ts_code, start_date=begin, end_date=finish)
    return _df_to_records(frame, _TABLE_COLUMNS["adj_factor"])
def fetch_suspensions(start: date, end: date, ts_code: Optional[str] = None) -> Iterable[Dict]: def fetch_suspensions(start: date, end: date, ts_code: Optional[str] = None) -> Iterable[Dict]:
client = _ensure_client() client = _ensure_client()
start_date = _format_date(start) start_date = _format_date(start)
@ -195,8 +361,6 @@ def fetch_stk_limit(start: date, end: date, ts_code: Optional[str] = None) -> It
def save_records(table: str, rows: Iterable[Dict]) -> None: def save_records(table: str, rows: Iterable[Dict]) -> None:
"""将拉取的数据写入 SQLite。"""
items = list(rows) items = list(rows)
if not items: if not items:
LOGGER.info("%s 没有新增记录,跳过写入", table) LOGGER.info("%s 没有新增记录,跳过写入", table)
@ -219,22 +383,122 @@ def save_records(table: str, rows: Iterable[Dict]) -> None:
) )
def ensure_stock_basic(list_status: str = "L") -> None:
    """Populate stock_basic once; skip the fetch when rows already exist."""
    exchanges = ("SSE", "SZSE")
    query = "SELECT COUNT(*) AS cnt FROM stock_basic WHERE exchange IN (?, ?) AND list_status = ?"
    with db_session(read_only=True) as conn:
        row = conn.execute(query, (*exchanges, list_status)).fetchone()
    existing = row["cnt"] if row else 0
    if existing:
        LOGGER.info("股票基础信息已存在 %d 条记录,跳过拉取", existing)
        return
    for market in exchanges:
        rows = fetch_stock_basic(exchange=market, list_status=list_status)
        save_records("stock_basic", rows)
def ensure_trade_calendar(start: date, end: date, exchanges: Sequence[str] = ("SSE", "SZSE")) -> None:
    """Refresh each exchange's trade calendar when its stored span is short."""
    lo, hi = _format_date(start), _format_date(end)
    for exch in exchanges:
        if not _calendar_needs_refresh(exch, lo, hi):
            continue
        save_records("trade_calendar", fetch_trade_calendar(start, end, exchange=exch))
def ensure_data_coverage(
    start: date,
    end: date,
    ts_codes: Optional[Sequence[str]] = None,
    include_limits: bool = True,
    force: bool = False,
) -> None:
    """Ensure every market-data table covers [start, end], fetching gaps.

    Args:
        start: First calendar date of the required window.
        end: Last calendar date of the required window.
        ts_codes: Optional subset of symbols; None/empty means the full market.
        include_limits: Also maintain the stk_limit table when True.
        force: Re-fetch everything regardless of current coverage.
    """
    initialize_database()
    start_str = _format_date(start)
    end_str = _format_date(end)
    # Reference data first: the symbol list and trade calendar drive the
    # expected-day count used by the coverage checks below.
    ensure_stock_basic()
    ensure_trade_calendar(start, end)
    # dict.fromkeys de-duplicates while preserving the caller's order.
    codes = tuple(dict.fromkeys(ts_codes)) if ts_codes else tuple()
    expected_days = _expected_trading_days(start_str, end_str)
    job = FetchJob("daily_autofill", start=start, end=end, ts_codes=codes)
    if force or _range_needs_refresh("daily", "trade_date", start_str, end_str, expected_days):
        save_records("daily", fetch_daily_bars(job))

    def _save_with_codes(table: str, fetch_fn) -> None:
        # Fetch per-symbol when a universe was given, otherwise one bulk call.
        if codes:
            for code in codes:
                save_records(table, fetch_fn(start, end, ts_code=code))
        else:
            save_records(table, fetch_fn(start, end))

    if force or _range_needs_refresh("daily_basic", "trade_date", start_str, end_str, expected_days):
        _save_with_codes("daily_basic", fetch_daily_basic)
    if force or _range_needs_refresh("adj_factor", "trade_date", start_str, end_str, expected_days):
        _save_with_codes("adj_factor", fetch_adj_factor)
    if include_limits and (force or _range_needs_refresh("stk_limit", "trade_date", start_str, end_str, expected_days)):
        _save_with_codes("stk_limit", fetch_stk_limit)
    # Suspensions are sparse by nature, so no distinct-day expectation applies.
    if force or _range_needs_refresh("suspend", "suspend_date", start_str, end_str):
        _save_with_codes("suspend", fetch_suspensions)
def collect_data_coverage(start: date, end: date) -> Dict[str, Dict[str, object]]:
    """Measure per-table coverage for [start, end] without fetching anything.

    Returns:
        Mapping with a "period" entry (window metadata), one entry per data
        table (min/max dates, distinct day count, expectation flag), and a
        "stock_basic" entry with listing counts per exchange.
    """
    start_str = _format_date(start)
    end_str = _format_date(end)
    expected_days = _expected_trading_days(start_str, end_str)
    coverage: Dict[str, Dict[str, object]] = {
        "period": {
            "start": start_str,
            "end": end_str,
            "expected_trading_days": expected_days,
        }
    }

    def add_table(name: str, date_col: str, require_days: bool = True) -> None:
        # A table "meets expectation" when its stored span covers the window
        # and (optionally) it holds at least the expected number of days.
        stats = _range_stats(name, date_col, start_str, end_str)
        coverage[name] = {
            "min": stats["min"],
            "max": stats["max"],
            "distinct_days": stats["distinct"],
            "meets_expectation": (
                stats["min"] is not None
                and stats["max"] is not None
                and stats["min"] <= start_str
                and stats["max"] >= end_str
                and ((not require_days) or (stats["distinct"] or 0) >= expected_days)
            ),
        }

    add_table("daily", "trade_date")
    add_table("daily_basic", "trade_date")
    add_table("adj_factor", "trade_date")
    add_table("stk_limit", "trade_date")
    # Suspensions occur irregularly; only the span check applies.
    add_table("suspend", "suspend_date", require_days=False)
    with db_session(read_only=True) as conn:
        stock_tot = conn.execute("SELECT COUNT(*) AS cnt FROM stock_basic").fetchone()
        stock_sse = conn.execute(
            "SELECT COUNT(*) AS cnt FROM stock_basic WHERE exchange = 'SSE' AND list_status = 'L'"
        ).fetchone()
        stock_szse = conn.execute(
            "SELECT COUNT(*) AS cnt FROM stock_basic WHERE exchange = 'SZSE' AND list_status = 'L'"
        ).fetchone()
    coverage["stock_basic"] = {
        "total": stock_tot["cnt"] if stock_tot else 0,
        "sse_listed": stock_sse["cnt"] if stock_sse else 0,
        "szse_listed": stock_szse["cnt"] if stock_szse else 0,
    }
    return coverage
def run_ingestion(job: FetchJob, include_limits: bool = True) -> None: def run_ingestion(job: FetchJob, include_limits: bool = True) -> None:
"""按任务配置拉取 TuShare 数据。"""
LOGGER.info("启动 TuShare 拉取任务:%s", job.name) LOGGER.info("启动 TuShare 拉取任务:%s", job.name)
ensure_data_coverage(job.start, job.end, ts_codes=job.ts_codes, include_limits=include_limits, force=True)
daily_rows = fetch_daily_bars(job)
save_records("daily", daily_rows)
suspend_rows = fetch_suspensions(job.start, job.end)
save_records("suspend", suspend_rows)
calendar_rows = fetch_trade_calendar(job.start, job.end)
save_records("trade_calendar", calendar_rows)
if include_limits:
limit_rows = fetch_stk_limit(job.start, job.end)
save_records("stk_limit", limit_rows)
LOGGER.info("任务 %s 完成", job.name) LOGGER.info("任务 %s 完成", job.name)

View File

@ -1,35 +0,0 @@
"""Command line entry points for routine tasks."""
from __future__ import annotations
from datetime import date
from app.backtest.engine import BtConfig, run_backtest
from app.data.schema import initialize_database
def init_db() -> None:
result = initialize_database()
if result.skipped:
print("Database already initialized; skipping schema creation")
else:
print(f"Initialized database with {result.executed} statements")
def run_sample_backtest() -> None:
cfg = BtConfig(
id="demo",
name="Demo Strategy",
start_date=date(2020, 1, 1),
end_date=date(2020, 3, 31),
universe=["000001.SZ"],
params={
"target": 0.035,
"stop": -0.015,
"hold_days": 10,
},
)
run_backtest(cfg)
if __name__ == "__main__":
init_db()

View File

@ -12,6 +12,7 @@ if str(ROOT) not in sys.path:
import streamlit as st import streamlit as st
from app.data.schema import initialize_database from app.data.schema import initialize_database
from app.ingest.checker import run_boot_check
from app.ingest.tushare import FetchJob, run_ingestion from app.ingest.tushare import FetchJob, run_ingestion
from app.llm.explain import make_human_card from app.llm.explain import make_human_card
@ -67,6 +68,17 @@ def render_tests() -> None:
st.info("注意TuShare 拉取依赖网络与 Token若环境未配置将出现错误提示。") st.info("注意TuShare 拉取依赖网络与 Token若环境未配置将出现错误提示。")
st.divider()
days = int(st.number_input("检查窗口(天数)", min_value=30, max_value=1095, value=365, step=30))
if st.button("执行开机检查"):
with st.spinner("正在执行开机检查..."):
try:
report = run_boot_check(days=days)
st.success("开机检查完成,以下为数据覆盖摘要。")
st.json(report.to_dict())
except Exception as exc: # noqa: BLE001
st.error(f"开机检查失败:{exc}")
def main() -> None: def main() -> None:
st.set_page_config(page_title="多智能体投资助理", layout="wide") st.set_page_config(page_title="多智能体投资助理", layout="wide")