Files
NewStock/main/train/Classify2-HyperGBM.ipynb
2025-05-26 21:34:36 +08:00

2551 lines
250 KiB
Plaintext
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "79a7758178bafdd3",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:46:06.987506Z",
"start_time": "2025-04-03T12:46:06.259551Z"
},
"jupyter": {
"source_hidden": true
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"e:\\PyProject\\NewStock\\main\\train\n"
]
}
],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import gc\n",
"import os\n",
"import sys\n",
"sys.path.append('../../')\n",
"print(os.getcwd())\n",
"import pandas as pd\n",
"from main.factor.factor import get_rolling_factor, get_simple_factor\n",
"from main.utils.factor import read_industry_data\n",
"from main.utils.factor_processor import calculate_score\n",
"from main.utils.utils import read_and_merge_h5_data, merge_with_industry_data\n",
"\n",
"import warnings\n",
"\n",
"warnings.filterwarnings(\"ignore\")"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "a79cafb06a7e0e43",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:00.212859Z",
"start_time": "2025-04-03T12:46:06.998047Z"
},
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"daily data\n",
"daily basic\n",
"inner merge on ['ts_code', 'trade_date']\n",
"stk limit\n",
"left merge on ['ts_code', 'trade_date']\n",
"money flow\n",
"left merge on ['ts_code', 'trade_date']\n",
"cyq perf\n",
"left merge on ['ts_code', 'trade_date']\n",
"<class 'pandas.core.frame.DataFrame'>\n",
"RangeIndex: 8638639 entries, 0 to 8638638\n",
"Data columns (total 32 columns):\n",
" # Column Dtype \n",
"--- ------ ----- \n",
" 0 ts_code object \n",
" 1 trade_date datetime64[ns]\n",
" 2 open float64 \n",
" 3 close float64 \n",
" 4 high float64 \n",
" 5 low float64 \n",
" 6 vol float64 \n",
" 7 pct_chg float64 \n",
" 8 turnover_rate float64 \n",
" 9 pe_ttm float64 \n",
" 10 circ_mv float64 \n",
" 11 total_mv float64 \n",
" 12 volume_ratio float64 \n",
" 13 is_st bool \n",
" 14 up_limit float64 \n",
" 15 down_limit float64 \n",
" 16 buy_sm_vol float64 \n",
" 17 sell_sm_vol float64 \n",
" 18 buy_lg_vol float64 \n",
" 19 sell_lg_vol float64 \n",
" 20 buy_elg_vol float64 \n",
" 21 sell_elg_vol float64 \n",
" 22 net_mf_vol float64 \n",
" 23 his_low float64 \n",
" 24 his_high float64 \n",
" 25 cost_5pct float64 \n",
" 26 cost_15pct float64 \n",
" 27 cost_50pct float64 \n",
" 28 cost_85pct float64 \n",
" 29 cost_95pct float64 \n",
" 30 weight_avg float64 \n",
" 31 winner_rate float64 \n",
"dtypes: bool(1), datetime64[ns](1), float64(29), object(1)\n",
"memory usage: 2.0+ GB\n",
"None\n"
]
}
],
"source": [
"from main.utils.utils import read_and_merge_h5_data\n",
"\n",
"print('daily data')\n",
"df = read_and_merge_h5_data('../../data/daily_data.h5', key='daily_data',\n",
" columns=['ts_code', 'trade_date', 'open', 'close', 'high', 'low', 'vol', 'pct_chg'],\n",
" df=None)\n",
"\n",
"print('daily basic')\n",
"df = read_and_merge_h5_data('../../data/daily_basic.h5', key='daily_basic',\n",
" columns=['ts_code', 'trade_date', 'turnover_rate', 'pe_ttm', 'circ_mv', 'total_mv', 'volume_ratio',\n",
" 'is_st'], df=df, join='inner')\n",
"\n",
"print('stk limit')\n",
"df = read_and_merge_h5_data('../../data/stk_limit.h5', key='stk_limit',\n",
" columns=['ts_code', 'trade_date', 'pre_close', 'up_limit', 'down_limit'],\n",
" df=df)\n",
"print('money flow')\n",
"df = read_and_merge_h5_data('../../data/money_flow.h5', key='money_flow',\n",
" columns=['ts_code', 'trade_date', 'buy_sm_vol', 'sell_sm_vol', 'buy_lg_vol', 'sell_lg_vol',\n",
" 'buy_elg_vol', 'sell_elg_vol', 'net_mf_vol'],\n",
" df=df)\n",
"print('cyq perf')\n",
"df = read_and_merge_h5_data('../../data/cyq_perf.h5', key='cyq_perf',\n",
" columns=['ts_code', 'trade_date', 'his_low', 'his_high', 'cost_5pct', 'cost_15pct',\n",
" 'cost_50pct',\n",
" 'cost_85pct', 'cost_95pct', 'weight_avg', 'winner_rate'],\n",
" df=df)\n",
"print(df.info())"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "cac01788dac10678",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:10.527104Z",
"start_time": "2025-04-03T12:47:00.488715Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"industry\n"
]
}
],
"source": [
"print('industry')\n",
"industry_df = read_and_merge_h5_data('../../data/industry_data.h5', key='industry_data',\n",
" columns=['ts_code', 'l2_code', 'in_date'],\n",
" df=None, on=['ts_code'], join='left')\n",
"\n",
"\n",
"def merge_with_industry_data(df, industry_df):\n",
" # 确保日期字段是 datetime 类型\n",
" df['trade_date'] = pd.to_datetime(df['trade_date'])\n",
" industry_df['in_date'] = pd.to_datetime(industry_df['in_date'])\n",
"\n",
" # 对 industry_df 按 ts_code 和 in_date 排序\n",
" industry_df_sorted = industry_df.sort_values(['in_date', 'ts_code'])\n",
"\n",
" # 对原始 df 按 ts_code 和 trade_date 排序\n",
" df_sorted = df.sort_values(['trade_date', 'ts_code'])\n",
"\n",
" # 使用 merge_asof 进行向后合并\n",
" merged = pd.merge_asof(\n",
" df_sorted,\n",
" industry_df_sorted,\n",
" by='ts_code', # 按 ts_code 分组\n",
" left_on='trade_date',\n",
" right_on='in_date',\n",
" direction='backward'\n",
" )\n",
"\n",
" # 获取每个 ts_code 的最早 in_date 记录\n",
" min_in_date_per_ts = (industry_df_sorted\n",
" .groupby('ts_code')\n",
" .first()\n",
" .reset_index()[['ts_code', 'l2_code']])\n",
"\n",
" # 填充未匹配到的记录trade_date 早于所有 in_date 的情况)\n",
" merged['l2_code'] = merged['l2_code'].fillna(\n",
" merged['ts_code'].map(min_in_date_per_ts.set_index('ts_code')['l2_code'])\n",
" )\n",
"\n",
" # 保留需要的列并重置索引\n",
" result = merged.reset_index(drop=True)\n",
" return result\n",
"\n",
"\n",
"# 使用示例\n",
"df = merge_with_industry_data(df, industry_df)\n",
"# print(mdf[mdf['ts_code'] == '600751.SH'][['ts_code', 'trade_date', 'l2_code']])"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "c4e9e1d31da6dba6",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:10.719252Z",
"start_time": "2025-04-03T12:47:10.541247Z"
},
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"from main.factor.factor import *\n",
"\n",
"def calculate_indicators(df):\n",
" \"\"\"\n",
" 计算四个指标当日涨跌幅、5日移动平均、RSI、MACD。\n",
" \"\"\"\n",
" df = df.sort_values('trade_date')\n",
" df['daily_return'] = (df['close'] - df['pre_close']) / df['pre_close'] * 100\n",
" # df['5_day_ma'] = df['close'].rolling(window=5).mean()\n",
" delta = df['close'].diff()\n",
" gain = delta.where(delta > 0, 0)\n",
" loss = -delta.where(delta < 0, 0)\n",
" avg_gain = gain.rolling(window=14).mean()\n",
" avg_loss = loss.rolling(window=14).mean()\n",
" rs = avg_gain / avg_loss\n",
" df['RSI'] = 100 - (100 / (1 + rs))\n",
"\n",
" # 计算MACD\n",
" ema12 = df['close'].ewm(span=12, adjust=False).mean()\n",
" ema26 = df['close'].ewm(span=26, adjust=False).mean()\n",
" df['MACD'] = ema12 - ema26\n",
" df['Signal_line'] = df['MACD'].ewm(span=9, adjust=False).mean()\n",
" df['MACD_hist'] = df['MACD'] - df['Signal_line']\n",
"\n",
" # 4. 情绪因子1市场上涨比例Up Ratio\n",
" df['up_ratio'] = df['daily_return'].apply(lambda x: 1 if x > 0 else 0)\n",
" df['up_ratio_20d'] = df['up_ratio'].rolling(window=20).mean() # 过去20天上涨比例\n",
"\n",
" # 5. 情绪因子2成交量变化率Volume Change Rate\n",
" df['volume_mean'] = df['vol'].rolling(window=20).mean() # 过去20天的平均成交量\n",
" df['volume_change_rate'] = (df['vol'] - df['volume_mean']) / df['volume_mean'] * 100 # 成交量变化率\n",
"\n",
" # 6. 情绪因子3波动率Volatility\n",
" df['volatility'] = df['daily_return'].rolling(window=20).std() # 过去20天的日收益率标准差\n",
"\n",
" # 7. 情绪因子4成交额变化率Amount Change Rate\n",
" df['amount_mean'] = df['amount'].rolling(window=20).mean() # 过去20天的平均成交额\n",
" df['amount_change_rate'] = (df['amount'] - df['amount_mean']) / df['amount_mean'] * 100 # 成交额变化率\n",
"\n",
" # df = sentiment_panic_greed_index(df)\n",
" # df = sentiment_market_breadth_proxy(df)\n",
" # df = sentiment_reversal_indicator(df)\n",
"\n",
" return df\n",
"\n",
"\n",
"def generate_index_indicators(h5_filename):\n",
" df = pd.read_hdf(h5_filename, key='index_data')\n",
" df['trade_date'] = pd.to_datetime(df['trade_date'], format='%Y%m%d')\n",
" df = df.sort_values('trade_date')\n",
"\n",
" # 计算每个ts_code的相关指标\n",
" df_indicators = []\n",
" for ts_code in df['ts_code'].unique():\n",
" df_index = df[df['ts_code'] == ts_code].copy()\n",
" df_index = calculate_indicators(df_index)\n",
" df_indicators.append(df_index)\n",
"\n",
" # 合并所有指数的结果\n",
" df_all_indicators = pd.concat(df_indicators, ignore_index=True)\n",
"\n",
" # 保留trade_date列并将同一天的数据按ts_code合并成一行\n",
" df_final = df_all_indicators.pivot_table(\n",
" index='trade_date',\n",
" columns='ts_code',\n",
" values=['daily_return', \n",
" 'RSI', 'MACD', 'Signal_line', 'MACD_hist', \n",
" # 'sentiment_panic_greed_index',\n",
" 'up_ratio_20d', 'volume_change_rate', 'volatility',\n",
" 'amount_change_rate', 'amount_mean'],\n",
" aggfunc='last'\n",
" )\n",
"\n",
" df_final.columns = [f\"{col[1]}_{col[0]}\" for col in df_final.columns]\n",
" df_final = df_final.reset_index()\n",
"\n",
" return df_final\n",
"\n",
"\n",
"# 使用函数\n",
"h5_filename = '../../data/index_data.h5'\n",
"index_data = generate_index_indicators(h5_filename)\n",
"index_data = index_data.dropna()\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "a735bc02ceb4d872",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:10.821169Z",
"start_time": "2025-04-03T12:47:10.751831Z"
}
},
"outputs": [],
"source": [
"import talib\n",
"import numpy as np"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "53f86ddc0677a6d7",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:15.944254Z",
"start_time": "2025-04-03T12:47:10.826179Z"
},
"jupyter": {
"source_hidden": true
},
"scrolled": true
},
"outputs": [],
"source": [
"from main.utils.factor import get_act_factor\n",
"\n",
"\n",
"def read_industry_data(h5_filename):\n",
"    \"\"\"Load SW industry daily bars and build industry-level factors.\n",
"\n",
"    Reads key 'sw_daily', computes OBV, 5/20-day returns, activity factors\n",
"    (get_act_factor) and daily cross-sectional return percentiles, drops the\n",
"    raw OHLC/valuation columns, and prefixes factor columns with 'industry_'.\n",
"    The industry code column is renamed to 'cat_l2_code' for merging.\n",
"    NOTE(review): this local def shadows the same-named import from\n",
"    main.utils.factor in the first cell.\n",
"    \"\"\"\n",
"    industry_data = pd.read_hdf(h5_filename, key='sw_daily', columns=[\n",
"        'ts_code', 'trade_date', 'open', 'close', 'high', 'low', 'pe', 'pb', 'vol'\n",
"    ])\n",
"    industry_data = industry_data.sort_values(by=['ts_code', 'trade_date'])\n",
"    # BUGFIX: reindex() without arguments is a no-op; the intent after the\n",
"    # sort is to rebuild a clean positional index.\n",
"    industry_data = industry_data.reset_index(drop=True)\n",
"    industry_data['trade_date'] = pd.to_datetime(industry_data['trade_date'], format='%Y%m%d')\n",
"\n",
"    grouped = industry_data.groupby('ts_code', group_keys=False)\n",
"    # On-Balance Volume per industry index, aligned back via the group index\n",
"    industry_data['obv'] = grouped.apply(\n",
"        lambda x: pd.Series(talib.OBV(x['close'].values, x['vol'].values), index=x.index)\n",
"    )\n",
"    industry_data['return_5'] = grouped['close'].apply(lambda x: x / x.shift(5) - 1)\n",
"    industry_data['return_20'] = grouped['close'].apply(lambda x: x / x.shift(20) - 1)\n",
"\n",
"    industry_data = get_act_factor(industry_data, cat=False)\n",
"    industry_data = industry_data.sort_values(by=['trade_date', 'ts_code'])\n",
"\n",
"    # Daily cross-sectional percentile rank of 5/20-day returns\n",
"    industry_data['return_5_percentile'] = industry_data.groupby('trade_date')['return_5'].transform(\n",
"        lambda x: x.rank(pct=True))\n",
"    industry_data['return_20_percentile'] = industry_data.groupby('trade_date')['return_20'].transform(\n",
"        lambda x: x.rank(pct=True))\n",
"\n",
"    # Raw price/valuation columns are no longer needed after factor computation\n",
"    industry_data = industry_data.drop(columns=['open', 'close', 'high', 'low', 'pe', 'pb', 'vol'])\n",
"\n",
"    industry_data = industry_data.rename(\n",
"        columns={col: f'industry_{col}' for col in industry_data.columns if col not in ['ts_code', 'trade_date']})\n",
"\n",
"    industry_data = industry_data.rename(columns={'ts_code': 'cat_l2_code'})\n",
"    return industry_data\n",
"\n",
"\n",
"industry_df = read_industry_data('../../data/sw_daily.h5')\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "dbe2fd8021b9417f",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:15.969344Z",
"start_time": "2025-04-03T12:47:15.963327Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['ts_code', 'open', 'close', 'high', 'low', 'circ_mv', 'total_mv', 'is_st', 'up_limit', 'down_limit', 'buy_sm_vol', 'sell_sm_vol', 'buy_lg_vol', 'sell_lg_vol', 'buy_elg_vol', 'sell_elg_vol', 'net_mf_vol', 'his_low', 'his_high', 'cost_5pct', 'cost_15pct', 'cost_50pct', 'cost_85pct', 'cost_95pct', 'weight_avg', 'in_date']\n"
]
}
],
"source": [
"# Base (non-factor) columns: everything in df except factor inputs,\n",
"# index-data columns, and anything chip-distribution (cyq) related.\n",
"origin_columns = [\n",
"    col for col in df.columns.tolist()\n",
"    if col not in ['turnover_rate', 'pe_ttm', 'volume_ratio', 'vol', 'pct_chg', 'l2_code', 'winner_rate']\n",
"    and col not in index_data.columns\n",
"    and 'cyq' not in col\n",
"]\n",
"print(origin_columns)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "85c3e3d0235ffffa",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T12:47:16.089879Z",
"start_time": "2025-04-03T12:47:15.990101Z"
}
},
"outputs": [],
"source": [
"fina_indicator_df = read_and_merge_h5_data('../../data/fina_indicator.h5', key='fina_indicator',\n",
" columns=['ts_code', 'ann_date', 'undist_profit_ps', 'ocfps', 'bps'],\n",
" df=None)\n",
"cashflow_df = read_and_merge_h5_data('../../data/cashflow.h5', key='cashflow',\n",
" columns=['ts_code', 'ann_date', 'n_cashflow_act'],\n",
" df=None)\n",
"balancesheet_df = read_and_merge_h5_data('../../data/balancesheet.h5', key='balancesheet',\n",
" columns=['ts_code', 'ann_date', 'money_cap', 'total_liab'],\n",
" df=None)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "92d84ce15a562ec6",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T13:08:01.612695Z",
"start_time": "2025-04-03T12:47:16.121802Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"使用 'ann_date' 作为财务数据生效日期。\n",
"警告: 从 financial_data_subset 中移除了 366 行,因为其 'ts_code' 或 'ann_date' 列存在空值。\n",
"使用 'ann_date' 作为财务数据生效日期。\n",
"警告: 从 financial_data_subset 中移除了 366 行,因为其 'ts_code' 或 'ann_date' 列存在空值。\n",
"开始计算因子: AR, BR (原地修改)...\n",
"因子 AR, BR 计算成功。\n",
"因子 AR, BR 计算流程结束。\n",
"使用 'ann_date' 作为财务数据生效日期。\n",
"使用 'ann_date' 作为财务数据生效日期。\n",
"使用 'ann_date' 作为财务数据生效日期。\n",
"使用 'ann_date' 作为财务数据生效日期。\n",
"警告: 从 financial_data_subset 中移除了 366 行,因为其 'ts_code' 或 'ann_date' 列存在空值。\n",
"计算 BBI...\n",
"--- 计算日级别偏离度 (使用 pct_chg) ---\n",
"--- 计算日级别动量基准 (使用 pct_chg) ---\n",
"日级别动量基准计算完成 (使用 pct_chg)。\n",
"日级别偏离度计算完成 (使用 pct_chg)。\n",
"--- 计算日级别行业偏离度 (使用 pct_chg 和行业基准) ---\n",
"--- 计算日级别行业动量基准 (使用 pct_chg 和 cat_l2_code) ---\n",
"错误: 计算日级别行业动量基准需要以下列: ['pct_chg', 'cat_l2_code', 'trade_date', 'ts_code']。\n",
"错误: 计算日级别行业偏离度需要以下列: ['pct_chg', 'daily_industry_positive_benchmark', 'daily_industry_negative_benchmark']。请先运行 daily_industry_momentum_benchmark(df)。\n",
"Index(['ts_code', 'trade_date', 'open', 'close', 'high', 'low', 'vol',\n",
" 'pct_chg', 'turnover_rate', 'pe_ttm', 'circ_mv', 'total_mv',\n",
" 'volume_ratio', 'is_st', 'up_limit', 'down_limit', 'buy_sm_vol',\n",
" 'sell_sm_vol', 'buy_lg_vol', 'sell_lg_vol', 'buy_elg_vol',\n",
" 'sell_elg_vol', 'net_mf_vol', 'his_low', 'his_high', 'cost_5pct',\n",
" 'cost_15pct', 'cost_50pct', 'cost_85pct', 'cost_95pct', 'weight_avg',\n",
" 'winner_rate', 'l2_code', 'undist_profit_ps', 'ocfps', 'AR', 'BR',\n",
" 'AR_BR', 'log_circ_mv', 'cashflow_to_ev_factor', 'book_to_price_ratio',\n",
" 'turnover_rate_mean_5', 'variance_20', 'bbi_ratio_factor',\n",
" 'daily_deviation', 'lg_elg_net_buy_vol', 'flow_lg_elg_intensity',\n",
" 'sm_net_buy_vol', 'flow_divergence_diff', 'flow_divergence_ratio',\n",
" 'total_buy_vol', 'lg_elg_buy_prop', 'flow_struct_buy_change',\n",
" 'lg_elg_net_buy_vol_change', 'flow_lg_elg_accel',\n",
" 'chip_concentration_range', 'chip_skewness', 'floating_chip_proxy',\n",
" 'cost_support_15pct_change', 'cat_winner_price_zone',\n",
" 'flow_chip_consistency', 'profit_taking_vs_absorb', '_is_positive',\n",
" '_is_negative', 'cat_is_positive', '_pos_returns', '_neg_returns',\n",
" '_pos_returns_sq', '_neg_returns_sq', 'upside_vol', 'downside_vol',\n",
" 'vol_ratio', 'return_skew', 'return_kurtosis', 'volume_change_rate',\n",
" 'cat_volume_breakout', 'turnover_deviation', 'cat_turnover_spike',\n",
" 'avg_volume_ratio', 'cat_volume_ratio_breakout', 'vol_spike',\n",
" 'vol_std_5', 'atr_14', 'atr_6', 'obv'],\n",
" dtype='object')\n",
"Calculating lg_flow_mom_corr_20_60...\n",
"Finished lg_flow_mom_corr_20_60.\n",
"Calculating lg_flow_accel...\n",
"Finished lg_flow_accel.\n",
"Calculating profit_pressure...\n",
"Finished profit_pressure.\n",
"Calculating underwater_resistance...\n",
"Finished underwater_resistance.\n",
"Calculating cost_conc_std_20...\n",
"Finished cost_conc_std_20.\n",
"Calculating profit_decay_20...\n",
"Finished profit_decay_20.\n",
"Calculating vol_amp_loss_20...\n",
"Finished vol_amp_loss_20.\n",
"Calculating vol_drop_profit_cnt_5...\n",
"Finished vol_drop_profit_cnt_5.\n",
"Calculating lg_flow_vol_interact_20...\n",
"Finished lg_flow_vol_interact_20.\n",
"Calculating cost_break_confirm_cnt_5...\n",
"Finished cost_break_confirm_cnt_5.\n",
"Calculating atr_norm_channel_pos_14...\n",
"Finished atr_norm_channel_pos_14.\n",
"Calculating turnover_diff_skew_20...\n",
"Finished turnover_diff_skew_20.\n",
"Calculating lg_sm_flow_diverge_20...\n",
"Finished lg_sm_flow_diverge_20.\n",
"Calculating pullback_strong_20_20...\n",
"Finished pullback_strong_20_20.\n",
"Calculating vol_wgt_hist_pos_20...\n",
"Finished vol_wgt_hist_pos_20.\n",
"Calculating vol_adj_roc_20...\n",
"Finished vol_adj_roc_20.\n",
"Calculating cs_rank_net_lg_flow_val...\n",
"Finished cs_rank_net_lg_flow_val.\n",
"Calculating cs_rank_flow_divergence...\n",
"Finished cs_rank_flow_divergence.\n",
"Calculating cs_rank_ind_adj_lg_flow...\n",
"Finished cs_rank_ind_adj_lg_flow.\n",
"Calculating cs_rank_elg_buy_ratio...\n",
"Finished cs_rank_elg_buy_ratio.\n",
"Calculating cs_rank_rel_profit_margin...\n",
"Finished cs_rank_rel_profit_margin.\n",
"Calculating cs_rank_cost_breadth...\n",
"Finished cs_rank_cost_breadth.\n",
"Calculating cs_rank_dist_to_upper_cost...\n",
"Finished cs_rank_dist_to_upper_cost.\n",
"Calculating cs_rank_winner_rate...\n",
"Finished cs_rank_winner_rate.\n",
"Calculating cs_rank_intraday_range...\n",
"Finished cs_rank_intraday_range.\n",
"Calculating cs_rank_close_pos_in_range...\n",
"Finished cs_rank_close_pos_in_range.\n",
"Calculating cs_rank_opening_gap...\n",
"Error calculating cs_rank_opening_gap: Missing 'pre_close' column. Assigning NaN.\n",
"Calculating cs_rank_pos_in_hist_range...\n",
"Finished cs_rank_pos_in_hist_range.\n",
"Calculating cs_rank_vol_x_profit_margin...\n",
"Finished cs_rank_vol_x_profit_margin.\n",
"Calculating cs_rank_lg_flow_price_concordance...\n",
"Finished cs_rank_lg_flow_price_concordance.\n",
"Calculating cs_rank_turnover_per_winner...\n",
"Finished cs_rank_turnover_per_winner.\n",
"Calculating cs_rank_ind_cap_neutral_pe (Placeholder - requires statsmodels)...\n",
"Finished cs_rank_ind_cap_neutral_pe (Placeholder).\n",
"Calculating cs_rank_volume_ratio...\n",
"Finished cs_rank_volume_ratio.\n",
"Calculating cs_rank_elg_buy_sell_sm_ratio...\n",
"Finished cs_rank_elg_buy_sell_sm_ratio.\n",
"Calculating cs_rank_cost_dist_vol_ratio...\n",
"Finished cs_rank_cost_dist_vol_ratio.\n",
"Calculating cs_rank_size...\n",
"Finished cs_rank_size.\n",
"<class 'pandas.core.frame.DataFrame'>\n",
"RangeIndex: 4524625 entries, 0 to 4524624\n",
"Columns: 178 entries, ts_code to cs_rank_size\n",
"dtypes: bool(10), datetime64[ns](1), float64(162), int32(3), object(2)\n",
"memory usage: 5.7+ GB\n",
"None\n",
"['ts_code', 'trade_date', 'open', 'close', 'high', 'low', 'vol', 'pct_chg', 'turnover_rate', 'pe_ttm', 'circ_mv', 'total_mv', 'volume_ratio', 'is_st', 'up_limit', 'down_limit', 'buy_sm_vol', 'sell_sm_vol', 'buy_lg_vol', 'sell_lg_vol', 'buy_elg_vol', 'sell_elg_vol', 'net_mf_vol', 'his_low', 'his_high', 'cost_5pct', 'cost_15pct', 'cost_50pct', 'cost_85pct', 'cost_95pct', 'weight_avg', 'winner_rate', 'cat_l2_code', 'undist_profit_ps', 'ocfps', 'AR', 'BR', 'AR_BR', 'log_circ_mv', 'cashflow_to_ev_factor', 'book_to_price_ratio', 'turnover_rate_mean_5', 'variance_20', 'bbi_ratio_factor', 'daily_deviation', 'lg_elg_net_buy_vol', 'flow_lg_elg_intensity', 'sm_net_buy_vol', 'flow_divergence_diff', 'flow_divergence_ratio', 'total_buy_vol', 'lg_elg_buy_prop', 'flow_struct_buy_change', 'lg_elg_net_buy_vol_change', 'flow_lg_elg_accel', 'chip_concentration_range', 'chip_skewness', 'floating_chip_proxy', 'cost_support_15pct_change', 'cat_winner_price_zone', 'flow_chip_consistency', 'profit_taking_vs_absorb', 'cat_is_positive', 'upside_vol', 'downside_vol', 'vol_ratio', 'return_skew', 'return_kurtosis', 'volume_change_rate', 'cat_volume_breakout', 'turnover_deviation', 'cat_turnover_spike', 'avg_volume_ratio', 'cat_volume_ratio_breakout', 'vol_spike', 'vol_std_5', 'atr_14', 'atr_6', 'obv', 'maobv_6', 'rsi_3', 'return_5', 'return_20', 'std_return_5', 'std_return_90', 'std_return_90_2', 'act_factor1', 'act_factor2', 'act_factor3', 'act_factor4', 'rank_act_factor1', 'rank_act_factor2', 'rank_act_factor3', 'cov', 'delta_cov', 'alpha_22_improved', 'alpha_003', 'alpha_007', 'alpha_013', 'vol_break', 'weight_roc5', 'price_cost_divergence', 'smallcap_concentration', 'cost_stability', 'high_cost_break_days', 'liquidity_risk', 'turnover_std', 'mv_volatility', 'volume_growth', 'mv_growth', 'momentum_factor', 'resonance_factor', 'log_close', 'cat_vol_spike', 'up', 'down', 'obv_maobv_6', 'std_return_5_over_std_return_90', 'std_return_90_minus_std_return_90_2', 'cat_af2', 'cat_af3', 'cat_af4', 
'act_factor5', 'act_factor6', 'active_buy_volume_large', 'active_buy_volume_big', 'active_buy_volume_small', 'buy_lg_vol_minus_sell_lg_vol', 'buy_elg_vol_minus_sell_elg_vol', 'ctrl_strength', 'low_cost_dev', 'asymmetry', 'lock_factor', 'cat_vol_break', 'cost_atr_adj', 'cat_golden_resonance', 'mv_turnover_ratio', 'mv_adjusted_volume', 'mv_weighted_turnover', 'nonlinear_mv_volume', 'mv_volume_ratio', 'mv_momentum', 'lg_flow_mom_corr_20_60', 'lg_flow_accel', 'profit_pressure', 'underwater_resistance', 'cost_conc_std_20', 'profit_decay_20', 'vol_amp_loss_20', 'vol_drop_profit_cnt_5', 'lg_flow_vol_interact_20', 'cost_break_confirm_cnt_5', 'atr_norm_channel_pos_14', 'turnover_diff_skew_20', 'lg_sm_flow_diverge_20', 'pullback_strong_20_20', 'vol_wgt_hist_pos_20', 'vol_adj_roc_20', 'cs_rank_net_lg_flow_val', 'cs_rank_flow_divergence', 'cs_rank_ind_adj_lg_flow', 'cs_rank_elg_buy_ratio', 'cs_rank_rel_profit_margin', 'cs_rank_cost_breadth', 'cs_rank_dist_to_upper_cost', 'cs_rank_winner_rate', 'cs_rank_intraday_range', 'cs_rank_close_pos_in_range', 'cs_rank_opening_gap', 'cs_rank_pos_in_hist_range', 'cs_rank_vol_x_profit_margin', 'cs_rank_lg_flow_price_concordance', 'cs_rank_turnover_per_winner', 'cs_rank_ind_cap_neutral_pe', 'cs_rank_volume_ratio', 'cs_rank_elg_buy_sell_sm_ratio', 'cs_rank_cost_dist_vol_ratio', 'cs_rank_size']\n"
]
}
],
"source": [
"\n",
"import numpy as np\n",
"from main.factor.factor import *\n",
"\n",
"def filter_data(df):\n",
"    \"\"\"Keep non-ST, main-board SH/SZ stocks traded from 2019-01-01 on.\n",
"\n",
"    Removes ST stocks, Beijing exchange ('.BJ'), ChiNext ('30*'),\n",
"    STAR market ('68*') and NEEQ ('8*') codes, drops the helper\n",
"    'in_date' column if present, and resets the index.\n",
"    \"\"\"\n",
"    df = df[~df['is_st']]\n",
"    df = df[~df['ts_code'].str.endswith('BJ')]\n",
"    df = df[~df['ts_code'].str.startswith('30')]\n",
"    df = df[~df['ts_code'].str.startswith('68')]\n",
"    df = df[~df['ts_code'].str.startswith('8')]\n",
"    df = df[df['trade_date'] >= '2019-01-01']\n",
"    if 'in_date' in df.columns:\n",
"        df = df.drop(columns=['in_date'])\n",
"    df = df.reset_index(drop=True)\n",
"    return df\n",
"\n",
"gc.collect()\n",
"\n",
"df = filter_data(df)\n",
"df = df.sort_values(by=['ts_code', 'trade_date'])\n",
"df = add_financial_factor(df, fina_indicator_df, factor_value_col='undist_profit_ps')\n",
"df = add_financial_factor(df, fina_indicator_df, factor_value_col='ocfps')\n",
"calculate_arbr(df, N=26)\n",
"df['log_circ_mv'] = np.log(df['circ_mv'])\n",
"df = calculate_cashflow_to_ev_factor(df, cashflow_df, balancesheet_df)\n",
"df = caculate_book_to_price_ratio(df, fina_indicator_df)\n",
"df = turnover_rate_n(df, n=5)\n",
"df = variance_n(df, n=20)\n",
"df = bbi_ratio_factor(df)\n",
"df = daily_deviation(df)\n",
"df, _ = get_rolling_factor(df)\n",
"df, _ = get_simple_factor(df)\n",
"\n",
"df = df.rename(columns={'l1_code': 'cat_l1_code'})\n",
"df = df.rename(columns={'l2_code': 'cat_l2_code'})\n",
"\n",
"# BUGFIX: moved below the l2_code -> cat_l2_code rename. This function\n",
"# requires 'cat_l2_code'; before the move it only printed an error and\n",
"# computed nothing (see the cell output).\n",
"df = daily_industry_deviation(df)\n",
"\n",
"lg_flow_mom_corr(df, N=20, M=60)\n",
"lg_flow_accel(df)\n",
"profit_pressure(df)\n",
"underwater_resistance(df)\n",
"cost_conc_std(df, N=20)\n",
"profit_decay(df, N=20)\n",
"vol_amp_loss(df, N=20)\n",
"vol_drop_profit_cnt(df, N=20, M=5)\n",
"lg_flow_vol_interact(df, N=20)\n",
"cost_break_confirm_cnt(df, M=5)\n",
"atr_norm_channel_pos(df, N=14)\n",
"turnover_diff_skew(df, N=20)\n",
"lg_sm_flow_diverge(df, N=20)\n",
"pullback_strong(df, N=20, M=20)\n",
"vol_wgt_hist_pos(df, N=20)\n",
"vol_adj_roc(df, N=20)\n",
"\n",
"cs_rank_net_lg_flow_val(df)\n",
"cs_rank_flow_divergence(df)\n",
"cs_rank_industry_adj_lg_flow(df)  # needs cat_l2_code\n",
"cs_rank_elg_buy_ratio(df)\n",
"cs_rank_rel_profit_margin(df)\n",
"cs_rank_cost_breadth(df)\n",
"cs_rank_dist_to_upper_cost(df)\n",
"cs_rank_winner_rate(df)\n",
"cs_rank_intraday_range(df)\n",
"cs_rank_close_pos_in_range(df)\n",
"# NOTE(review): 'pre_close' never survives the earlier merges (see df.info()\n",
"# output), so this assigns NaN -- either carry pre_close through the merges\n",
"# or drop this factor.\n",
"cs_rank_opening_gap(df)  # needs pre_close\n",
"cs_rank_pos_in_hist_range(df)  # needs his_low, his_high\n",
"cs_rank_vol_x_profit_margin(df)\n",
"cs_rank_lg_flow_price_concordance(df)\n",
"cs_rank_turnover_per_winner(df)\n",
"cs_rank_ind_cap_neutral_pe(df)  # placeholder - needs external libraries\n",
"cs_rank_volume_ratio(df)  # needs volume_ratio\n",
"cs_rank_elg_buy_sell_sm_ratio(df)\n",
"cs_rank_cost_dist_vol_ratio(df)  # needs volume_ratio\n",
"cs_rank_size(df)  # needs circ_mv\n",
"\n",
"\n",
"# df = df.merge(index_data, on='trade_date', how='left')\n",
"\n",
"print(df.info())\n",
"print(df.columns.tolist())"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "b87b938028afa206",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T13:08:03.658725Z",
"start_time": "2025-04-03T13:08:02.469611Z"
}
},
"outputs": [],
"source": [
"from scipy.stats import ks_2samp, wasserstein_distance\n",
"\n",
"\n",
"def remove_shifted_features(train_data, test_data, feature_columns, ks_threshold=0.05, wasserstein_threshold=0.1,\n",
" importance_threshold=0.05):\n",
" dropped_features = []\n",
"\n",
" # **统计数据漂移**\n",
" numeric_columns = train_data.select_dtypes(include=['float64', 'int64']).columns\n",
" numeric_columns = [col for col in numeric_columns if col in feature_columns]\n",
" for feature in numeric_columns:\n",
" ks_stat, p_value = ks_2samp(train_data[feature], test_data[feature])\n",
" wasserstein_dist = wasserstein_distance(train_data[feature], test_data[feature])\n",
"\n",
" if p_value < ks_threshold or wasserstein_dist > wasserstein_threshold:\n",
" dropped_features.append(feature)\n",
"\n",
" print(f\"检测到 {len(dropped_features)} 个可能漂移的特征: {dropped_features}\")\n",
"\n",
" # **应用阈值进行最终筛选**\n",
" filtered_features = [f for f in feature_columns if f not in dropped_features]\n",
"\n",
" return filtered_features, dropped_features\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "f4f16d63ad18d1bc",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T13:08:03.670700Z",
"start_time": "2025-04-03T13:08:03.665739Z"
}
},
"outputs": [],
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import statsmodels.api as sm # 用于中性化回归\n",
"from tqdm import tqdm # 可选,用于显示进度条\n",
"\n",
"# --- 常量 ---\n",
"epsilon = 1e-10 # 防止除零\n",
"\n",
"# --- 1. 中位数去极值 (MAD) ---\n",
"\n",
"def cs_mad_filter(df: pd.DataFrame,\n",
" features: list,\n",
" k: float = 3.0,\n",
" scale_factor: float = 1.4826):\n",
" \"\"\"\n",
" 对指定特征列进行截面 MAD 去极值处理 (原地修改)。\n",
"\n",
" 方法: 对每日截面数据,计算 median 和 MAD\n",
" 将超出 [median - k * scale * MAD, median + k * scale * MAD] 范围的值\n",
" 替换为边界值 (Winsorization)。\n",
" scale_factor=1.4826 使得 MAD 约等于正态分布的标准差。\n",
"\n",
" Args:\n",
" df (pd.DataFrame): 输入 DataFrame需包含 'trade_date' 和 features 列。\n",
" features (list): 需要处理的特征列名列表。\n",
" k (float): MAD 的倍数,用于确定边界。默认为 3.0。\n",
" scale_factor (float): MAD 的缩放因子。默认为 1.4826。\n",
"\n",
" WARNING: 此函数会原地修改输入的 DataFrame 'df'。\n",
" \"\"\"\n",
" print(f\"开始截面 MAD 去极值处理 (k={k})...\")\n",
" if not all(col in df.columns for col in features):\n",
" missing = [col for col in features if col not in df.columns]\n",
" print(f\"错误: DataFrame 中缺少以下特征列: {missing}。跳过去极值处理。\")\n",
" return\n",
"\n",
" grouped = df.groupby('trade_date')\n",
"\n",
" for col in tqdm(features, desc=\"MAD Filtering\"):\n",
" try:\n",
" # 计算截面中位数\n",
" median = grouped[col].transform('median')\n",
" # 计算截面 MAD (Median Absolute Deviation from Median)\n",
" mad = (df[col] - median).abs().groupby(df['trade_date']).transform('median')\n",
"\n",
" # 计算上下边界\n",
" lower_bound = median - k * scale_factor * mad\n",
" upper_bound = median + k * scale_factor * mad\n",
"\n",
" # 原地应用 clip\n",
" df[col] = np.clip(df[col], lower_bound, upper_bound)\n",
"\n",
" except KeyError:\n",
" print(f\"警告: 列 '{col}' 可能不存在或在分组中出错,跳过此列的 MAD 处理。\")\n",
" except Exception as e:\n",
" print(f\"警告: 处理列 '{col}' 时发生错误: {e},跳过此列的 MAD 处理。\")\n",
"\n",
" print(\"截面 MAD 去极值处理完成。\")\n",
"\n",
"\n",
"# --- 2. 行业市值中性化 ---\n",
"\n",
"def cs_neutralize_industry_cap(df: pd.DataFrame,\n",
"                               features: list,\n",
"                               industry_col: str = 'cat_l2_code',\n",
"                               market_cap_col: str = 'circ_mv'):\n",
"    \"\"\"Cross-sectional industry + log-market-cap neutralization (in place).\n",
"\n",
"    For every trade_date, regresses each feature on\n",
"    1 + log1p(market_cap) + industry dummies via OLS and writes the residuals\n",
"    back into the feature column.\n",
"\n",
"    Args:\n",
"        df (pd.DataFrame): must contain 'trade_date', the feature columns,\n",
"            industry_col and market_cap_col.\n",
"        features (list): feature column names to neutralize.\n",
"        industry_col (str): industry classification column.\n",
"        market_cap_col (str): circulating market cap column.\n",
"\n",
"    WARNING: modifies the feature columns of 'df' in place; computationally\n",
"    heavy. Requires statsmodels (pip install statsmodels).\n",
"    \"\"\"\n",
"    print(\"开始截面行业市值中性化...\")\n",
"    required_cols = features + ['trade_date', industry_col, market_cap_col]\n",
"    if not all(col in df.columns for col in required_cols):\n",
"        missing = [col for col in required_cols if col not in df.columns]\n",
"        print(f\"错误: DataFrame 中缺少必需列: {missing}。无法进行中性化。\")\n",
"        return\n",
"\n",
"    # log1p handles zero market caps\n",
"    log_cap_col = '_log_market_cap'\n",
"    df[log_cap_col] = np.log1p(df[market_cap_col])\n",
"\n",
"    dates = df['trade_date'].unique()\n",
"    all_residuals = []  # per-date residual frames, concatenated at the end\n",
"\n",
"    for date in tqdm(dates, desc=\"Neutralizing\"):\n",
"        daily_data = df.loc[df['trade_date'] == date, features + [log_cap_col, industry_col]].copy()\n",
"\n",
"        # Regressors: constant + log market cap + industry dummies\n",
"        X = daily_data[[log_cap_col]]\n",
"        X = sm.add_constant(X, prepend=True)\n",
"        # drop_first=True avoids the dummy-variable trap (perfect collinearity)\n",
"        industry_dummies = pd.get_dummies(daily_data[industry_col], prefix=industry_col, drop_first=True)\n",
"        industry_dummies = industry_dummies.astype(int)\n",
"        X = pd.concat([X, industry_dummies], axis=1)\n",
"\n",
"        daily_residuals = daily_data[[col for col in features]].copy()\n",
"\n",
"        for col in features:\n",
"            Y = daily_data[col]\n",
"\n",
"            # Use only rows where both Y and every regressor are non-NaN\n",
"            valid_mask = Y.notna() & X.notna().all(axis=1)\n",
"            if valid_mask.sum() < (X.shape[1] + 1):  # too few points to fit the model\n",
"                print(f\"警告: 日期 {date}, 特征 {col} 有效数据不足 ({valid_mask.sum()}个),无法中性化,填充 NaN。\")\n",
"                daily_residuals[col] = np.nan\n",
"                continue\n",
"\n",
"            Y_valid = Y[valid_mask]\n",
"            X_valid = X[valid_mask]\n",
"\n",
"            try:\n",
"                model = sm.OLS(Y_valid.to_numpy(), X_valid.to_numpy())\n",
"                results = model.fit()\n",
"                # Residuals go back to the valid rows; invalid rows get NaN\n",
"                daily_residuals.loc[valid_mask, col] = results.resid\n",
"                daily_residuals.loc[~valid_mask, col] = np.nan\n",
"            except Exception as e:\n",
"                print(f\"警告: 日期 {date}, 特征 {col} 回归失败: {e},填充 NaN。\")\n",
"                daily_residuals[col] = np.nan\n",
"                # BUGFIX: was 'break', which silently skipped every remaining\n",
"                # feature for this date after a single failed regression.\n",
"                continue\n",
"\n",
"        all_residuals.append(daily_residuals)\n",
"\n",
"    # Write residuals back into df by index (in-place update)\n",
"    if all_residuals:\n",
"        residuals_df = pd.concat(all_residuals)\n",
"        # NOTE(review): DataFrame.update ignores NaN values in residuals_df,\n",
"        # so rows marked NaN above keep their original (un-neutralized)\n",
"        # values -- confirm this is intended.\n",
"        df.update(residuals_df)\n",
"    else:\n",
"        print(\"没有有效的残差结果可以合并。\")\n",
"\n",
"    # Drop the temporary log-cap column\n",
"    df.drop(columns=[log_cap_col], inplace=True)\n",
"    print(\"截面行业市值中性化完成。\")\n",
"\n",
"\n",
"# --- 3. Cross-sectional Z-Score standardization ---\n",
"\n",
"def cs_zscore_standardize(df: pd.DataFrame, features: list, epsilon: float = 1e-10):\n",
"    \"\"\"\n",
"    Cross-sectional Z-Score standardization of the given feature columns (in place).\n",
"    Formula: Z = (value - cross_sectional_mean) / (cross_sectional_std + epsilon)\n",
"\n",
"    Args:\n",
"        df (pd.DataFrame): input frame; must contain 'trade_date' and all feature columns.\n",
"        features (list): names of the columns to standardize.\n",
"        epsilon (float): small constant guarding against division by zero.\n",
"\n",
"    WARNING: mutates the input DataFrame 'df' in place.\n",
"    \"\"\"\n",
"    print(\"开始截面 Z-Score 标准化...\")\n",
"    missing = [col for col in features if col not in df.columns]\n",
"    if missing:\n",
"        print(f\"错误: DataFrame 中缺少以下特征列: {missing}。跳过标准化处理。\")\n",
"        return\n",
"\n",
"    by_date = df.groupby('trade_date')\n",
"\n",
"    for col in tqdm(features, desc=\"Standardizing\"):\n",
"        try:\n",
"            # Per-date cross-sectional moments, broadcast back via transform\n",
"            cs_mean = by_date[col].transform('mean')\n",
"            cs_std = by_date[col].transform('std')\n",
"\n",
"            # Z-score and assign back in place\n",
"            df[col] = (df[col] - cs_mean) / (cs_std + epsilon)\n",
"\n",
"        except KeyError:\n",
"            print(f\"警告: 列 '{col}' 可能不存在或在分组中出错,跳过此列的标准化处理。\")\n",
"        except Exception as e:\n",
"            print(f\"警告: 处理列 '{col}' 时发生错误: {e},跳过此列的标准化处理。\")\n",
"\n",
"    print(\"截面 Z-Score 标准化完成。\")\n",
"\n",
"def fill_nan_with_daily_median(df: pd.DataFrame, feature_columns: list[str]) -> pd.DataFrame:\n",
" \"\"\"\n",
" 对指定特征列进行每日截面中位数填充缺失值 (NaN)。\n",
"\n",
" 参数:\n",
" df (pd.DataFrame): 包含多日数据的DataFrame需要包含 'trade_date' 和 feature_columns 中的列。\n",
" feature_columns (list[str]): 需要进行缺失值填充的特征列名称列表。\n",
"\n",
" 返回:\n",
" pd.DataFrame: 包含缺失值填充后特征列的DataFrame。在输入DataFrame的副本上操作。\n",
" \"\"\"\n",
" processed_df = df.copy() # 在副本上操作,保留原始数据\n",
"\n",
" # 确保 trade_date 是 datetime 类型以便正确分组\n",
" processed_df['trade_date'] = pd.to_datetime(processed_df['trade_date'])\n",
"\n",
" def _fill_daily_nan(group):\n",
" # group 是某一个交易日的 DataFrame\n",
"\n",
" # 遍历指定的特征列\n",
" for feature_col in feature_columns:\n",
" # 检查列是否存在于当前分组中\n",
" if feature_col in group.columns:\n",
" # 计算当日该特征的中位数\n",
" median_val = group[feature_col].median()\n",
"\n",
" # 使用当日中位数填充该特征列的 NaN 值\n",
" # inplace=True 会直接修改 group DataFrame\n",
" group[feature_col].fillna(median_val, inplace=True)\n",
" # else:\n",
" # print(f\"Warning: Feature column '{feature_col}' not found in daily group for {group['trade_date'].iloc[0]}. Skipping.\")\n",
"\n",
" return group\n",
"\n",
" # 按交易日期分组,并应用每日填充函数\n",
" # group_keys=False 避免将分组键添加到结果索引中\n",
" filled_df = processed_df.groupby('trade_date', group_keys=False).apply(_fill_daily_nan)\n",
"\n",
" return filled_df"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "40e6b68a91b30c79",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T13:08:04.694262Z",
"start_time": "2025-04-03T13:08:03.694904Z"
}
},
"outputs": [],
"source": [
"import pandas as pd\n",
"\n",
"\n",
"def remove_outliers_label_percentile(label: pd.Series, lower_percentile: float = 0.01, upper_percentile: float = 0.99,\n",
"                                     log=True):\n",
"    \"\"\"Drop label values outside the [lower_percentile, upper_percentile] quantile band.\n",
"\n",
"    Returns the filtered Series; optionally logs how many values were removed.\n",
"    \"\"\"\n",
"    if not (0 <= lower_percentile < upper_percentile <= 1):\n",
"        raise ValueError(\"Percentile values must satisfy 0 <= lower_percentile < upper_percentile <= 1.\")\n",
"\n",
"    # Quantile cut-offs computed from the label itself\n",
"    lo = label.quantile(lower_percentile)\n",
"    hi = label.quantile(upper_percentile)\n",
"\n",
"    # between() is inclusive on both ends, matching (>= lo) & (<= hi)\n",
"    kept = label[label.between(lo, hi)]\n",
"\n",
"    if log:\n",
"        print(f\"Removed {len(label) - len(kept)} outliers.\")\n",
"    return kept\n",
"\n",
"\n",
"def calculate_risk_adjusted_target(df, days=5):\n",
"    \"\"\"\n",
"    Compute a Sharpe-style risk-adjusted forward-return target.\n",
"\n",
"    future_return: buy at the next day's open, sell at the close `days` bars ahead.\n",
"    future_volatility: rolling std (window=`days`) of future_return within each stock.\n",
"    Target = future_return / future_volatility; +/-inf (zero volatility) -> NaN.\n",
"\n",
"    Args:\n",
"        df (pd.DataFrame): must contain 'ts_code', 'trade_date', 'open', 'close'.\n",
"        days (int): forward horizon and volatility window length.\n",
"\n",
"    Returns:\n",
"        pd.Series: the risk-adjusted target, aligned with the sorted frame's rows.\n",
"    \"\"\"\n",
"    # sort_values returns a new frame, so the caller's df is not mutated\n",
"    df = df.sort_values(by=['ts_code', 'trade_date'])\n",
"\n",
"    df['future_close'] = df.groupby('ts_code')['close'].shift(-days)\n",
"    df['future_open'] = df.groupby('ts_code')['open'].shift(-1)\n",
"    df['future_return'] = (df['future_close'] - df['future_open']) / df['future_open']\n",
"\n",
"    df['future_volatility'] = df.groupby('ts_code')['future_return'].rolling(days, min_periods=1).std().reset_index(\n",
"        level=0, drop=True)\n",
"    # BUG FIX: a Sharpe-style ratio divides return by volatility; the original\n",
"    # multiplied them, which amplifies rather than adjusts for risk.\n",
"    sharpe_ratio = df['future_return'] / df['future_volatility']\n",
"    sharpe_ratio.replace([np.inf, -np.inf], np.nan, inplace=True)\n",
"\n",
"    return sharpe_ratio\n",
"\n",
"\n",
"def calculate_score(df, days=5, lambda_param=1.0):\n",
" def calculate_max_drawdown(prices):\n",
" peak = prices.iloc[0] # 初始化峰值\n",
" max_drawdown = 0 # 初始化最大回撤\n",
"\n",
" for price in prices:\n",
" if price > peak:\n",
" peak = price # 更新峰值\n",
" else:\n",
" drawdown = (peak - price) / peak # 计算当前回撤\n",
" max_drawdown = max(max_drawdown, drawdown) # 更新最大回撤\n",
"\n",
" return max_drawdown\n",
"\n",
" def compute_stock_score(stock_df):\n",
" stock_df = stock_df.sort_values(by=['trade_date'])\n",
" future_return = stock_df['future_return']\n",
" # 使用已有的 pct_chg 字段计算波动率\n",
" volatility = stock_df['pct_chg'].rolling(days).std().shift(-days)\n",
" max_drawdown = stock_df['close'].rolling(days).apply(calculate_max_drawdown, raw=False).shift(-days)\n",
" score = future_return - lambda_param * max_drawdown\n",
" return score\n",
"\n",
" # # 确保 DataFrame 按照股票代码和交易日期排序\n",
" # df = df.sort_values(by=['ts_code', 'trade_date'])\n",
"\n",
" # 对每个股票分别计算 score\n",
" df['score'] = df.groupby('ts_code').apply(compute_stock_score).reset_index(level=0, drop=True)\n",
"\n",
" return df['score']\n",
"\n",
"\n",
"def remove_highly_correlated_features(df, feature_columns, threshold=0.9):\n",
"    \"\"\"Drop one column of every highly correlated pair (|corr| > threshold).\n",
"\n",
"    Columns whose name contains 'act' or 'af' are always kept. Returns the\n",
"    surviving feature names in their original order.\n",
"    \"\"\"\n",
"    numeric_cols = df[feature_columns].select_dtypes(include=[np.number]).columns.tolist()\n",
"    if not numeric_cols:\n",
"        raise ValueError(\"No numeric features found in the provided data.\")\n",
"\n",
"    # Absolute correlations, upper triangle only (k=1 excludes the diagonal)\n",
"    abs_corr = df[numeric_cols].corr().abs()\n",
"    mask = np.triu(np.ones(abs_corr.shape), k=1).astype(bool)\n",
"    upper_tri = abs_corr.where(mask)\n",
"\n",
"    # Any column correlated above threshold with an earlier column gets dropped\n",
"    drop_set = {c for c in upper_tri.columns if (upper_tri[c] > threshold).any()}\n",
"    kept = [col for col in feature_columns\n",
"            if col not in drop_set or 'act' in col or 'af' in col]\n",
"    return kept\n",
"\n",
"\n",
"def cross_sectional_standardization(df, features):\n",
"    \"\"\"Standardize the given features within each trade date using StandardScaler.\n",
"\n",
"    Returns a new, date-sorted DataFrame; the input frame is left untouched.\n",
"    \"\"\"\n",
"    out = df.sort_values(by='trade_date').copy()\n",
"\n",
"    for date in out['trade_date'].unique():\n",
"        day_mask = out['trade_date'] == date\n",
"        day_rows = out[day_mask]\n",
"\n",
"        # Fit a fresh scaler on each daily cross-section\n",
"        scaler = StandardScaler()\n",
"        scaled = scaler.fit_transform(day_rows[features])\n",
"\n",
"        # Write the standardized values back into the same rows\n",
"        out.loc[day_mask, features] = scaled\n",
"\n",
"    return out\n",
"\n",
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"\n",
"\n",
"def neutralize_manual_revised(df: pd.DataFrame, features: list, industry_col: str, mkt_cap_col: str) -> pd.DataFrame:\n",
" \"\"\"\n",
" 手动实现简单回归以提升速度,通过构建 Series 确保索引对齐。\n",
" 对特征在行业内部进行市值中性化。\n",
"\n",
" Args:\n",
" df: 输入的 DataFrame包含特征、行业分类和市值列。\n",
" features: 需要进行中性化的特征列名列表。\n",
" industry_col: 行业分类列的列名。\n",
" mkt_cap_col: 市值列的列名。\n",
"\n",
" Returns:\n",
" 中性化后的 DataFrame。\n",
" \"\"\"\n",
"\n",
" df[mkt_cap_col] = pd.to_numeric(df[mkt_cap_col], errors='coerce')\n",
" df_cleaned = df.dropna(subset=[mkt_cap_col]).copy()\n",
" df_cleaned = df_cleaned[df_cleaned[mkt_cap_col] > 0].copy()\n",
"\n",
" if df_cleaned.empty:\n",
" print(\"警告: 清理市值异常值后 DataFrame 为空。\")\n",
" return df # 返回原始或空df取决于清理前的状态\n",
"\n",
" processed_df = df\n",
"\n",
" for col in features:\n",
" if col not in df_cleaned.columns:\n",
" print(f\"警告: 特征列 '{col}' 不存在于清理后的 DataFrame 中,已跳过。\")\n",
" # 对于原始 df 中该列不存在的,在结果 df 中也保持原样可能全是NaN\n",
" processed_df[col] = df[col] if col in df.columns else np.nan\n",
" continue\n",
"\n",
" # 跳过对控制变量本身进行中性化\n",
" if col == mkt_cap_col or col == industry_col:\n",
" print(f\"警告: 特征列 '{col}' 是控制变量或内部使用的列,跳过中性化。\")\n",
" # 在结果 df 中也保持原样\n",
" processed_df[col] = df[col] if col in df.columns else np.nan\n",
" continue\n",
"\n",
" residual_series = pd.Series(index=df_cleaned.index, dtype=float)\n",
"\n",
" # 在分组前处理特征列的 NaN只对有因子值的行进行回归计算\n",
" df_subset_factor = df_cleaned.dropna(subset=[col]).copy()\n",
"\n",
" if not df_subset_factor.empty:\n",
" for industry, group in df_subset_factor.groupby(industry_col):\n",
" x = group[mkt_cap_col] # 市值对数\n",
" y = group[col] # 因子值\n",
"\n",
" # 确保有足够的数据点 (>1) 且市值对数有方差 (>0) 进行回归计算\n",
" # 检查 np.var > 一个很小的正数,避免浮点数误差导致的零方差判断问题\n",
" if len(group) > 1 and np.var(x) > 1e-9:\n",
" try:\n",
" beta = np.cov(y, x)[0, 1] / np.var(x)\n",
" alpha = np.mean(y) - beta * np.mean(x)\n",
"\n",
" # 计算残差\n",
" resid = y - (alpha + beta * x)\n",
"\n",
" # 将计算出的残差存储到 residual_series 中,通过索引自动对齐\n",
" residual_series.loc[resid.index] = resid\n",
"\n",
" except Exception as e:\n",
" # 捕获可能的计算异常例如np.cov或np.var因为极端数据报错\n",
" print(f\"警告: 在行业 {industry} 计算回归时发生错误: {e}。该组残差将设为原始值或 NaN。\")\n",
" # 此时该组的残差会保持 residual_series 初始化时的 NaN 或后续处理\n",
" # 也可以选择保留原始值residual_series.loc[group.index] = group[col]\n",
"\n",
" else:\n",
" residual_series.loc[group.index] = group[col] # 保留原始因子值\n",
" processed_df.loc[residual_series.index, col] = residual_series\n",
"\n",
"\n",
" else:\n",
" processed_df[col] = np.nan # 或 df[col] if col in df.columns else np.nan\n",
"\n",
" return processed_df\n",
"\n",
"\n",
"import gc\n",
"\n",
"gc.collect()\n",
"\n",
"\n",
"def mad_filter(df, features, n=3):\n",
"    \"\"\"Winsorize each feature to median ± n * MAD (in place) and return df.\n",
"\n",
"    Note: the MAD here is global over the whole column, not per trade date.\n",
"    \"\"\"\n",
"    for col in features:\n",
"        center = df[col].median()\n",
"        mad = np.median(np.abs(df[col] - center))\n",
"        # Clip extreme values into the [center - n*mad, center + n*mad] band\n",
"        df[col] = np.clip(df[col], center - n * mad, center + n * mad)\n",
"    return df\n",
"\n",
"\n",
"def percentile_filter(df, features, lower_percentile=0.01, upper_percentile=0.99):\n",
"    \"\"\"Clip each feature to its per-trade-date percentile band (in place) and return df.\"\"\"\n",
"    grouped = df.groupby('trade_date')\n",
"    for col in features:\n",
"        # Per-date lower/upper bounds, broadcast back to every row of that date\n",
"        lo = grouped[col].transform(lambda x: x.quantile(lower_percentile))\n",
"        hi = grouped[col].transform(lambda x: x.quantile(upper_percentile))\n",
"        # Truncate values outside the band\n",
"        df[col] = np.clip(df[col], lo, hi)\n",
"    return df\n",
"\n",
"\n",
"from scipy.stats import iqr\n",
"\n",
"\n",
"def iqr_filter(df, features):\n",
"    \"\"\"Per trade date, center each feature at its median and scale by the IQR (in place).\n",
"\n",
"    Groups whose IQR is zero are left unchanged to avoid division by zero.\n",
"    \"\"\"\n",
"    def _scale(x):\n",
"        spread = iqr(x)\n",
"        return (x - x.median()) / spread if spread != 0 else x\n",
"\n",
"    for col in features:\n",
"        df[col] = df.groupby('trade_date')[col].transform(_scale)\n",
"    return df\n",
"\n",
"\n",
"def quantile_filter(df, features, lower_quantile=0.01, upper_quantile=0.99, window=60):\n",
" df = df.copy()\n",
" for col in features:\n",
" # 计算 rolling 统计量,需要按日期进行 groupby\n",
" rolling_lower = df.groupby('trade_date')[col].transform(lambda x: x.rolling(window=min(len(x), window)).quantile(lower_quantile))\n",
" rolling_upper = df.groupby('trade_date')[col].transform(lambda x: x.rolling(window=min(len(x), window)).quantile(upper_quantile))\n",
"\n",
" # 对数据进行裁剪\n",
" df[col] = np.clip(df[col], rolling_lower, rolling_upper)\n",
" \n",
" return df\n",
"\n",
"def select_top_features_by_rankic(df: pd.DataFrame, feature_columns: list, n: int, target_column: str = 'future_return') -> list:\n",
"    \"\"\"\n",
"    Rank features by |Spearman RankIC| against the target column and keep the top n.\n",
"\n",
"    Args:\n",
"        df: DataFrame holding the feature columns and the target column.\n",
"        feature_columns: candidate feature names to evaluate.\n",
"        n: number of top-|RankIC| numeric features to keep.\n",
"        target_column: column used as the IC target (default 'future_return').\n",
"\n",
"    Returns:\n",
"        The selected names in their original feature_columns order; non-numeric\n",
"        candidates are always retained.\n",
"    \"\"\"\n",
"    numeric_columns = df.select_dtypes(include=['float64', 'int64']).columns\n",
"    numeric_columns = [col for col in numeric_columns if col in feature_columns]\n",
"    if target_column not in df.columns:\n",
"        raise ValueError(f\"目标列 '{target_column}' 不存在于 DataFrame 中。\")\n",
"\n",
"    rankic_scores = {}\n",
"    for feature in numeric_columns:\n",
"        if feature not in df.columns:\n",
"            print(f\"警告: 特征列 '{feature}' 不存在于 DataFrame 中,已跳过。\")\n",
"            continue\n",
"\n",
"        # Spearman correlation over rows where both series are present\n",
"        pair = df[[feature, target_column]].dropna()\n",
"        if len(pair) > 1:\n",
"            rho = pair[feature].corr(pair[target_column], method='spearman')\n",
"            rankic_scores[feature] = abs(rho)  # strength only; sign is irrelevant here\n",
"        else:\n",
"            rankic_scores[feature] = 0  # not enough data points for a correlation\n",
"\n",
"    # Series makes the sort/selection below straightforward\n",
"    rankic_series = pd.Series(rankic_scores)\n",
"\n",
"    # Guard against n exceeding the number of scored features\n",
"    n_actual = min(n, len(rankic_series))\n",
"    top_features = rankic_series.sort_values(ascending=False).head(n_actual).index.tolist()\n",
"    # Preserve the original ordering and always keep non-numeric candidates\n",
"    top_features = [col for col in feature_columns if col in top_features or col not in numeric_columns]\n",
"    return top_features\n",
"\n",
"def create_deviation_within_dates(df, feature_columns):\n",
"    \"\"\"Add per-(trade_date, industry) mean-deviation columns for numeric features.\n",
"\n",
"    For each eligible feature f, creates 'deviation_mean_f' = f minus the mean of f\n",
"    within its (trade_date, cat_l2_code) group. Returns the widened DataFrame and\n",
"    the extended feature-name list.\n",
"    \"\"\"\n",
"    groupby_col = 'cat_l2_code'  # level-2 industry grouping key\n",
"    new_columns = {}\n",
"    ret_feature_columns = feature_columns[:]\n",
"\n",
"    # Numeric candidates: exclude category/index/industry/limit/cyq style columns\n",
"    excluded_tags = ('cat', 'index', 'industry', 'limit', 'cyq')\n",
"    num_features = [col for col in feature_columns\n",
"                    if not any(tag in col for tag in excluded_tags)]\n",
"\n",
"    for feature in num_features:\n",
"        if feature == 'trade_date':  # never compute a deviation of the date itself\n",
"            continue\n",
"\n",
"        group_mean = df.groupby(['trade_date', groupby_col])[feature].transform('mean')\n",
"        dev_name = f'deviation_mean_{feature}'\n",
"        new_columns[dev_name] = df[feature] - group_mean\n",
"        ret_feature_columns.append(dev_name)\n",
"\n",
"    # Attach all deviation columns in one concat to avoid fragmented inserts\n",
"    df = pd.concat([df, pd.DataFrame(new_columns)], axis=1)\n",
"\n",
"    return df, ret_feature_columns\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "47c12bb34062ae7a",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T14:57:50.841165Z",
"start_time": "2025-04-03T14:49:25.889057Z"
}
},
"outputs": [],
"source": [
"days = 5\n",
"validation_days = 120\n",
"\n",
"import gc\n",
"\n",
"gc.collect()\n",
"\n",
"df = df.sort_values(by=['ts_code', 'trade_date'])\n",
"# df['future_return'] = df.groupby('ts_code', group_keys=False)['close'].apply(lambda x: x.shift(-days) / x - 1)\n",
"df['future_return'] = (df.groupby('ts_code')['close'].shift(-days) - df.groupby('ts_code')['open'].shift(-1)) / \\\n",
" df.groupby('ts_code')['open'].shift(-1)\n",
"\n",
"df['cat_up_limit'] = df['pct_chg'] > 5\n",
"# df['label'] = df.groupby('ts_code')['cat_up_limit'].rolling(window=5, min_periods=1).max().shift(-5).fillna(0).astype(int).reset_index(level=0, drop=True)\n",
"df['label'] = df['future_return']\n",
"\n",
"filter_index = df['future_return'].between(df['future_return'].quantile(0.01), df['future_return'].quantile(0.99))\n",
"\n",
"# for col in [col for col in df.columns]:\n",
"# train_data[col] = train_data[col].astype('str')\n",
"# test_data[col] = test_data[col].astype('str')"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "29221dde",
"metadata": {},
"outputs": [],
"source": [
"feature_columns = [col for col in df.head(10).merge(industry_df, on=['cat_l2_code', 'trade_date'], how='left').merge(index_data, on='trade_date', how='left').columns]\n",
"feature_columns = [col for col in feature_columns if col not in ['trade_date',\n",
" 'ts_code',\n",
" 'label']]\n",
"feature_columns = [col for col in feature_columns if 'future' not in col]\n",
"feature_columns = [col for col in feature_columns if 'label' not in col]\n",
"feature_columns = [col for col in feature_columns if 'score' not in col]\n",
"feature_columns = [col for col in feature_columns if 'gen' not in col]\n",
"feature_columns = [col for col in feature_columns if 'is_st' not in col]\n",
"feature_columns = [col for col in feature_columns if 'pe_ttm' not in col]\n",
"# feature_columns = [col for col in feature_columns if 'volatility' not in col]\n",
"feature_columns = [col for col in feature_columns if 'circ_mv' not in col]\n",
"feature_columns = [col for col in feature_columns if 'code' not in col]\n",
"feature_columns = [col for col in feature_columns if col not in origin_columns]\n",
"feature_columns = [col for col in feature_columns if not col.startswith('_')]\n",
"# feature_columns = [col for col in feature_columns if col not in ['ts_code', 'trade_date', 'vol_std_5', 'cov', 'delta_cov', 'alpha_22_improved', 'alpha_007', 'consecutive_up_limit', 'mv_volatility', 'volume_growth', 'mv_growth', 'arbr']]\n",
"feature_columns = [col for col in feature_columns if col not in ['intraday_lg_flow_corr_20', \n",
" 'cap_neutral_cost_metric', \n",
" 'hurst_net_mf_vol_60', \n",
" 'complex_factor_deap_1', \n",
" 'lg_buy_consolidation_20',\n",
" 'cs_rank_ind_cap_neutral_pe',\n",
" 'cs_rank_opening_gap',\n",
" 'cs_rank_ind_adj_lg_flow']]\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "03ee5daf",
"metadata": {},
"outputs": [],
"source": [
"# df = fill_nan_with_daily_median(df, feature_columns)\n",
"# Zero-fill remaining NaNs in the selected feature columns.\n",
"# Assigning back avoids Series.fillna(inplace=True) on a column selection, which\n",
"# is deprecated and silently ineffective under pandas copy-on-write.\n",
"for feature_col in [col for col in feature_columns if col in df.columns]:\n",
"    # median_val = df[feature_col].median()\n",
"    df[feature_col] = df[feature_col].fillna(0)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "b76ea08a",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" ts_code trade_date log_circ_mv\n",
"0 000001.SZ 2019-01-02 16.574219\n",
"1 000001.SZ 2019-01-03 16.583965\n",
"2 000001.SZ 2019-01-04 16.633371\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['vol', 'pct_chg', 'turnover_rate', 'volume_ratio', 'winner_rate', 'undist_profit_ps', 'ocfps', 'AR', 'BR', 'AR_BR', 'cashflow_to_ev_factor', 'book_to_price_ratio', 'turnover_rate_mean_5', 'variance_20', 'bbi_ratio_factor', 'daily_deviation', 'lg_elg_net_buy_vol', 'flow_lg_elg_intensity', 'sm_net_buy_vol', 'total_buy_vol', 'lg_elg_buy_prop', 'flow_struct_buy_change', 'lg_elg_net_buy_vol_change', 'flow_lg_elg_accel', 'chip_concentration_range', 'chip_skewness', 'floating_chip_proxy', 'cost_support_15pct_change', 'cat_winner_price_zone', 'flow_chip_consistency', 'profit_taking_vs_absorb', 'cat_is_positive', 'upside_vol', 'downside_vol', 'vol_ratio', 'return_skew', 'return_kurtosis', 'volume_change_rate', 'cat_volume_breakout', 'turnover_deviation', 'cat_turnover_spike', 'avg_volume_ratio', 'cat_volume_ratio_breakout', 'vol_spike', 'vol_std_5', 'atr_14', 'atr_6', 'obv', 'maobv_6', 'rsi_3', 'return_5', 'return_20', 'std_return_5', 'std_return_90', 'std_return_90_2', 'act_factor1', 'act_factor2', 'act_factor3', 'act_factor4', 'rank_act_factor1', 'rank_act_factor2', 'rank_act_factor3', 'cov', 'delta_cov', 'alpha_22_improved', 'alpha_003', 'alpha_007', 'alpha_013', 'vol_break', 'weight_roc5', 'smallcap_concentration', 'cost_stability', 'high_cost_break_days', 'liquidity_risk', 'turnover_std', 'mv_volatility', 'volume_growth', 'mv_growth', 'momentum_factor', 'resonance_factor', 'log_close', 'cat_vol_spike', 'up', 'down', 'obv_maobv_6', 'std_return_5_over_std_return_90', 'std_return_90_minus_std_return_90_2', 'cat_af2', 'cat_af3', 'cat_af4', 'act_factor5', 'act_factor6', 'active_buy_volume_large', 'active_buy_volume_big', 'active_buy_volume_small', 'buy_lg_vol_minus_sell_lg_vol', 'buy_elg_vol_minus_sell_elg_vol', 'ctrl_strength', 'low_cost_dev', 'asymmetry', 'lock_factor', 'cat_vol_break', 'cost_atr_adj', 'cat_golden_resonance', 'mv_turnover_ratio', 'mv_adjusted_volume', 'mv_weighted_turnover', 'nonlinear_mv_volume', 'mv_volume_ratio', 'mv_momentum', 
'lg_flow_mom_corr_20_60', 'lg_flow_accel', 'profit_pressure', 'underwater_resistance', 'cost_conc_std_20', 'profit_decay_20', 'vol_amp_loss_20', 'vol_drop_profit_cnt_5', 'lg_flow_vol_interact_20', 'cost_break_confirm_cnt_5', 'atr_norm_channel_pos_14', 'turnover_diff_skew_20', 'lg_sm_flow_diverge_20', 'pullback_strong_20_20', 'vol_wgt_hist_pos_20', 'vol_adj_roc_20', 'cs_rank_net_lg_flow_val', 'cs_rank_elg_buy_ratio', 'cs_rank_rel_profit_margin', 'cs_rank_cost_breadth', 'cs_rank_dist_to_upper_cost', 'cs_rank_winner_rate', 'cs_rank_intraday_range', 'cs_rank_close_pos_in_range', 'cs_rank_pos_in_hist_range', 'cs_rank_vol_x_profit_margin', 'cs_rank_lg_flow_price_concordance', 'cs_rank_turnover_per_winner', 'cs_rank_volume_ratio', 'cs_rank_elg_buy_sell_sm_ratio', 'cs_rank_cost_dist_vol_ratio', 'cs_rank_size', 'cat_up_limit', 'industry_obv', 'industry_return_5', 'industry_return_20', 'industry__ema_5', 'industry__ema_13', 'industry__ema_20', 'industry__ema_60', 'industry_act_factor1', 'industry_act_factor2', 'industry_act_factor3', 'industry_act_factor4', 'industry_act_factor5', 'industry_act_factor6', 'industry_rank_act_factor1', 'industry_rank_act_factor2', 'industry_rank_act_factor3', 'industry_return_5_percentile', 'industry_return_20_percentile', '000852.SH_MACD', '000905.SH_MACD', '399006.SZ_MACD', '000852.SH_MACD_hist', '000905.SH_MACD_hist', '399006.SZ_MACD_hist', '000852.SH_RSI', '000905.SH_RSI', '399006.SZ_RSI', '000852.SH_Signal_line', '000905.SH_Signal_line', '399006.SZ_Signal_line', '000852.SH_amount_change_rate', '000905.SH_amount_change_rate', '399006.SZ_amount_change_rate', '000852.SH_amount_mean', '000905.SH_amount_mean', '399006.SZ_amount_mean', '000852.SH_daily_return', '000905.SH_daily_return', '399006.SZ_daily_return', '000852.SH_up_ratio_20d', '000905.SH_up_ratio_20d', '399006.SZ_up_ratio_20d', '000852.SH_volatility', '000905.SH_volatility', '399006.SZ_volatility', '000852.SH_volume_change_rate', '000905.SH_volume_change_rate', 
'399006.SZ_volume_change_rate']\n",
"去除极值\n",
"开始截面 MAD 去极值处理 (k=3.0)...\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"MAD Filtering: 100%|██████████| 131/131 [00:28<00:00, 4.63it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"截面 MAD 去极值处理完成。\n",
"开始截面 MAD 去极值处理 (k=3.0)...\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"MAD Filtering: 100%|██████████| 131/131 [00:23<00:00, 5.60it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"截面 MAD 去极值处理完成。\n",
"开始截面 MAD 去极值处理 (k=3.0)...\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"MAD Filtering: 0it [00:00, ?it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"截面 MAD 去极值处理完成。\n",
"开始截面 MAD 去极值处理 (k=3.0)...\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"MAD Filtering: 0it [00:00, ?it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"截面 MAD 去极值处理完成。\n",
"feature_columns: ['vol', 'pct_chg', 'turnover_rate', 'volume_ratio', 'winner_rate', 'undist_profit_ps', 'ocfps', 'AR', 'BR', 'AR_BR', 'cashflow_to_ev_factor', 'book_to_price_ratio', 'turnover_rate_mean_5', 'variance_20', 'bbi_ratio_factor', 'daily_deviation', 'lg_elg_net_buy_vol', 'flow_lg_elg_intensity', 'sm_net_buy_vol', 'total_buy_vol', 'lg_elg_buy_prop', 'flow_struct_buy_change', 'lg_elg_net_buy_vol_change', 'flow_lg_elg_accel', 'chip_concentration_range', 'chip_skewness', 'floating_chip_proxy', 'cost_support_15pct_change', 'cat_winner_price_zone', 'flow_chip_consistency', 'profit_taking_vs_absorb', 'cat_is_positive', 'upside_vol', 'downside_vol', 'vol_ratio', 'return_skew', 'return_kurtosis', 'volume_change_rate', 'cat_volume_breakout', 'turnover_deviation', 'cat_turnover_spike', 'avg_volume_ratio', 'cat_volume_ratio_breakout', 'vol_spike', 'vol_std_5', 'atr_14', 'atr_6', 'obv', 'maobv_6', 'rsi_3', 'return_5', 'return_20', 'std_return_5', 'std_return_90', 'std_return_90_2', 'act_factor1', 'act_factor2', 'act_factor3', 'act_factor4', 'rank_act_factor1', 'rank_act_factor2', 'rank_act_factor3', 'cov', 'delta_cov', 'alpha_22_improved', 'alpha_003', 'alpha_007', 'alpha_013', 'vol_break', 'weight_roc5', 'smallcap_concentration', 'cost_stability', 'high_cost_break_days', 'liquidity_risk', 'turnover_std', 'mv_volatility', 'volume_growth', 'mv_growth', 'momentum_factor', 'resonance_factor', 'log_close', 'cat_vol_spike', 'up', 'down', 'obv_maobv_6', 'std_return_5_over_std_return_90', 'std_return_90_minus_std_return_90_2', 'cat_af2', 'cat_af3', 'cat_af4', 'act_factor5', 'act_factor6', 'active_buy_volume_large', 'active_buy_volume_big', 'active_buy_volume_small', 'buy_lg_vol_minus_sell_lg_vol', 'buy_elg_vol_minus_sell_elg_vol', 'ctrl_strength', 'low_cost_dev', 'asymmetry', 'lock_factor', 'cat_vol_break', 'cost_atr_adj', 'cat_golden_resonance', 'mv_turnover_ratio', 'mv_adjusted_volume', 'mv_weighted_turnover', 'nonlinear_mv_volume', 'mv_volume_ratio', 'mv_momentum', 
'lg_flow_mom_corr_20_60', 'lg_flow_accel', 'profit_pressure', 'underwater_resistance', 'cost_conc_std_20', 'profit_decay_20', 'vol_amp_loss_20', 'vol_drop_profit_cnt_5', 'lg_flow_vol_interact_20', 'cost_break_confirm_cnt_5', 'atr_norm_channel_pos_14', 'turnover_diff_skew_20', 'lg_sm_flow_diverge_20', 'pullback_strong_20_20', 'vol_wgt_hist_pos_20', 'vol_adj_roc_20', 'cs_rank_net_lg_flow_val', 'cs_rank_elg_buy_ratio', 'cs_rank_rel_profit_margin', 'cs_rank_cost_breadth', 'cs_rank_dist_to_upper_cost', 'cs_rank_winner_rate', 'cs_rank_intraday_range', 'cs_rank_close_pos_in_range', 'cs_rank_pos_in_hist_range', 'cs_rank_vol_x_profit_margin', 'cs_rank_lg_flow_price_concordance', 'cs_rank_turnover_per_winner', 'cs_rank_volume_ratio', 'cs_rank_elg_buy_sell_sm_ratio', 'cs_rank_cost_dist_vol_ratio', 'cs_rank_size', 'cat_up_limit', 'industry_obv', 'industry_return_5', 'industry_return_20', 'industry__ema_5', 'industry__ema_13', 'industry__ema_20', 'industry__ema_60', 'industry_act_factor1', 'industry_act_factor2', 'industry_act_factor3', 'industry_act_factor4', 'industry_act_factor5', 'industry_act_factor6', 'industry_rank_act_factor1', 'industry_rank_act_factor2', 'industry_rank_act_factor3', 'industry_return_5_percentile', 'industry_return_20_percentile', '000852.SH_MACD', '000905.SH_MACD', '399006.SZ_MACD', '000852.SH_MACD_hist', '000905.SH_MACD_hist', '399006.SZ_MACD_hist', '000852.SH_RSI', '000905.SH_RSI', '399006.SZ_RSI', '000852.SH_Signal_line', '000905.SH_Signal_line', '399006.SZ_Signal_line', '000852.SH_amount_change_rate', '000905.SH_amount_change_rate', '399006.SZ_amount_change_rate', '000852.SH_amount_mean', '000905.SH_amount_mean', '399006.SZ_amount_mean', '000852.SH_daily_return', '000905.SH_daily_return', '399006.SZ_daily_return', '000852.SH_up_ratio_20d', '000905.SH_up_ratio_20d', '399006.SZ_up_ratio_20d', '000852.SH_volatility', '000905.SH_volatility', '399006.SZ_volatility', '000852.SH_volume_change_rate', '000905.SH_volume_change_rate', 
'399006.SZ_volume_change_rate']\n",
"df最小日期: 2019-01-02\n",
"df最大日期: 2025-05-16\n",
"2057777\n",
"train_data最小日期: 2020-01-02\n",
"train_data最大日期: 2022-12-30\n",
"1751669\n",
"test_data最小日期: 2023-01-03\n",
"test_data最大日期: 2025-05-16\n",
" ts_code trade_date log_circ_mv\n",
"0 000001.SZ 2019-01-02 16.574219\n",
"1 000001.SZ 2019-01-03 16.583965\n",
"2 000001.SZ 2019-01-04 16.633371\n"
]
}
],
"source": [
"split_date = '2023-01-01'\n",
"train_data = df[filter_index & (df['trade_date'] <= split_date) & (df['trade_date'] >= '2020-01-01')]\n",
"test_data = df[(df['trade_date'] >= split_date)]\n",
"\n",
"print(df[['ts_code', 'trade_date', 'log_circ_mv']].head(3))\n",
"\n",
"industry_df = industry_df.sort_values(by=['trade_date'])\n",
"index_data = index_data.sort_values(by=['trade_date'])\n",
"\n",
"# train_data = train_data.merge(industry_df, on=['cat_l2_code', 'trade_date'], how='left')\n",
"# train_data = train_data.merge(index_data, on='trade_date', how='left')\n",
"# test_data = test_data.merge(industry_df, on=['cat_l2_code', 'trade_date'], how='left')\n",
"# test_data = test_data.merge(index_data, on='trade_date', how='left')\n",
"\n",
"train_data, test_data = train_data.replace([np.inf, -np.inf], np.nan), test_data.replace([np.inf, -np.inf], np.nan)\n",
"\n",
"# feature_columns_new = feature_columns[:]\n",
"# train_data, _ = create_deviation_within_dates(train_data, [col for col in feature_columns if col in train_data.columns])\n",
"# test_data, _ = create_deviation_within_dates(test_data, [col for col in feature_columns if col in train_data.columns])\n",
"\n",
"# feature_columns = [\n",
"# 'undist_profit_ps', \n",
"# 'AR_BR',\n",
"# 'pe_ttm',\n",
"# 'alpha_22_improved', \n",
"# 'alpha_003', \n",
"# 'alpha_007', \n",
"# 'alpha_013', \n",
"# 'cat_up_limit', \n",
"# 'cat_down_limit', \n",
"# 'up_limit_count_10d', \n",
"# 'down_limit_count_10d', \n",
"# 'consecutive_up_limit', \n",
"# 'vol_break', \n",
"# 'weight_roc5', \n",
"# 'price_cost_divergence', \n",
"# 'smallcap_concentration', \n",
"# 'cost_stability', \n",
"# 'high_cost_break_days', \n",
"# 'liquidity_risk', \n",
"# 'turnover_std', \n",
"# 'mv_volatility', \n",
"# 'volume_growth', \n",
"# 'mv_growth', \n",
"# 'lg_flow_mom_corr_20_60', \n",
"# 'lg_flow_accel', \n",
"# 'profit_pressure', \n",
"# 'underwater_resistance', \n",
"# 'cost_conc_std_20', \n",
"# 'profit_decay_20', \n",
"# 'vol_amp_loss_20', \n",
"# 'vol_drop_profit_cnt_5', \n",
"# 'lg_flow_vol_interact_20', \n",
"# 'cost_break_confirm_cnt_5', \n",
"# 'atr_norm_channel_pos_14', \n",
"# 'turnover_diff_skew_20', \n",
"# 'lg_sm_flow_diverge_20', \n",
"# 'pullback_strong_20_20', \n",
"# 'vol_wgt_hist_pos_20', \n",
"# 'vol_adj_roc_20',\n",
"# 'cashflow_to_ev_factor',\n",
"# 'ocfps',\n",
"# 'book_to_price_ratio',\n",
"# 'turnover_rate_mean_5',\n",
"# 'variance_20',\n",
"# 'bbi_ratio_factor'\n",
"# ]\n",
"# feature_columns = [col for col in feature_columns if col in train_data.columns]\n",
"# feature_columns = [col for col in feature_columns if not col.startswith('_')]\n",
"\n",
"# Restrict to numeric dtypes that are also declared features.\n",
"numeric_columns = df.select_dtypes(include=['float64', 'int64']).columns\n",
"numeric_columns = [col for col in numeric_columns if col in feature_columns]\n",
"# feature_columns = select_top_features_by_rankic(df, numeric_columns, n=10)\n",
"print(feature_columns)\n",
"\n",
"# train_data = fill_nan_with_daily_median(train_data, feature_columns)\n",
"# test_data = fill_nan_with_daily_median(test_data, feature_columns)\n",
"\n",
"# Drop rows with missing feature values or a missing label, then reset the index.\n",
"train_data = train_data.dropna(subset=[col for col in feature_columns if col in train_data.columns])\n",
"train_data = train_data.dropna(subset=['label'])\n",
"train_data = train_data.reset_index(drop=True)\n",
"# print(test_data.tail())\n",
"# NOTE(review): the membership check below uses train_data.columns - presumably\n",
"# it should be test_data.columns; confirm the two frames share the same schema.\n",
"test_data = test_data.dropna(subset=[col for col in feature_columns if col in train_data.columns])\n",
"# test_data = test_data.dropna(subset=['label'])\n",
"test_data = test_data.reset_index(drop=True)\n",
"\n",
"# Columns eligible for cross-sectional transforms: exclude 'cat*' categorical\n",
"# features and anything not present in the training frame.\n",
"transform_feature_columns = feature_columns\n",
"transform_feature_columns = [col for col in transform_feature_columns if col in feature_columns and not col.startswith('cat') and col in train_data.columns]\n",
"# transform_feature_columns.remove('undist_profit_ps')\n",
"print('去除极值')\n",
"cs_mad_filter(train_data, transform_feature_columns)\n",
"# print('中性化')\n",
"# cs_neutralize_industry_cap(train_data, transform_feature_columns)\n",
"# print('标准化')\n",
"# cs_zscore_standardize(train_data, transform_feature_columns)\n",
"\n",
"# Apply the same outlier clipping to the test frame (no neutralize/zscore).\n",
"cs_mad_filter(test_data, transform_feature_columns)\n",
"# cs_neutralize_industry_cap(test_data, transform_feature_columns)\n",
"# cs_zscore_standardize(test_data, transform_feature_columns)\n",
"\n",
"# MAD-filter the remaining non-categorical features that were excluded above.\n",
"mad_filter_feature_columns = [col for col in feature_columns if col not in transform_feature_columns and not col.startswith('cat') and col in train_data.columns]\n",
"cs_mad_filter(train_data, mad_filter_feature_columns)\n",
"cs_mad_filter(test_data, mad_filter_feature_columns)\n",
"\n",
"\n",
"print(f'feature_columns: {feature_columns}')\n",
"\n",
"\n",
"# Sanity-check the date coverage of the full frame and of both splits.\n",
"print(f\"df最小日期: {df['trade_date'].min().strftime('%Y-%m-%d')}\")\n",
"print(f\"df最大日期: {df['trade_date'].max().strftime('%Y-%m-%d')}\")\n",
"print(len(train_data))\n",
"print(f\"train_data最小日期: {train_data['trade_date'].min().strftime('%Y-%m-%d')}\")\n",
"print(f\"train_data最大日期: {train_data['trade_date'].max().strftime('%Y-%m-%d')}\")\n",
"print(len(test_data))\n",
"print(f\"test_data最小日期: {test_data['trade_date'].min().strftime('%Y-%m-%d')}\")\n",
"print(f\"test_data最大日期: {test_data['trade_date'].max().strftime('%Y-%m-%d')}\")\n",
"\n",
"# Cast 'cat*' columns to pandas 'category' dtype so the GBM libraries treat\n",
"# them as categorical features.\n",
"cat_columns = [col for col in feature_columns if col.startswith('cat')]\n",
"for col in cat_columns:\n",
"    train_data[col] = train_data[col].astype('category')\n",
"    test_data[col] = test_data[col].astype('category')\n",
"\n",
"print(df[['ts_code', 'trade_date', 'log_circ_mv']].head(3))\n"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "3ff2d1c5",
"metadata": {},
"outputs": [],
"source": [
"from sklearn.preprocessing import StandardScaler\n",
"from sklearn.linear_model import LogisticRegression\n",
"import matplotlib.pyplot as plt # 保持 matplotlib 导入尽管LightGBM的绘图功能已移除\n",
"from sklearn.decomposition import PCA\n",
"import pandas as pd\n",
"import numpy as np\n",
"import datetime # 用于日期计算\n",
"from catboost import CatBoostClassifier, CatBoostRegressor\n",
"from catboost import Pool\n",
"import lightgbm as lgb\n",
"from hypergbm import make_experiment\n",
"# from supervised.automl import AutoML\n",
"from flaml import AutoML, tune\n",
"from flaml.automl.model import LGBMEstimator, CatBoostEstimator\n",
"from lightgbm import LGBMClassifier, LGBMRegressor\n",
"from tabpfn import TabPFNClassifier, TabPFNRegressor\n",
"\n",
"class MyLGBM(LGBMEstimator):\n",
"    \"\"\"Custom FLAML LightGBM learner that always trains with unlimited depth.\"\"\"\n",
"    def __init__(self, **config):\n",
"        # Force max_depth=-1 (no depth limit). Remove any searched value first\n",
"        # so passing max_depth twice cannot raise a duplicate-keyword TypeError.\n",
"        config.pop('max_depth', None)\n",
"        super().__init__(max_depth=-1, **config)\n",
"\n",
"class MyCat(CatBoostEstimator):\n",
"    \"\"\"Custom FLAML CatBoost learner with pinned hyper-parameters.\"\"\"\n",
"    def __init__(self, **config):\n",
"        # NOTE(review): the incoming `config` (FLAML's searched parameters) is\n",
"        # discarded and replaced by this fixed setting, which disables tuning\n",
"        # for this learner - confirm that is intended.\n",
"        config = {\n",
"            'depth': 10,  # controls model complexity\n",
"            'l2_leaf_reg': 50,  # L2 regularization\n",
"            # 'task_type': 'GPU',\n",
"        }\n",
"        super().__init__(**config)\n",
"\n",
"def train_model(train_data_df, feature_columns,\n",
"                print_info=True,\n",
"                validation_days=180, use_pca=False, split_date=None,\n",
"                target_column='label', type='light'):\n",
"    \"\"\"Fit a FLAML AutoML regression ensemble on a time-ordered stock panel.\n",
"\n",
"    Parameters\n",
"    ----------\n",
"    train_data_df : pd.DataFrame\n",
"        Must contain 'ts_code', 'trade_date', 'log_circ_mv', the feature\n",
"        columns and the target column.\n",
"    feature_columns : list[str]\n",
"        Columns used as model inputs.\n",
"    print_info : bool\n",
"        Print sample counts before/after dropping rows with a missing label.\n",
"    validation_days : int\n",
"        Number of the most recent distinct trade dates held out for\n",
"        validation when `split_date` is None.\n",
"    use_pca : bool\n",
"        Unused; kept for backward compatibility.\n",
"    split_date : optional\n",
"        Explicit train/validation split date; overrides `validation_days`.\n",
"    target_column : str\n",
"        Name of the label column.\n",
"    type : str\n",
"        Unused; kept for backward compatibility.\n",
"\n",
"    Returns\n",
"    -------\n",
"    flaml.AutoML\n",
"        The fitted AutoML object.\n",
"    \"\"\"\n",
"    print('train data size: ', len(train_data_df))\n",
"    print(train_data_df[['ts_code', 'trade_date', 'log_circ_mv']])\n",
"    # Sort chronologically so the holdout split below is a true time split.\n",
"    train_data_df = train_data_df.sort_values(by='trade_date')\n",
"\n",
"    # Drop samples with a missing target.\n",
"    initial_len = len(train_data_df)\n",
"    train_data_df = train_data_df.dropna(subset=[target_column])\n",
"\n",
"    if print_info:\n",
"        print(f'原始样本数: {initial_len}, 去除标签为空后样本数: {len(train_data_df)}')\n",
"\n",
"    if split_date is None:\n",
"        # unique() preserves order of first appearance; the frame is already\n",
"        # sorted, so the tail entries are the most recent distinct dates.\n",
"        all_dates = train_data_df['trade_date'].unique()\n",
"        split_date = all_dates[-validation_days]\n",
"    train_data_split = train_data_df[train_data_df['trade_date'] < split_date]\n",
"    val_data_split = train_data_df[train_data_df['trade_date'] >= split_date]\n",
"\n",
"    X_train = train_data_split[feature_columns]\n",
"    y_train = train_data_split[target_column]\n",
"\n",
"    X_val = val_data_split[feature_columns]\n",
"    # Bug fix: the validation label was hard-coded to 'label'; it now honors\n",
"    # `target_column`, consistent with the training label above.\n",
"    y_val = val_data_split[target_column]\n",
"\n",
"    automl = AutoML()\n",
"    automl.add_learner(learner_name=\"my_lgbm\", learner_class=MyLGBM)\n",
"    automl.add_learner(learner_name=\"my_cat\", learner_class=MyCat)\n",
"\n",
"    automl_settings = {\n",
"        \"time_budget\": 600,  # in seconds\n",
"        \"metric\": \"r2\",\n",
"        \"task\": \"regression\",\n",
"        \"estimator_list\": [\n",
"            \"catboost\",\n",
"            \"lgbm\",\n",
"            \"xgboost\"\n",
"        ],\n",
"        \"ensemble\": {\n",
"            \"final_estimator\": LGBMRegressor(),\n",
"            \"passthrough\": True,\n",
"        },\n",
"    }\n",
"\n",
"    # Per-estimator search-space overrides (none at the moment).\n",
"    custom_hp = {}\n",
"    # Train with the explicit chronological holdout as the validation set.\n",
"    automl.fit(X_train=X_train, y_train=y_train, X_val=X_val, y_val=y_val, custom_hp=custom_hp, **automl_settings)\n",
"\n",
"    return automl"
]
},
{
"cell_type": "code",
"execution_count": 26,
"id": "c6eb5cd4-e714-420a-ac48-39af3e11ee81",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T15:03:18.426481Z",
"start_time": "2025-04-03T15:02:19.926352Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"train data size: 364000\n",
" ts_code trade_date log_circ_mv\n",
"0 600306.SH 2020-01-02 11.552040\n",
"1 603269.SH 2020-01-02 11.324801\n",
"2 002633.SZ 2020-01-02 11.759023\n",
"3 603991.SH 2020-01-02 11.181150\n",
"4 000691.SZ 2020-01-02 11.677910\n",
"... ... ... ...\n",
"363995 603182.SH 2022-12-30 11.207510\n",
"363996 600749.SH 2022-12-30 12.594148\n",
"363997 605259.SH 2022-12-30 11.763909\n",
"363998 603600.SH 2022-12-30 12.594561\n",
"363999 603116.SH 2022-12-30 12.594781\n",
"\n",
"[364000 rows x 3 columns]\n",
"原始样本数: 364000, 去除标签为空后样本数: 364000\n",
"[flaml.automl.logger: 05-22 16:22:47] {1728} INFO - task = regression\n",
"[flaml.automl.logger: 05-22 16:22:47] {1736} INFO - Data split method: uniform\n",
"[flaml.automl.logger: 05-22 16:22:47] {1739} INFO - Evaluation method: holdout\n",
"[flaml.automl.logger: 05-22 16:22:47] {1838} INFO - Minimizing error metric: 1-r2\n",
"[flaml.automl.logger: 05-22 16:22:47] {1955} INFO - List of ML learners in AutoML Run: ['catboost', 'lgbm', 'xgboost']\n",
"[flaml.automl.logger: 05-22 16:22:47] {2258} INFO - iteration 0, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:22:57] {2393} INFO - Estimated sufficient time budget=2563037s. Estimated necessary time budget=2563s.\n",
"[flaml.automl.logger: 05-22 16:22:57] {2442} INFO - at 21.3s,\testimator catboost's best error=1.1178,\tbest estimator catboost's best error=1.1178\n",
"[flaml.automl.logger: 05-22 16:22:57] {2258} INFO - iteration 1, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:22:57] {2442} INFO - at 21.5s,\testimator lgbm's best error=1.0006,\tbest estimator lgbm's best error=1.0006\n",
"[flaml.automl.logger: 05-22 16:22:57] {2258} INFO - iteration 2, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:22:57] {2442} INFO - at 21.8s,\testimator lgbm's best error=0.9969,\tbest estimator lgbm's best error=0.9969\n",
"[flaml.automl.logger: 05-22 16:22:57] {2258} INFO - iteration 3, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:22:57] {2442} INFO - at 22.1s,\testimator lgbm's best error=0.9969,\tbest estimator lgbm's best error=0.9969\n",
"[flaml.automl.logger: 05-22 16:22:57] {2258} INFO - iteration 4, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:24:21] {2442} INFO - at 105.3s,\testimator catboost's best error=1.0733,\tbest estimator lgbm's best error=0.9969\n",
"[flaml.automl.logger: 05-22 16:24:21] {2258} INFO - iteration 5, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:21] {2442} INFO - at 105.6s,\testimator lgbm's best error=0.9969,\tbest estimator lgbm's best error=0.9969\n",
"[flaml.automl.logger: 05-22 16:24:21] {2258} INFO - iteration 6, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:21] {2442} INFO - at 105.8s,\testimator lgbm's best error=0.9969,\tbest estimator lgbm's best error=0.9969\n",
"[flaml.automl.logger: 05-22 16:24:21] {2258} INFO - iteration 7, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:21] {2442} INFO - at 106.0s,\testimator lgbm's best error=0.9917,\tbest estimator lgbm's best error=0.9917\n",
"[flaml.automl.logger: 05-22 16:24:21] {2258} INFO - iteration 8, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:22] {2442} INFO - at 106.3s,\testimator lgbm's best error=0.9917,\tbest estimator lgbm's best error=0.9917\n",
"[flaml.automl.logger: 05-22 16:24:22] {2258} INFO - iteration 9, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:22] {2442} INFO - at 106.5s,\testimator lgbm's best error=0.9917,\tbest estimator lgbm's best error=0.9917\n",
"[flaml.automl.logger: 05-22 16:24:22] {2258} INFO - iteration 10, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:22] {2442} INFO - at 106.8s,\testimator lgbm's best error=0.9917,\tbest estimator lgbm's best error=0.9917\n",
"[flaml.automl.logger: 05-22 16:24:22] {2258} INFO - iteration 11, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:22] {2442} INFO - at 107.0s,\testimator lgbm's best error=0.9917,\tbest estimator lgbm's best error=0.9917\n",
"[flaml.automl.logger: 05-22 16:24:22] {2258} INFO - iteration 12, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:23] {2442} INFO - at 107.6s,\testimator lgbm's best error=0.9828,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:23] {2258} INFO - iteration 13, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:23] {2442} INFO - at 108.0s,\testimator xgboost's best error=1.0014,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:23] {2258} INFO - iteration 14, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:24] {2442} INFO - at 108.4s,\testimator xgboost's best error=0.9876,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:24] {2258} INFO - iteration 15, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:24] {2442} INFO - at 108.7s,\testimator xgboost's best error=0.9876,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:24] {2258} INFO - iteration 16, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:24] {2442} INFO - at 109.1s,\testimator xgboost's best error=0.9876,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:24] {2258} INFO - iteration 17, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:25] {2442} INFO - at 109.7s,\testimator lgbm's best error=0.9828,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:25] {2258} INFO - iteration 18, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:25] {2442} INFO - at 110.1s,\testimator xgboost's best error=0.9876,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:25] {2258} INFO - iteration 19, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:26] {2442} INFO - at 110.5s,\testimator xgboost's best error=0.9876,\tbest estimator lgbm's best error=0.9828\n",
"[flaml.automl.logger: 05-22 16:24:26] {2258} INFO - iteration 20, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:26] {2442} INFO - at 111.2s,\testimator xgboost's best error=0.9802,\tbest estimator xgboost's best error=0.9802\n",
"[flaml.automl.logger: 05-22 16:24:26] {2258} INFO - iteration 21, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:27] {2442} INFO - at 111.8s,\testimator lgbm's best error=0.9828,\tbest estimator xgboost's best error=0.9802\n",
"[flaml.automl.logger: 05-22 16:24:27] {2258} INFO - iteration 22, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:28] {2442} INFO - at 112.4s,\testimator lgbm's best error=0.9828,\tbest estimator xgboost's best error=0.9802\n",
"[flaml.automl.logger: 05-22 16:24:28] {2258} INFO - iteration 23, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:28] {2442} INFO - at 113.1s,\testimator xgboost's best error=0.9802,\tbest estimator xgboost's best error=0.9802\n",
"[flaml.automl.logger: 05-22 16:24:28] {2258} INFO - iteration 24, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:29] {2442} INFO - at 113.9s,\testimator xgboost's best error=0.9802,\tbest estimator xgboost's best error=0.9802\n",
"[flaml.automl.logger: 05-22 16:24:29] {2258} INFO - iteration 25, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:30] {2442} INFO - at 114.4s,\testimator lgbm's best error=0.9828,\tbest estimator xgboost's best error=0.9802\n",
"[flaml.automl.logger: 05-22 16:24:30] {2258} INFO - iteration 26, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:30] {2442} INFO - at 115.1s,\testimator xgboost's best error=0.9750,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:30] {2258} INFO - iteration 27, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:31] {2442} INFO - at 115.8s,\testimator xgboost's best error=0.9750,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:31] {2258} INFO - iteration 28, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:32] {2442} INFO - at 116.6s,\testimator xgboost's best error=0.9750,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:32] {2258} INFO - iteration 29, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:33] {2442} INFO - at 117.3s,\testimator lgbm's best error=0.9806,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:33] {2258} INFO - iteration 30, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:33] {2442} INFO - at 117.8s,\testimator lgbm's best error=0.9806,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:33] {2258} INFO - iteration 31, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:34] {2442} INFO - at 118.9s,\testimator lgbm's best error=0.9806,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:34] {2258} INFO - iteration 32, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:35] {2442} INFO - at 119.9s,\testimator xgboost's best error=0.9750,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:35] {2258} INFO - iteration 33, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:36] {2442} INFO - at 120.5s,\testimator lgbm's best error=0.9806,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:36] {2258} INFO - iteration 34, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:36] {2442} INFO - at 121.1s,\testimator lgbm's best error=0.9806,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:36] {2258} INFO - iteration 35, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:37] {2442} INFO - at 122.0s,\testimator xgboost's best error=0.9750,\tbest estimator xgboost's best error=0.9750\n",
"[flaml.automl.logger: 05-22 16:24:37] {2258} INFO - iteration 36, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:40] {2442} INFO - at 124.6s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:24:40] {2258} INFO - iteration 37, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:42] {2442} INFO - at 127.2s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:24:42] {2258} INFO - iteration 38, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:24:46] {2442} INFO - at 130.6s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:24:46] {2258} INFO - iteration 39, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:48] {2442} INFO - at 133.1s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:24:48] {2258} INFO - iteration 40, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:24:52] {2442} INFO - at 136.4s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:24:52] {2258} INFO - iteration 41, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:24:57] {2442} INFO - at 142.1s,\testimator catboost's best error=1.0733,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:24:57] {2258} INFO - iteration 42, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:25:00] {2442} INFO - at 144.7s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:00] {2258} INFO - iteration 43, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:25:05] {2442} INFO - at 149.7s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:05] {2258} INFO - iteration 44, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:25:07] {2442} INFO - at 152.0s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:07] {2258} INFO - iteration 45, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:25:10] {2442} INFO - at 154.7s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:10] {2258} INFO - iteration 46, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:25:16] {2442} INFO - at 161.0s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:16] {2258} INFO - iteration 47, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:25:19] {2442} INFO - at 163.5s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:19] {2258} INFO - iteration 48, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:25:22] {2442} INFO - at 166.5s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:22] {2258} INFO - iteration 49, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:25:24] {2442} INFO - at 168.9s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:24] {2258} INFO - iteration 50, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:25:28] {2442} INFO - at 172.4s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:28] {2258} INFO - iteration 51, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:25:30] {2442} INFO - at 175.0s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:30] {2258} INFO - iteration 52, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:25:33] {2442} INFO - at 178.1s,\testimator lgbm's best error=0.9766,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:25:33] {2258} INFO - iteration 53, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:26:56] {2442} INFO - at 261.2s,\testimator catboost's best error=1.0733,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:26:56] {2258} INFO - iteration 54, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:26:59] {2442} INFO - at 263.9s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:26:59] {2258} INFO - iteration 55, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:27:02] {2442} INFO - at 266.7s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:02] {2258} INFO - iteration 56, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:27:05] {2442} INFO - at 269.5s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:05] {2258} INFO - iteration 57, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:27:08] {2442} INFO - at 273.1s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:08] {2258} INFO - iteration 58, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:27:14] {2442} INFO - at 278.7s,\testimator catboost's best error=1.0733,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:14] {2258} INFO - iteration 59, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:27:17] {2442} INFO - at 281.8s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:17] {2258} INFO - iteration 60, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:27:20] {2442} INFO - at 285.1s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:20] {2258} INFO - iteration 61, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:27:24] {2442} INFO - at 288.5s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:24] {2258} INFO - iteration 62, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:27:27] {2442} INFO - at 291.5s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:27:27] {2258} INFO - iteration 63, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:29:45] {2442} INFO - at 429.8s,\testimator catboost's best error=1.0733,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:29:45] {2258} INFO - iteration 64, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:29:48] {2442} INFO - at 432.5s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:29:48] {2258} INFO - iteration 65, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:29:50] {2442} INFO - at 434.7s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:29:50] {2258} INFO - iteration 66, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:29:53] {2442} INFO - at 437.6s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:29:53] {2258} INFO - iteration 67, current learner catboost\n",
"[flaml.automl.logger: 05-22 16:29:59] {2442} INFO - at 443.3s,\testimator catboost's best error=1.0733,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:29:59] {2258} INFO - iteration 68, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:04] {2442} INFO - at 449.0s,\testimator lgbm's best error=0.9765,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:04] {2258} INFO - iteration 69, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:30:10] {2442} INFO - at 454.8s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:10] {2258} INFO - iteration 70, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:13] {2442} INFO - at 457.7s,\testimator lgbm's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:13] {2258} INFO - iteration 71, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:30:15] {2442} INFO - at 460.0s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:15] {2258} INFO - iteration 72, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:18] {2442} INFO - at 463.1s,\testimator lgbm's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:18] {2258} INFO - iteration 73, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:30:21] {2442} INFO - at 466.0s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:21] {2258} INFO - iteration 74, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:24] {2442} INFO - at 468.6s,\testimator lgbm's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:24] {2258} INFO - iteration 75, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:28] {2442} INFO - at 473.0s,\testimator lgbm's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:28] {2258} INFO - iteration 76, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:30:31] {2442} INFO - at 475.7s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:31] {2258} INFO - iteration 77, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:34] {2442} INFO - at 478.3s,\testimator lgbm's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:34] {2258} INFO - iteration 78, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:36] {2442} INFO - at 480.8s,\testimator lgbm's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:36] {2258} INFO - iteration 79, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:30:41] {2442} INFO - at 485.5s,\testimator xgboost's best error=0.9734,\tbest estimator xgboost's best error=0.9734\n",
"[flaml.automl.logger: 05-22 16:30:41] {2258} INFO - iteration 80, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:45] {2442} INFO - at 489.4s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:30:45] {2258} INFO - iteration 81, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:51] {2442} INFO - at 495.4s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:30:51] {2258} INFO - iteration 82, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:30:54] {2442} INFO - at 498.4s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:30:54] {2258} INFO - iteration 83, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:30:58] {2442} INFO - at 502.7s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:30:58] {2258} INFO - iteration 84, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:31:01] {2442} INFO - at 505.3s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:01] {2258} INFO - iteration 85, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:31:12] {2442} INFO - at 517.1s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:12] {2258} INFO - iteration 86, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:31:15] {2442} INFO - at 519.9s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:15] {2258} INFO - iteration 87, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:31:27] {2442} INFO - at 531.4s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:27] {2258} INFO - iteration 88, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:31:29] {2442} INFO - at 533.8s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:29] {2258} INFO - iteration 89, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:31:32] {2442} INFO - at 536.6s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:32] {2258} INFO - iteration 90, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:31:35] {2442} INFO - at 539.6s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:35] {2258} INFO - iteration 91, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:31:37] {2442} INFO - at 542.2s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:37] {2258} INFO - iteration 92, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:31:43] {2442} INFO - at 547.9s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:31:43] {2258} INFO - iteration 93, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:32:01] {2442} INFO - at 565.6s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:01] {2258} INFO - iteration 94, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:32:03] {2442} INFO - at 568.0s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:03] {2258} INFO - iteration 95, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:32:12] {2442} INFO - at 577.0s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:12] {2258} INFO - iteration 96, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:32:15] {2442} INFO - at 580.2s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:15] {2258} INFO - iteration 97, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:32:18] {2442} INFO - at 583.0s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:18] {2258} INFO - iteration 98, current learner lgbm\n",
"[flaml.automl.logger: 05-22 16:32:28] {2442} INFO - at 592.6s,\testimator lgbm's best error=0.9731,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:28] {2258} INFO - iteration 99, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:32:30] {2442} INFO - at 595.1s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:30] {2258} INFO - iteration 100, current learner xgboost\n",
"[flaml.automl.logger: 05-22 16:32:33] {2442} INFO - at 598.0s,\testimator xgboost's best error=0.9734,\tbest estimator lgbm's best error=0.9731\n",
"[flaml.automl.logger: 05-22 16:32:33] {2550} INFO - selected model: LGBMRegressor(colsample_bytree=0.912038573841834,\n",
" learning_rate=0.02078849055957606, max_bin=1023,\n",
" min_child_samples=38, n_estimators=18, n_jobs=-1, num_leaves=11,\n",
" reg_alpha=0.061774595197632225, reg_lambda=0.003793636570300973,\n",
" verbose=-1)\n",
"[flaml.automl.logger: 05-22 16:32:33] {2582} INFO - [('lgbm', {'n_jobs': -1, 'n_estimators': 18, 'num_leaves': 11, 'min_child_samples': 38, 'learning_rate': 0.02078849055957606, 'colsample_bytree': 0.912038573841834, 'reg_alpha': 0.061774595197632225, 'reg_lambda': 0.003793636570300973, 'max_bin': 1023, 'verbose': -1}), ('xgboost', {'n_jobs': -1, 'n_estimators': 4, 'max_leaves': 12, 'min_child_weight': 5.4998639205908075, 'learning_rate': 0.04700136686803946, 'subsample': 1.0, 'colsample_bylevel': 0.8425853155117716, 'colsample_bytree': 0.9245352674213118, 'reg_alpha': 0.025118956715098555, 'reg_lambda': 27.89255832621344, 'max_depth': 0, 'grow_policy': 'lossguide', 'tree_method': 'hist', 'verbosity': 0}), ('catboost', {'early_stopping_rounds': 10, 'learning_rate': 0.005, 'n_estimators': 8192, 'thread_count': -1, 'verbose': False, 'random_seed': 10242048})]\n",
"[flaml.automl.logger: 05-22 16:32:33] {2625} INFO - Building ensemble with tuned estimators\n",
"[flaml.automl.logger: 05-22 16:33:39] {2631} INFO - ensemble: StackingRegressor(estimators=[('lgbm',\n",
" <flaml.automl.model.LGBMEstimator object at 0x0000028A9571BF50>),\n",
" ('xgboost',\n",
" <flaml.automl.model.XGBoostSklearnEstimator object at 0x0000028A95719BD0>),\n",
" ('catboost',\n",
" <flaml.automl.model.CatBoostEstimator object at 0x0000028A95719D50>)],\n",
" final_estimator=LGBMRegressor(), n_jobs=1, passthrough=True)\n",
"[flaml.automl.logger: 05-22 16:33:39] {1985} INFO - fit succeeded\n",
"[flaml.automl.logger: 05-22 16:33:39] {1986} INFO - Time taken to find the best model: 489.3946213722229\n",
"[flaml.automl.logger: 05-22 16:33:39] {1996} WARNING - Time taken to find the best model is 82% of the provided time budget and not all estimators' hyperparameter search converged. Consider increasing the time budget.\n"
]
}
],
"source": [
"\n",
"# Train the ranking model on the 500 smallest-cap names per trading day.\n",
"# NOTE(review): depends on kernel state from earlier cells (train_data,\n",
"# industry_df, index_data, feature_columns, train_model) -- run in order.\n",
"gc.collect()\n",
"\n",
"use_pca = False\n",
"# NOTE(review): 'type' shadows the built-in type(); it is forwarded below as\n",
"# the train_model(..., type=...) keyword, so renaming it would touch the callee.\n",
"type = 'cat'\n",
"# feature_contri = [2 if feat.startswith('act_factor') or 'buy' in feat or 'sell' in feat else 1 for feat in feature_columns]\n",
"# light_params['feature_contri'] = feature_contri\n",
"# print(f'feature_contri: {feature_contri}')\n",
"# Drop unlabeled rows, keep the 500 smallest stocks by total market value\n",
"# per trade date, then attach industry- and index-level features.\n",
"model = train_model(train_data\n",
"                    .dropna(subset=['label']).groupby('trade_date', group_keys=False)\n",
"                    .apply(lambda x: x.nsmallest(500, 'total_mv'))\n",
"                    .merge(industry_df, on=['cat_l2_code', 'trade_date'], how='left')\n",
"                    .merge(index_data, on='trade_date', how='left'), feature_columns, type=type)\n"
]
},
{
"cell_type": "code",
"execution_count": 27,
"id": "59132b85",
"metadata": {},
"outputs": [],
"source": [
"# Debug helper (left disabled): inspect the base estimators inside the\n",
"# trained ensemble/model wrapper.\n",
"# for e in model.model.estimators:\n",
"#     print(e[1].estimator)"
]
},
{
"cell_type": "code",
"execution_count": 33,
"id": "5d1522a7538db91b",
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-03T15:04:39.656944Z",
"start_time": "2025-04-03T15:04:39.298483Z"
}
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|██████████| 6/6 [00:01<00:00, 3.22it/s]\n"
]
}
],
"source": [
"# Score the test set with the trained model and export the top picks per day.\n",
"# NOTE(review): depends on kernel state from earlier cells (test_data,\n",
"# industry_df, index_data, feature_columns, model) -- run in order.\n",
"score_df = test_data.groupby('trade_date', group_keys=False).apply(lambda x: x.nsmallest(500, 'total_mv'))\n",
"# score_df = fill_nan_with_daily_median(score_df, ['pe_ttm'])\n",
"# score_df = score_df[score_df['pe_ttm'] > 0]\n",
"score_df = score_df.merge(industry_df, on=['cat_l2_code', 'trade_date'], how='left')\n",
"score_df = score_df.merge(index_data, on='trade_date', how='left')\n",
"# score_df = score_df.groupby('trade_date', group_keys=False).apply(lambda x: x.nsmallest(50, 'total_mv')).reset_index()\n",
"numeric_columns = score_df.select_dtypes(include=['float64', 'int64']).columns\n",
"numeric_columns = [col for col in feature_columns if col in numeric_columns]\n",
"# score_df.loc[:, numeric_columns] = scaler.transform(score_df[numeric_columns])\n",
"# score_df = cross_sectional_standardization(score_df, numeric_columns)\n",
"\n",
"# score_df['score'] = model.predict_proba(score_df[feature_columns])[:, -1]\n",
"# score_df['score'] = model.predict(score_df[feature_columns])\n",
"# Predict in chunks to bound peak memory; each chunk's predictions keep the\n",
"# chunk's index so they can be re-aligned to score_df afterwards.\n",
"chunk_size = 5000 # rows per prediction chunk\n",
"predictions_list = []\n",
"num_rows = len(score_df)\n",
"\n",
"from tqdm import tqdm\n",
"for i in tqdm(range(0, num_rows, chunk_size)):\n",
"    chunk_df = score_df.iloc[i : i + chunk_size].copy()\n",
"    chunk_features = chunk_df[feature_columns]\n",
"    chunk_predictions_np = model.predict(chunk_features)\n",
"    chunk_predictions_series = pd.Series(chunk_predictions_np, index=chunk_df.index)\n",
"    predictions_list.append(chunk_predictions_series)\n",
"    del chunk_df, chunk_features, chunk_predictions_np, chunk_predictions_series\n",
"    gc.collect()\n",
"\n",
"combined_predictions = pd.concat(predictions_list)\n",
"# Index-aligned assignment; assumes score_df's index labels are unique --\n",
"# duplicate labels would misalign scores. TODO confirm index uniqueness here.\n",
"score_df['score'] = combined_predictions\n",
"\n",
"score_df['score_ranks'] = score_df.groupby('trade_date')['score'].rank(ascending=True)\n",
"\n",
"score_df = score_df.groupby('trade_date', group_keys=False).apply(\n",
"    lambda x: x[x['score'] >= x['score'].quantile(0.90)] # keep rows at or above the day's 90th-percentile score\n",
").reset_index(drop=True) # drop=True avoids adding the old index as a column\n",
"# Top 2 by score per day; note reset_index() here keeps the old index as a column.\n",
"save_df = score_df.groupby('trade_date', group_keys=False).apply(lambda x: x.nlargest(2, 'score')).reset_index()\n",
"# save_df = score_df.groupby('trade_date', group_keys=False).apply(lambda x: x.nsmallest(2, 'total_mv')).reset_index(drop=True)\n",
"save_df = save_df.sort_values(['trade_date', 'score'])\n",
"# NOTE(review): to_csv defaults to sep=',' -- the .tsv file is actually\n",
"# comma-separated; pass sep='\\t' if real TSV output is intended.\n",
"save_df[['trade_date', 'score', 'ts_code']].to_csv('predictions_test.tsv', index=False)\n"
]
},
{
"cell_type": "code",
"execution_count": 29,
"id": "09b1799e",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"191\n",
"['vol', 'pct_chg', 'turnover_rate', 'volume_ratio', 'winner_rate', 'undist_profit_ps', 'ocfps', 'AR', 'BR', 'AR_BR', 'cashflow_to_ev_factor', 'book_to_price_ratio', 'turnover_rate_mean_5', 'variance_20', 'bbi_ratio_factor', 'daily_deviation', 'lg_elg_net_buy_vol', 'flow_lg_elg_intensity', 'sm_net_buy_vol', 'total_buy_vol', 'lg_elg_buy_prop', 'flow_struct_buy_change', 'lg_elg_net_buy_vol_change', 'flow_lg_elg_accel', 'chip_concentration_range', 'chip_skewness', 'floating_chip_proxy', 'cost_support_15pct_change', 'cat_winner_price_zone', 'flow_chip_consistency', 'profit_taking_vs_absorb', 'cat_is_positive', 'upside_vol', 'downside_vol', 'vol_ratio', 'return_skew', 'return_kurtosis', 'volume_change_rate', 'cat_volume_breakout', 'turnover_deviation', 'cat_turnover_spike', 'avg_volume_ratio', 'cat_volume_ratio_breakout', 'vol_spike', 'vol_std_5', 'atr_14', 'atr_6', 'obv', 'maobv_6', 'rsi_3', 'return_5', 'return_20', 'std_return_5', 'std_return_90', 'std_return_90_2', 'act_factor1', 'act_factor2', 'act_factor3', 'act_factor4', 'rank_act_factor1', 'rank_act_factor2', 'rank_act_factor3', 'cov', 'delta_cov', 'alpha_22_improved', 'alpha_003', 'alpha_007', 'alpha_013', 'vol_break', 'weight_roc5', 'smallcap_concentration', 'cost_stability', 'high_cost_break_days', 'liquidity_risk', 'turnover_std', 'mv_volatility', 'volume_growth', 'mv_growth', 'momentum_factor', 'resonance_factor', 'log_close', 'cat_vol_spike', 'up', 'down', 'obv_maobv_6', 'std_return_5_over_std_return_90', 'std_return_90_minus_std_return_90_2', 'cat_af2', 'cat_af3', 'cat_af4', 'act_factor5', 'act_factor6', 'active_buy_volume_large', 'active_buy_volume_big', 'active_buy_volume_small', 'buy_lg_vol_minus_sell_lg_vol', 'buy_elg_vol_minus_sell_elg_vol', 'ctrl_strength', 'low_cost_dev', 'asymmetry', 'lock_factor', 'cat_vol_break', 'cost_atr_adj', 'cat_golden_resonance', 'mv_turnover_ratio', 'mv_adjusted_volume', 'mv_weighted_turnover', 'nonlinear_mv_volume', 'mv_volume_ratio', 'mv_momentum', 
'lg_flow_mom_corr_20_60', 'lg_flow_accel', 'profit_pressure', 'underwater_resistance', 'cost_conc_std_20', 'profit_decay_20', 'vol_amp_loss_20', 'vol_drop_profit_cnt_5', 'lg_flow_vol_interact_20', 'cost_break_confirm_cnt_5', 'atr_norm_channel_pos_14', 'turnover_diff_skew_20', 'lg_sm_flow_diverge_20', 'pullback_strong_20_20', 'vol_wgt_hist_pos_20', 'vol_adj_roc_20', 'cs_rank_net_lg_flow_val', 'cs_rank_elg_buy_ratio', 'cs_rank_rel_profit_margin', 'cs_rank_cost_breadth', 'cs_rank_dist_to_upper_cost', 'cs_rank_winner_rate', 'cs_rank_intraday_range', 'cs_rank_close_pos_in_range', 'cs_rank_pos_in_hist_range', 'cs_rank_vol_x_profit_margin', 'cs_rank_lg_flow_price_concordance', 'cs_rank_turnover_per_winner', 'cs_rank_volume_ratio', 'cs_rank_elg_buy_sell_sm_ratio', 'cs_rank_cost_dist_vol_ratio', 'cs_rank_size', 'cat_up_limit', 'industry_obv', 'industry_return_5', 'industry_return_20', 'industry__ema_5', 'industry__ema_13', 'industry__ema_20', 'industry__ema_60', 'industry_act_factor1', 'industry_act_factor2', 'industry_act_factor3', 'industry_act_factor4', 'industry_act_factor5', 'industry_act_factor6', 'industry_rank_act_factor1', 'industry_rank_act_factor2', 'industry_rank_act_factor3', 'industry_return_5_percentile', 'industry_return_20_percentile', '000852.SH_MACD', '000905.SH_MACD', '399006.SZ_MACD', '000852.SH_MACD_hist', '000905.SH_MACD_hist', '399006.SZ_MACD_hist', '000852.SH_RSI', '000905.SH_RSI', '399006.SZ_RSI', '000852.SH_Signal_line', '000905.SH_Signal_line', '399006.SZ_Signal_line', '000852.SH_amount_change_rate', '000905.SH_amount_change_rate', '399006.SZ_amount_change_rate', '000852.SH_amount_mean', '000905.SH_amount_mean', '399006.SZ_amount_mean', '000852.SH_daily_return', '000905.SH_daily_return', '399006.SZ_daily_return', '000852.SH_up_ratio_20d', '000905.SH_up_ratio_20d', '399006.SZ_up_ratio_20d', '000852.SH_volatility', '000905.SH_volatility', '399006.SZ_volatility', '000852.SH_volume_change_rate', '000905.SH_volume_change_rate', 
'399006.SZ_volume_change_rate']\n"
]
}
],
"source": [
"# Sanity check: how many feature columns the model uses, and which ones.\n",
"print(len(feature_columns))\n",
"print(feature_columns)"
]
},
{
"cell_type": "code",
"execution_count": 30,
"id": "7e9023cc",
"metadata": {},
"outputs": [],
"source": [
"def analyze_factors(\n",
"    df: pd.DataFrame,\n",
"    feature_columns: list[str],\n",
"    target_column: str = 'target', # name of the target/label column\n",
"    trade_date_col: str = 'trade_date', # name of the trading-date column\n",
"    mcap_col: str = 'total_mv', # name of the market-cap column\n",
"    mcap_bins: int = 5 # number of market-cap quantile buckets (5 = quintiles)\n",
") -> pd.DataFrame:\n",
"    \"\"\"\n",
"    Analyze the given factor columns: basic statistics, overall correlation\n",
"    with the target, daily cross-sectional IC / ICIR, and average IC within\n",
"    market-cap quantile buckets.\n",
"\n",
"    Args:\n",
"        df (pd.DataFrame): Input data; must contain trade_date_col,\n",
"            target_column, mcap_col and every column in feature_columns.\n",
"        feature_columns (list[str]): Names of the factor columns to analyze.\n",
"        target_column (str): Name of the target variable column.\n",
"        trade_date_col (str): Name of the trading-date column.\n",
"        mcap_col (str): Name of the market-cap column.\n",
"        mcap_bins (int): Number of market-cap quantile buckets.\n",
"\n",
"    Returns:\n",
"        pd.DataFrame: Per-factor summary (stats + correlations + IC/ICIR).\n",
"        The per-bucket average-IC table is printed rather than returned.\n",
"        Returns an empty DataFrame when required columns are missing or no\n",
"        rows survive NaN cleaning.\n",
"    \"\"\"\n",
"    # NOTE(review): uses np.* but numpy is not imported in this cell --\n",
"    # assumes an earlier cell did `import numpy as np`; confirm before reuse.\n",
"\n",
"    # --- Input validation ---\n",
"    required_cols = [trade_date_col, target_column, mcap_col] + feature_columns\n",
"    if not all(col in df.columns for col in required_cols):\n",
"        missing = [col for col in required_cols if col not in df.columns]\n",
"        print(f\"错误: 输入DataFrame缺少必需的列: {missing}\")\n",
"        return pd.DataFrame() # empty DataFrame on missing columns\n",
"\n",
"    # Ensure the date column is datetime typed.\n",
"    df = df.copy() # work on a copy; the caller's frame is never mutated\n",
"    df[trade_date_col] = pd.to_datetime(df[trade_date_col], errors='coerce')\n",
"    df.dropna(subset=[trade_date_col], inplace=True) # drop rows whose date failed to parse\n",
"\n",
"    # Drop rows with NaN in any feature, the target, or market cap so all\n",
"    # downstream statistics are computed on complete rows.\n",
"    initial_rows_before_clean = len(df)\n",
"    df.dropna(subset=feature_columns + [target_column, mcap_col], inplace=True)\n",
"    rows_dropped_clean = initial_rows_before_clean - len(df)\n",
"    if rows_dropped_clean > 0:\n",
"        print(f\"警告: 移除了 {rows_dropped_clean} 行,因为其特征、目标或市值列存在空值。\")\n",
"\n",
"    if df.empty:\n",
"        print(\"错误: 清理缺失值后数据为空,无法进行因子分析。\")\n",
"        return pd.DataFrame() # empty DataFrame when nothing survives cleaning\n",
"\n",
"\n",
"    print(f\"开始分析 {len(feature_columns)} 个因子指标...\")\n",
"\n",
"    # --- 1. Basic per-factor statistics ---\n",
"    basic_stats = df[feature_columns].describe().T\n",
"\n",
"    print(\"\\n--- 基本因子统计量 ---\")\n",
"    print(basic_stats)\n",
"\n",
"    # --- 2. Overall correlation of each factor with the target ---\n",
"    overall_correlation = {}\n",
"    for feature in feature_columns:\n",
"        # Correlations are computed on the cleaned df.\n",
"        if df[[feature, target_column]].dropna().shape[0] > 1: # need at least two valid points\n",
"            overall_correlation[feature] = {\n",
"                'Pearson_Correlation_with_Target': df[feature].corr(df[target_column], method='pearson'),\n",
"                'Spearman_Correlation_with_Target': df[feature].corr(df[target_column], method='spearman')\n",
"            }\n",
"        else:\n",
"            overall_correlation[feature] = {\n",
"                'Pearson_Correlation_with_Target': np.nan,\n",
"                'Spearman_Correlation_with_Target': np.nan\n",
"            }\n",
"    overall_corr_df = pd.DataFrame.from_dict(overall_correlation, orient='index')\n",
"\n",
"    print(\"\\n--- 因子与目标变量的整体相关性 ---\")\n",
"    print(overall_corr_df)\n",
"\n",
"    # --- 3. Factor-to-factor correlation matrix ---\n",
"    # Computed on the cleaned df.\n",
"    factor_correlation_matrix = df[feature_columns].corr(method='spearman') # Spearman (rank) correlation\n",
"\n",
"    print(\"\\n--- 因子之间的相关性矩阵 (Spearman) ---\") # header matches the method used\n",
"    print(factor_correlation_matrix)\n",
"\n",
"    # --- 4. Daily IC and ICIR ---\n",
"    print(\"\\n--- 计算日间 IC (Spearman 相关性) 和 ICIR ---\")\n",
"\n",
"    # Per-day cross-sectional IC computed directly on the cleaned df.\n",
"    if df.empty: # already checked above; re-checking is defensive\n",
"        daily_ic_series = pd.Series(dtype=float) # empty Series\n",
"        ic_stats = pd.DataFrame({\n",
"            'Mean_IC (Spearman)': np.nan, 'Std_Dev_IC': np.nan, 'ICIR': np.nan\n",
"        }, index=feature_columns)\n",
"    else:\n",
"        daily_ic_series = df.groupby(trade_date_col).apply(\n",
"            lambda day_group: {\n",
"                feature: day_group[feature].corr(day_group[target_column], method='spearman')\n",
"                for feature in feature_columns if day_group.shape[0] > 1 # need > 1 row per day for a correlation\n",
"            }\n",
"        ).apply(pd.Series) # expand the per-day dicts into a dates x features frame\n",
"        # NOTE(review): despite the name, daily_ic_series is a DataFrame\n",
"        # (one row per date, one column per feature) after .apply(pd.Series).\n",
"\n",
"    # Aggregate the daily ICs.\n",
"    if not daily_ic_series.empty:\n",
"        ic_mean = daily_ic_series.mean()\n",
"        ic_std = daily_ic_series.std()\n",
"        # Avoid division by zero.\n",
"        ic_ir = ic_mean / ic_std.replace(0, np.nan) # a zero std yields NaN ICIR instead of inf\n",
"\n",
"        ic_stats = pd.DataFrame({\n",
"            'Mean_IC (Spearman)': ic_mean,\n",
"            'Std_Dev_IC': ic_std,\n",
"            'ICIR': ic_ir\n",
"        })\n",
"        print(\"\\n--- 日间 IC 和 ICIR (Spearman) ---\")\n",
"        print(ic_stats)\n",
"    else:\n",
"        ic_stats = pd.DataFrame({\n",
"            'Mean_IC (Spearman)': np.nan, 'Std_Dev_IC': np.nan, 'ICIR': np.nan\n",
"        }, index=feature_columns)\n",
"\n",
"\n",
"    # --- 5. Average IC within market-cap quantile buckets ---\n",
"    print(f\"\\n--- 计算因子在 {mcap_bins} 个市值分位数上的平均 IC (Spearman) ---\")\n",
"\n",
"    # Assign each row a per-day market-cap quantile bucket via\n",
"    # groupby(...).transform + pd.qcut. labels=False yields integer buckets\n",
"    # 0..mcap_bins-1; duplicates='drop' tolerates repeated bin edges on days\n",
"    # with few stocks. Buckets are stored in a temporary column.\n",
"    mcap_bin_col_name = f'_mcap_bin_{mcap_bins}'\n",
"    df[mcap_bin_col_name] = df.groupby(trade_date_col)[mcap_col].transform(\n",
"        lambda x: pd.qcut(x, q=mcap_bins, labels=False, duplicates='drop') if len(x) >= mcap_bins else np.nan # need at least mcap_bins stocks that day\n",
"    )\n",
"\n",
"    # Rows that could not be bucketed (NaN bin) are excluded from the\n",
"    # per-bucket IC computation.\n",
"    df_binned_analysis = df.dropna(subset=[mcap_bin_col_name]).copy()\n",
"\n",
"    if df_binned_analysis.empty:\n",
"        print(\"错误: 划分市值分位数后数据为空,无法计算分位数上的 IC。\")\n",
"        avg_ic_by_bin = pd.DataFrame(index=range(mcap_bins), columns=feature_columns) # Placeholder\n",
"    else:\n",
"        # Group by (date, bucket) and compute the cross-sectional Spearman\n",
"        # correlation of each factor with the target inside each group.\n",
"        binned_ic_by_day = df_binned_analysis.groupby([trade_date_col, mcap_bin_col_name]).apply(\n",
"            lambda group: {\n",
"                feature: group[feature].corr(group[target_column], method='spearman')\n",
"                for feature in feature_columns if group.shape[0] > 1 # need > 1 row per bucket\n",
"            }\n",
"        ).apply(pd.Series) # expand the nested dicts into a DataFrame\n",
"\n",
"        # Average the daily ICs per bucket: unstack the bucket level into\n",
"        # columns, average over the date index, then unstack back so rows\n",
"        # are buckets and columns are features.\n",
"        avg_ic_by_bin = binned_ic_by_day.unstack(level=mcap_bin_col_name).mean(axis=0).unstack()\n",
"\n",
"        # Rename index/columns for a clearer printed table.\n",
"        if not avg_ic_by_bin.empty:\n",
"            # Index name will be the original column name used for grouping ('_mcap_bin_X')\n",
"            # Rename the index name explicitly\n",
"            avg_ic_by_bin.index.name = 'MarketCap_Bin'\n",
"            avg_ic_by_bin.columns.name = 'Feature'\n",
"            # Sort the bucket index for readability (pd.qcut labels=False usually sorts)\n",
"            avg_ic_by_bin = avg_ic_by_bin.sort_index()\n",
"\n",
"    print(avg_ic_by_bin)\n",
"\n",
"\n",
"    # --- 6. Combine all metrics ---\n",
"    # Merge basic stats, overall correlations and IC/ICIR into one frame,\n",
"    # aligned on the factor-name index.\n",
"    summary_df = basic_stats\n",
"    summary_df = summary_df.merge(overall_corr_df, left_index=True, right_index=True, how='left')\n",
"    summary_df = summary_df.merge(ic_stats, left_index=True, right_index=True, how='left')\n",
"\n",
"    # print(\"\\n--- 因子分析汇总报告 ---\")\n",
"    # print(summary_df)\n",
"\n",
"    # --- Drop the temporary market-cap bucket column ---\n",
"    # df is a local copy, so this is tidiness rather than caller protection.\n",
"    if mcap_bin_col_name in df.columns:\n",
"        df.drop(columns=[mcap_bin_col_name], inplace=True)\n",
"\n",
"\n",
"    return summary_df # summary report; the per-bucket IC table was printed above\n",
"\n",
"# # Run the analysis function\n",
"# factor_analysis_report = analyze_factors(test_data.copy(), feature_columns, 'future_return')\n",
"\n",
"# print(\"\\n--- 最终汇总报告 DataFrame ---\")\n",
"# print(factor_analysis_report)"
]
},
{
"cell_type": "code",
"execution_count": 31,
"id": "a0000d75",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"开始分析 'score' 在 'circ_mv' 和 'future_return' 下的表现...\n",
"准备数据,处理 NaN 值...\n",
"原始数据 28550 行,移除 NaN 后剩余 28175 行用于分析。\n",
"对 'circ_mv' 和 'future_return' 进行 100 分位数分箱...\n",
"按二维分箱分组计算 Spearman Rank IC...\n",
"整理结果用于绘图...\n",
"circ_mv_bin 0 1 2 3 4 5 6 7 8 9 ... 90 91 92 \\\n",
"future_return_bin ... \n",
"0 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"1 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"2 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"3 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"4 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"... .. .. .. .. .. .. .. .. .. .. ... .. .. .. \n",
"94 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"95 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"96 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"97 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"98 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN \n",
"\n",
"circ_mv_bin 93 94 95 96 97 98 99 \n",
"future_return_bin \n",
"0 NaN NaN NaN NaN NaN NaN NaN \n",
"1 NaN NaN NaN NaN NaN NaN NaN \n",
"2 NaN NaN NaN NaN NaN NaN NaN \n",
"3 NaN NaN NaN NaN NaN NaN NaN \n",
"4 NaN NaN NaN NaN NaN NaN NaN \n",
"... .. .. .. .. .. .. .. \n",
"94 NaN NaN NaN NaN NaN NaN NaN \n",
"95 NaN NaN NaN NaN NaN NaN NaN \n",
"96 NaN NaN NaN NaN NaN NaN NaN \n",
"97 NaN NaN NaN NaN NaN NaN NaN \n",
"98 NaN NaN NaN NaN NaN NaN NaN \n",
"\n",
"[99 rows x 100 columns]\n",
"生成热力图...\n",
"分析完成。\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABdEAAASgCAYAAAAXXAHaAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs3Xd0VFX79vErvdCRokgvQiQiSBUFpRcFFAVUDIKidCk2uoIFUUAERBFEFORHk6YiIEVUQLqQgVAEVJoUQwmTkITJef/gzTyEZEIyTObMZL6ftbKeH2euPfs+ueO8693Z2cfPMAxDAAAAAAAAAAAgHX+zCwAAAAAAAAAAwFOxiA4AAAAAAAAAgAMsogMAAAAAAAAA4ACL6AAAAAAAAAAAOMAiOgAAAAAAAAAADrCIDgAAAAAAAACAAyyiAwAAAAAAAADgAIvoAAAAAAAAAAA4wCI6AADwKu3bt1eLFi2UkpJidikOXbx4UStWrJBhGPZre/bsUffu3fXzzz9n+/3Wrl2rwYMHa8eOHVkeYxiG9u3bl+25AAAAAABpBZpdAAAAQHb8999/Sk5Olr9/1vcCJCYmKjk5WeHh4WnGXb16VVeuXFHevHk1cOBAWSyWm77XokWLVKBAgUwzCxYs0Lhx47Rv3z69+uqrkqQCBQpo06ZNunz5sh5++OEs1y5JR48e1ZIlS9S6desMX09OTtbZs2f1999/68iRI9q9e7e2bNmif//9Vx9//LFatmyZrfmy4t9//9X8+fP18ssvy8/Pz+XvD5hh5syZeuCBB1S5cmWzSwEAAIAHYREdAAB4lGPHjun7779XgQIFFBQUpICAgDSvJyUlKSUlRYsXL043NiUlRYmJiSpYsKAeeeQR+/Xly5dr+PDhGc5XpEgRbdy4UZcuXdL58+fVuXPnDHObNm3Snj17FBQUlGn98fHx+uqrrxQaGqqoqCj79TJlyujxxx/XokWLtGTJEj3++OOZvs/1wsLC0vxvqtjYWLVq1UoXL16073r39/dX8eLFVaZMGTVo0EDHjh2TzWZL9328FXFxcerWrZsCAgL0wgsvKG/evC57b8AsV69e1fr16zVz5kwtXLhQd9xxh9klAQAAwEOwiA4AADzKyZMnNXHixJvmhgwZ4vC1e++9N80ieuXKldWnTx8FBQXpp59+0p9//qmePXsqKSlJwcHBkqTAwEDlz59fAwcOzPA94+PjtWfPHoWEhGRa18yZM3X27Fm9+OKLKl68eJrXBgwYoJUrV+q9995TrVq1VKpUqZvepyT77vkbd3wHBATowoULqlOnjnr37q0777xTt99+u/2ecsprr72mpKQkzZ07176AfuPO3cDAQBUpUkT169dXjx49VLZs2RytSZK2bNmiLl26qG/fvurXr1+OzdO4cWOdOHFCa9euVcmSJR3mli1bpq+++kqHDx9W3rx51aRJEw0YMECFCxfOsdrcLSoqSlu3btWBAwdu+b0qV66se++9VwsWLHBBZRk7fvy4mjRposcff1zvv/9+mtcCAwP16aef2n+G5s2bd9NfmgEAAMA3sIgOAAA8SrVq1fTLL7+oQIECCg4OTndsS7NmzWSz2bRu3bp0Y1NSUhQfHy+r1ZruPatVqyZJ+vvvv/XPP/+od+/eaTI37tReuHChgoKC1KhRozTHt2S2o/vAgQP67LPPVLhwYfXo0SPd60WLFtXrr7+ukSNHqk+fPvrmm2+UL18+h++VlJSkoKAgnTt3TtK1XzAcPHhQiYmJKlOmjH2x/M4779T999/vsC5XWrp0qX755RctWLAg3S8JJKlnz56SpPPnz2vnzp1avHixVq1apblz56pKlSpuqdETTJgwQdOmTVOJEiXUsWNHnThxQgsWLNDWrVu1cOFCh32HufLmzatPPvlEjzzyiGbMmKFevXqZXRIAAAA8AIvoAADAo4SFhSkuLk4LFy5UaGhoukVrq9Uqm82W7jgXm82mpKQkhY
WFqX379rdcx5w5c7R//35t3rw5S/nk5GS98cYbSk5O1oABAxwuknbq1EkbN27UqlWr1L17d3366acZ7kweMmSI9u7dm+baa6+9Zv+/p0+frnr16mXjjm5dUlKSxo8fr65duyoyMjLDzPU7+VNSUjR8+HB9++23mjhxoj777DN3lWqqbdu2adq0aSpXrpwWLFig/PnzS7rWs3HjxumTTz7R4MGDTa4Sjtxxxx165ZVXNG7cOD311FMqVKiQ2SUBAADAZCyiAwAAj3Py5Em98847mWYcHedSqVIllyyi22w2FS5cOMtHb0ycOFExMTF68MEH1bFjx0yzY8eO1fnz57V161Z16NBBEyZM0L333psmM3ToUCUkJCgkJETjx4/XH3/8obfeekuVKlVSQkKC7r77brcfNbFmzRqdP39eL774Ypby/v7+6tWrl7799lvt2rUrh6vzHNOnT5ck9e/f376ALknPPPOMJk+erB9++IFFdA/XqVMnTZkyRYsXL9YLL7xgdjkAAAAwGYvoAADA40RGRuqPP/5QcHBwup3ojo5zMQxDycnJSk5OdkkNNptNRYsWzVJ2wYIFmjFjhooUKaKxY8emO7v8RmFhYZo+fboGDBig9evX6+mnn9Zzzz2nXr162Rdda9WqJUm6fPmyYmJiJF37BUHq9VT+/v46ePCgPv/88wznunr1qq5cuaJBgwZl6V4ys27dOj344IPZ2pl72223SZKuXLlyy/N7g8TERG3atEn+/v5q0KBBmtfy5Mmj4sWL659//tHFixfTHBMEzxIYGKjWrVtr7dq1LKIDAACARXQAADzBv//+q8mTJ2vz5s06d+6cihQpooYNG6pfv372RchUNptNs2fP1qJFi/T333/rtttuU7Vq1dS/f39VqFAh3XsvX75cX331lQ4dOqSwsDDdf//9evnll1W+fPk0uRsfyrhixQrNmzdPBw4c0FdffZXuPOt9+/Zp6tSp2rZtm6xWq8qVK6cuXbqoQ4cOTn8fDh8+rODgYAUGBjpciLbZbLLZbPr3338dvn769GklJSXZa963b5/mzZun4OBgWSwWJSYm6t1331VSUpLq1q2r1q1bp3uflJSULC2iL1iwQG+++aaCgoLUoUMHjRo1SiEhIQoICLjpYnqVKlVUsWJFTZ8+XTNnzlRISIgGDBiQJrN06VIlJiba//3PP/+oVKlS9vc2DEOHDx/WV199Zc9cvHhRNptN+fPnV2JiopKSklyyiG6xWG66y/5G+/btk6R0D1G1Wq2aOXOmfvjhB506dUoFCxZUjRo1NHDgQJUpU8aeW7x4sYYMGaIxY8aoWrVq+vDDD7Vjxw4FBgbqgQce0LBhwzL9a4GUlBQNGTJES5cuVa9evdJ9f13t8OHDSk5OVokSJewPXb3e8OHDdeHChWw//NVms+mhhx6Sn5+fNmzYkO5ZAY0bN1Z8fLx+/fVX+18oXLlyRTNnztT333+vkydPKk+ePLrnnnvUv39/RUREOH+T2bR//35NnTpVu3btUlxcnEqUKKF27dqpW7duGX4fNm7cqHHjxunPP//Ubbfdpscee0y9e/dOl01OTtaXX36p5cuX6++//1Z4eLgefPBBDRo0SHfeeect1127dm0tXrxYhmHc9L9lAAAA5G4sogMAYLK4uDh17txZJ06cUOPGjVW2bFkdP35c8+fP1549e7Ro0SL7gpnNZlOfPn20fv16lS1bVp06dVJsbKxWr16tn3/+WbNnz05zLMh7772nr776SsWKFdPjjz+u2NhYrVq1Shs2bNCMGTNUs2bNDGsaPny4Fi5cqJIlS6pMmTIKDQ1N8/qGDRvUt29fhYeHq1mzZgoNDdXPP/+s4cOH699//1W/fv2c+l506dLF/hDNm3nooYdumjlw4ICka8fDLF68WEFBQUpOTtbVq1e1ePFiJSUlKU+ePBkuosfGxqp69eqZvn98fLzmzZsnPz8/ffDBB7p48aI+/fTTLNUvSQ8//LCmTZ
um++67TwsWLNDLL7+c5nXDMDR79mz7v3/88Uf93//9n0aNGqUOHTrIZrPJMAy1bdtWb7/9tj3XsWNHnTt3LsOHr96K06dPZ/gw0YwkJSVp9+7dGj58uL2mVFevXlXPnj21detW1a5dW40aNdK5c+f0448/ateuXfruu+/SHIMiSYcOHdK7776rChUq6IknntBvv/2m77//Xlar1eFZ64ZhaOTIkW5bQJekU6dOSVK6X36lysrPbUYCAgLUpk0bzZw5U9u3b1edOnXsr/3xxx86ceKEnn322TRH/Lz66qv66aefdO+99+rpp59WXFycVq5cqS5dumjp0qUuWWi+mX379qlz585KSUlRy5YtVahQIe3evVsTJkzQmTNnNGLEiDT5Y8eOqUePHrrvvvvUoUMHbdq0SZ9++qn279+vTz/91L6YnZycrBdffFGbN29W7dq11aBBAx0/flw//vijtmzZoiVLlmT5L0kcKV68uOLj43XhwgXORQcAAPBxLKIDAGCyLVu26Pjx43ryySf17rvv2q9/8MEHWrlypY4fP67SpUtLuvawy/Xr1+uhhx7SlClT7Dszv//+e73yyiuaMmWK/TzmDRs26KuvvlLlypU1e/Zs+9ERv/zyi1566SW9+uqrWrVqVbrdncuWLVNcXJxmzJiR7jgKSUpISNDgwYOVJ08eLVmyRHfccYckacCAAXr00Uf1+eefKyoqSgULFsz29+KVV16Rv7+/8ubNq/DwcPv1+Ph4jRgxQrGxsXr11VdVtmxZJSUlZbiwZbPZlJiYqPPnz9uvNW3aVBaLRZI0ePBgrVmzRtu3b083LtWlS5d06dKldLunpWs7m1N/qREeHq65c+dq+/btevDBB5WYmKg2bdooNDRU27ZtU9euXdWtW7d0519HR0frySeftC9IN27cWI0bN04313fffae//vpLhQoV0vnz51WvXj0tWbJE48ePV4sWLXT16lVJ146HcYfExMSbHkFSuXLlNP/28/PTs88+q86dO9uv/f7779q6dav9lwipqlevrtGjR2vDhg1q06ZNmvf58ssv9eyzz2rYsGHy8/NTfHy8mjdvrg0bNujKlSvpftEjSaNHj9bChQvdtoAuXftZlaSQkBCXv3e7du3su/evX0T/4Ycf7K+nunTpkn766SeVLVtW8+bNs//MNmzYUO+//7527NjhlkX0L774QvHx8Zo4caJatWplv/7EE09o4cKFGjZsWJpd9bGxsYqKirL/8iU+Pl7PPvus1q9fr7Vr16pp06aSpK+//lqbN2/Wiy++qFdffdU+ftasWRozZoxmzpypN95445ZqT/1Zv/4vQQAAAOCbWEQHAMBkqbsljxw5ori4OOXLl0+S9Prrr+v1119Pk126dKmkawvB1y9+N2vWTBMmTEhzfMS3334rSRo0aFCahc+GDRuqWbNmWr16tTZt2qSHH344zRzHjh3TrFmzdP/992dY78aNGxUbG6uIiAjNmzcvzWv58uXTqVOntHPnzgwXhW8moweCJiQkqE+fPoqNjVW/fv3UuXNnNWrUSK1bt9abb75pz12+fFnvvvuuXnvttSw/DPR615+lnnoG+Y1H3kjXdlhfv2AbGhqqBx98UNK1hdPUxdPjx49LksqWLZvuPc6ePStJme7qTkpK0sSJExUUFKSOHTtq2rRpKlSokJ577jlNnTpVU6dO1ZNPPilJKlKkSHZu1WmhoaH677//Ms307NlT0rXz0w8ePKixY8emWdyVpAcffND+VwKp/v77b+3Zs0fStSNrblSiRAm9/vrr9p3I4eHhqlmzplauXKn//vsv3YLwe++9p7lz56pKlSpuW0CXZK/v+l/KuEqVKlVUuXJlrV69WiNHjlRAQIBSUlK0cuVKlS9fXtWqVbNn8+TJo/DwcJ0/f15///23ypUrJ0lq3ry5mjdv7vLaHBk/frzGjx9v/3dSUpJ27NihixcvKjExUadPn7b/Ik6SgoOD0/xFRnh4uLp166
ZXX31V69atsy+iL1++XNK1I2s++ugjez4uLk7StV9O3qrY2FhJ7vslFQAAADwXi+gAAJjs3nvvVdu2bbV8+XI98MADqly5siIiIlS3bl01a9YszWL5kSNHFBISYl8QSxUSEqJHHnkkzbU///xTkjI8+7hq1apavXq1/vzzz3SL6I0bN3a4gC5JR48elXRtoTl1sflGp0+fdnzD2XDs2DH16dNHBw4cUP369dW3b19J184qXrJkiXr27KnixYvLZrPplVde0c8//6z9+/dr7ty52V74uueee+w7/n/++WdJ13a1fvXVV5o/f75q1aolm82mlJSULL1fdHS0pPQ7s6WsLaJPnjxZJ06cUOfOndMsMnbp0kV//fWXOnToYO/F7bffnqWabtWdd955094OHDhQklSzZk29+OKLWrJkSbpFdOna92D+/Pnatm2b9u/frwsXLth3JGf0PW7RokW6v5pI7bFhGGmuL1y4UKdPn1aFChW0f/9+/fzzz+l+znNK6i/BUhdzb9SpUyft27dP69atc+q4kccee0xjx47V5s2b9eCDD2r79u06c+ZMmp3+0rXjX9544w2NHj1arVu3Vrly5RQREaEaNWqodevWTv2iyVlbtmzRihUrtGvXLh05ckTJyclpjqi6XokSJdId5VOxYkVJ//vFlCT99ddfkpTmuKPrueIz6PTp08qXLx8PgAUAAID8bx4BAAA57cMPP9SiRYvUt29flSlTRr/99psGDRqktm3b6uLFi1l6jytXrshqtaZbUMzuA/HuvvvuTF9Pff+hQ4fqwIEDGX49/fTT2ZozI99++62eeOIJHTp0SJLSHA/Tp08fXblyRSNHjlRiYqL69++vn3/+Wc2aNdPXX3/t1M7RgQMH6q233pLNZtOqVatUrVo1BQQE6MSJE5KuLeKOHDkyzTEzjqSkpOi3335TWFiYIiMj072euoherFixDMdv2LBB06dPV8GCBdOdL1+oUCF99NFHqlChgv2hnamLjDntnnvu0aZNm7KUbdiwoapWrarNmzdr165daV6Ljo5WixYt9Pnnn6tQoUJ68cUXNX36dIdnm0sZ7+h35PTp03rppZe0YMECFS1aVO+8847bjuRI/euFEydOZLgb/ezZs0pOTk63UJxVbdq0UUBAgP0Ilx9++EF+fn5q27ZtuuxTTz2ln376ScOHD1etWrV09OhRvf3222ratKl2797t1PzZNX78eHXp0kUbN25U7dq1NWLECC1dujTDX6xI1xb/b5S64H79Z5thGMqfP7/Dz6CNGzfecu0bN27UPffcc8vvAwAAAO/HIjoAACY7duyY/vjjD91111166aWXNG7cOK1bt07dunXT0aNH9c0339iz5cuXV2Jion0H8vUeeeQR3Xfffbp06ZKk/y2sZrRb/FYWX1MXCQ8fPpzh+86ZM8e+C9sZu3btUlRUlIYOHaqQkBDNmjUrXSYiIkLPPvusfv75Zz3yyCP66aef1LVrV02ePNm+E/hmDh8+rFmzZqlXr15prn/33Xc6ceKE/WiZ689rzqqffvpJJ06cUJMmTdI86DHVmTNnJDneiV6oUCHlz59fr732WqYPNNyyZYuCgoLctojepEkTbdmyJcu7fF966SVJ0pQpU9Jc//jjj2W1WvXFF19o4sSJ6t69uxo2bJjuF0DXy+j76Ei7du30yiuvKG/evBowYICOHTuW5uz1nFSqVCmVLFlSCQkJ2rlzZ5rXYmNjderUKZUpU8bpM9OLFi2q+++/X2vWrFFCQoJWr16t2rVrq0SJEunm+uOPPxQaGqrOnTtr9OjRWrx4saZOnSqr1ZrmCJSccvbsWU2fPl3lypXTihUrNGLECHXq1EkRERH2s+NvdPz4cVmt1jTXjhw5Iklp/iKjXLlyunTpkv2/pevNmzdPc+fOvaXak5KStHr1avvxMQAAAPBtLKIDAGCyBQsWqFOnTlq3bl2a6xUqVJAknTt3zn4tdffm+++/r6SkJPv1rV
u36sSJE6pSpYr96IEnnnhCkvTRRx/ZF9ala7srV69erRIlSqh+/frZrveBBx5Q4cKF9f333+vgwYP26zabTaNHj9bbb7+tCxcuZPt9JWnatGl66qmntHXrVrVs2VLLli1T3bp10+VWrFihdu3aqWrVqjp27Jjq1KmjwYMHO9x1b7VaFR0drfnz52vv3r2Ki4tT69atNWbMGPuucEm6cOGCJk2apPLly9u/f9kVFxenMWPGyM/PT88991yGmdSeOlpEr1atmr7++mv7mecZST17vnr16umOOckpDz/8sEqWLKmPP/44S/nmzZurfPny+u233+znnUv/+yXC9Yuix44d09ixY11S5/UPhG3fvr2qVq2q6dOn6++//3bJ+99Mly5dJF3bhX39DvgpU6YoJSUlzQM2nfHYY4/p0qVLGjNmjGJjY/XYY4+ly+zdu1edOnVK9wuMSpUqSUr7uZJTzp49K8MwVLRo0TQ/o8uWLdNPP/2U4ZjExER9+umnaf6d+ou064/kSX3w7KRJk9Ls+N+wYYPefPNNLVu27JZq//zzzxUQEOBwxzwAAAB8C2eiAwBgsvbt22vOnDkaPHiwVq1apRIlSujcuXNatWqV/P391aJFC3v22Wef1caNG7Vhwwa1adNGDRo00Pnz57Vq1SoFBwdr5MiR9uxDDz2k5557Tl999ZUeffRRNW7cWLGxsVqzZo3CwsI0btw4pxZfw8LC9N5776lfv3564okn1LRpUxUrVky///679u/frxYtWqhBgwZOfS+6deumgwcP6oknnshwgf/YsWN69913tX79evXu3VvTpk1T9+7dtXXrVj311FN65pln1KhRo3RHZURFRWnv3r2Srj2osHnz5nrooYf0wAMP2Bdyk5OT9dprr+nkyZP6+uuvM/zeLF++XDt27NBTTz2V4VnziYmJ6tOnj06dOqVnnnkmzYMer3fy5EmFhISkOaLmRlWqVLH/3xnt0J4+fbpSUlLUqFEjh+/hagEBARo8eLB69+6t1q1b2x+o6oi/v79efPFFDRkyRFOnTrUf19KgQQMdOHBA3bt318MPP6yTJ0/q559/tj+w9fpf+twqf39/DR06VJ07d9bbb7+tGTNmuOy9HYmKitL27du1evVqtW3bVg8//LAsFou2b9+uihUrqnv37rf0/k2bNlWePHk0f/58hYaGpvmMSFW3bl3dddddmjt3rv766y9VqVJFV65csS9e3+pCflZUrFhRd9xxh7Zu3ao+ffqoZMmS2rFjh6Kjo1WgQAFdvHgx3dnxBQoU0MyZM7V3716VL19emzdv1uHDh1WvXr0099mlSxdt2LBBCxcu1O7du1W3bl1dunRJK1euVHh4uIYOHep03QcPHtRnn32mIUOGpHlYMwAAAHwXO9EBADBZuXLl9O233+rRRx/V3r17NWfOHP3666+qXbu2Zs6cmWYndmBgoD799FO98cYbCgwM1Lx58/Tbb7+pUaNGWrhwoWrWrJnmvYcOHaoPPvhARYsW1eLFi7VlyxY1b95c3377bbpsdjRq1Ejz5s1TgwYNtHHjRs2fP1/+/v4aPXr0LR0TERwcrPHjx6dZQE/dybtt2za1bt1a69evV7NmzdSyZUsVLVpUc+fO1TPPPKPo6Gi9/vrrqlOnjlq3bq2+ffvaF+hatmypevXq6cMPP9SmTZs0efJkPfnkk/YF9H///Vfdu3fXL7/8otdee0116tSxz3/9Ltd9+/Zp3rx59oe2Xu/vv//W008/rS1btqh+/foZLuIZhqGtW7fqwIEDaXZL30xycrKk/z1wc8eOHZo3b57Cw8Ptx85cz2azZXo0yq1o0qSJOnbsqEGDBmn//v03zbdt21Z33nmn1q9fbz9GaMCAAerVq5eSkpI0Z84c7d27V88++6xmz54tf39/rVu3zqVnmNeqVUutWrXSr7/+qtWrV7vsfR3x9/fXxIkTNXz4cAUHB+ubb77R33//rWeffVZz58695YXZsLAw+4
Jy06ZNM3y/4OBgzZkzR3379rU/xHX58uUqXry43n77bfXu3fuWasiK4OBgzZw5U02bNtX27ds1b948BQQE6KOPPrL/lcaKFSvSjClbtqw+/vhje82XL19W9+7d7TvDr3/vL774QoMGDZLNZtP8+fP1+++/q1mzZlqyZInuvfdep2o+efKkevbsqXr16umZZ55x/uYBAACQq/gZOfX/wwIAAHCBNWvWqE+fPpKku+66SyNHjlTt2rXT5Q4fPqxvvvlG3333nS5duqTHH39c77//vqRri88ZnW0eGxur2bNna9asWUpOTtbIkSPVsWNH++vdunXT5s2bNWLECIWEhOjDDz9UUlKSfvnlF/vZ66dOndJXX32lb775RklJSWrRokWGu/yjoqK0Y8cO+6L866+/rhdeeCFL34MZM2boww8/1MyZM3XnnXfq6aefVmxsrHr37q3+/funy7dp00bnz5/Xb7/9lqX3z66kpCR1795df/75p1auXOn0QzIBT3L16lW1bdtWAQEBmjt3bpafrwAAAIDcj+NcAACAR2vYsKHuvfde1a5dW/3793d4BE2FChU0cuRIDR06VH/88Ueao1IcPRz0559/1owZM1S5cmW98847aY5QkaTOnTvr0KFDGj16tKRrfzUwcODANItrKSkp+uWXXxQYGKg33nhDzz77bIZzde7cWUePHlVkZKTatm2r1q1bZ/l7kHr+fWJiosqUKaPRo0fr888/tz+4M6N8QkJClt8/u4KDg/XZZ59p06ZNLKAj1wgMDNSwYcNUpUoVFtABAACQBjvRAQCAx0tKSsqxh2cePXpUZcuWdfhQ0qw4fvy4goODVaxYMRdW9j+xsbE6d+6cSpQoYT+6wzCMW6oZAAAAAJA1LKIDAAAAAAAAAOAADxYFAAAAAAAAAMABFtEBAAAAAAAAAHCARXQAAAAA8DKJiYk3zXByJwAAgGuwiA4AyFV+/PFHffvttw5fX7p0qb7//vssvZfValVSUpJSUlKyPP/Vq1dltVp14cKFLI+B50pOTtaVK1fMLgNALvD999/r4sWL9n9fuXJFo0eP1rBhw7L9XhcuXNADDzygbt266dixYxlmbDabHnnkEfXs2VP//fef03UDAABACjS7AAAAXOnTTz/VsWPH9MQTT2T4+pgxY1SgQAE9+uijN32v4cOHa8WKFU7VUaRIEW3cuNGpsTBHSkqKzp49qyNHjujAgQPavXu3fvnlF/Xv319dunQxuzwAXuz06dMaPHiwbrvtNq1Zs0ZBQUEKDQ3VwYMHtXPnTj3//POqUKFClt9vypQpiouLk81mU1JSkg4fPizp2i/+goODVb58ef3www86fPiw7r33Xt122232sYZhKCEhQaGhofL3Z08VAABAVrCIDgDwaufPn9dff/2lkJAQhYSEKCAgQEFBQfYFhRsFBgbaXzcMQ4mJiSpYsKDuvPPOdNk6deqoUKFCCgwMVEBAQJrXfvrpJx07dkwdO3ZU3rx57devXr2qpKQkhYaGuvZGkSPOnj2rXr16KTY2VmfOnFFycrL9tdDQUN155506dOiQiRUCyA2mT5+u5ORkdevWTUFBQfbrAwYMUOfOnTV69Gh99dVXWXqv7du3a+7cuZKkLVu2qHXr1mleb9GihcaNG6eJEydKuvYXWMuWLbO/brPZJElr165VyZIlb+W2AAAAfIafwUF5AAAvtmbNGg0YMMC+gG61WmWz2VSgQIEM83FxcfLz81PevHllGIaSkpLUrVs39e/fP1vz9ujRQz///LNHLkJERUVp69atOnDggNmleIWXX35ZNptNd9xxh0JCQjRjxgy9/vrrev755+Xn52fP7dy5U3v27MnwPQICAhQVFXXLtSQnJ+vzzz/X0qVLderUKeXJk0f16tXTxx9/fMvvDdzo8OHDeuqppzRkyBC1b98+w0xcXJymTp2qVatW6dy5c7rnnns0ZMgQRUZGpssmJSXpiy++sP/8VqhQQa+++qoeeO
ABl9U8cuRI/fbbb1q3bp3DzKZNmzRlyhTt379f4eHheuKJJ9SvXz8FBqbfP2SxWDRx4kTt2bNHfn5+atWqlV577TXlyZPHZTX/9ddfevTRR1W0aFGtWrVKwcHBaV7v37+/Vq5cqSFDhqhr166Zvtfx48fVoUMHXbp0SdOnT9fOnTs1efJkjR8/XhEREUpKSlJ4eLgWLlyo6dOnq0ePHqpRo4ZmzpypvXv36oMPPlBycrISExPVrFkzl94nAABAbsZOdACAV2vatKksFov931FRUTpy5IjDo1QeffRRhYeHa8GCBe4qER5u0qRJ9v/78OHDmjFjhsLDw9MsoEvXFuYmT56c4XuEh4e7ZBF94sSJmjFjhu6//341b95c58+f1x9//HHL7wvcKDY2Vj179tSlS5ccZqxWq7p166bo6GhVq1ZNLVq00C+//KKoqCgtWrQozfEjNptNL7/8stavX68KFSqoc+fO2rFjh1588UXNnDlT9erVu+WaZ82apfnz52f4l0Opvv/+e7322msKDQ3Vo48+qqSkJM2YMUNnz57Ve++9lya7detWde/eXTabTa1bt1ZYWJiWLl2qv/76S19++WW6zwBnpKSkaOjQoUpOTtagQYPSLaBL144O27x5s8aNG6eKFSvqwQcfzPC9oqOj1adPH8XGxqp///6qX7++QkJCNHnyZB0+fNh+TNnGjRs1c+ZMVa1aVQMGDJC/v7/++ecfbd26VQUKFFDt2rVv+b4AAAB8DYvoAIBc59y5c6pcubLD1++99143VuN+Y8eOVUJCgtll5DqpR/T89NNPKlGihP16165d9eeff7pkju+++06lS5dOs4CXnQfbZuT48eNq0qSJHn/8cb3//vuuKNMnbNmyRV26dFHfvn3Vr18/s8txqUOHDql37976559/Ms1NnTpV0dHReuSRRzRu3Dj5+/urX79+euyxxzRs2DDNmzfPnp0/f77Wr1+vWrVq6csvv1RwcLBsNpuioqI0dOhQrVq1Ks0xJtmRkpKiiRMnatq0aZnmzp07p5EjRyooKEhz5sxR1apVJUk1atTQm2++qdatW9sXqBMTEzV48GAlJSXps88+08MPPyxJatasmbp3764FCxaoU6dOTtV7vZkzZ2rHjh2qUaOGw2dxFC1aVO+//7569+6tfv36adKkSWrQoEGajGEY+vTTT3X69Gk99thj6tWrlyTp7rvvVkBAgLZv3y7p2vcq9Zd9o0aNsp95Xq9ePfn5+clisbCIDgAA4ASeJAMAyHXy5s2rMWPGZPhVvHhxs8vLcSVKlMjWA+p82eXLl5WcnCxHp9slJSXp8uXLio+Ptx8F4e/vr8DAQPuXn59fujPznXX69GndfvvtaXbA8uA/uNKRI0fUsWNHSdeOpXLk6tWrmj9/voKCgjR8+HD7z2F4eLheeOEF7dq1S0eOHLHnv/nmG0nSiBEj7LutAwIC1Lt3b504cUKbN292uuY333xT06ZNU69evTLdhb506VJZrVY9/fTT9gV0SXryySd1xx136Ntvv7VfW7dunU6cOKGmTZvaF9AlqUGDBqpRo0aarLP27dunjz/+WOHh4RozZkymO9sbN26sESNGKD4+Xj169ND06dPT/ALNz89PkydP1qhRo+zv9dhjj2nw4MGaMWOGvvjiC0lSfHy8Ro8erVGjRik8PFyHDx/W4cOHFRgYqE8++UQNGzZM0zcAAABkDTvRAQC5TlBQkMPd5mFhYTcdb7VaFRQUpKCgIKf/nN8wDNlsNiUmJiokJCTDs3hhvscffzzD3bhvvfWW3nrrLfu/X3zxRRUpUsSNlQE549y5c/YF28zOFY+JiVFcXJzq16+vwoULp3kt9YzzjRs3qnz58oqNjdWff/6pMmXKqEqVKmmyderUUVBQkDZt2qSGDRs6VbPVatUnn3yipk2bavny5Q5z27ZtkyQ1b948zfXAwEDVq1dP69evv2k29f4++eQTXb58Oc2Do7Pj5MmT6tGjh5
KSkjR69GiVK1fupmM6d+6ssLAwjRgxQuPGjdPatWs1fPhw+/nzAQEBeuqpp+z548ePK0+ePKpfv7792oYNGzRo0KBM5wkPD9euXbucui8AAABfxf+PHgCQ65w/f16tW7d2+PrNjnPp1KmTDh06lKW5mjRpctPM119/rbp162bp/W60c+dOTZ06Vbt27ZK/v7/KlSunqKgoPfroow4X+LPyYNHGjRtLurYb88iRI/riiy+0adMmtWzZUm+88Uaa7IULF/TJJ5/op59+UmxsrEqUKKGGDRuqd+/eKliwYLbvafHixRoyZIiGDh2q6OhorVmzRuXKldOkSZM0a9YsLV68WEWKFNGYMWNUq1YtDRgwQD/++KN+/PFHlS9fPs17DR48WEuWLNGSJUt09913Z7uWPn36KDk52b6TfMmSJdq6dasee+wx1a1bVzabTcnJybrrrrvSnL3vSjcePbR169Y0167/+Um934weaHt9T1OPcLle6vcp1fXvkdqTMWPGpHvAZEZz3njUyYoVKzRv3jwdOHBAX331VbqF1H379mnq1Knatm2brFarypUrpy5duqhDhw7Z/n45uueb/RxfvnxZn3/+uVauXKmTJ0+qYMGCaty4sQYOHKhChQqlua/rTZkyRVOmTLH/+/r/riZPnqwpU6Zk+N94Rv8dXv99fuyxx7RgwQItWbJEhw8f1vr165UvX740dfTt21cdO3bU2LFjtXHjRqWkpKhGjRoaMWKESpUq5dT3rGbNmqpTp85Nc2fOnJGkDP+7KlmypIKCguy/gMosGxwcrBIlSqT5ZdWUKVM0efJkdevWTYMHD7Zf/+ijj/TZZ5+pZ8+eGjhwoP36hx9+mKW/9jhz5oz8/PwyrKNMmTK6cOGC4uLilC9fPnvN1+9Yvz5rGIaOHTumiIiIm857o2PHjum5557TmTNn1KlTJ61evVq//vqrAgMDs/SXJe+9957ef/997dq1S6+88oqWLVtmP07qeoGBgem+L6m/KJ4+fXqGv7SIiorK8v/7BgAAgP9hER0AkOsUKVLE4YNFUxfdMtO0aVPVqVMnwwWKVD/99JOOHTumjh07ZrhTMSUlRTabTQkJCU7vYF6+fLkGDx6soKAgtWjRQvnz59fatWv16quvymKxaMiQIU697/V+/fVXvfzyy5KkSpUqpTvu5syZM3rmmWd07Ngx1apVSy1atLAvlK5bt06LFy9W/vz5nZp74sSJuu+++1S9enVt3rxZTz75pIoWLaqWLVtq8eLF+uijj/TNN9+oXbt2+vHHH7VixQr17dvXPj4pKUlr1qzRXXfd5dQCuiQ99thj9v/76tWr+vjjjyVJ1atXV4sWLRQUFGQ/miKnFtF79uxp/78/++wzlShRQm3btrVfu/789azKnz+//X0vX76sOXPmqHLlymrUqFGajCsMHz5cCxcuVMmSJVWmTJl0i30bNmxQ3759FR4ermbNmik0NFQ///yzhg8frn///dcl543f7Oc4Li5OzzzzjA4dOqQGDRqoSZMmOnjwoObPn68dO3Zo0aJFCgsLU4kSJezft5MnT2r58uWqVauWatWqdcs1Xu/q1at66aWX9Ouvv6p8+fIqX758hourZ8+etX/GPPbYY9q5c6c2bNig06dPa8mSJU4d9ZPVo4cuX74sSQ4/v/Lly6fTp09nKZs/f357Vrr2M79u3TrNnj1b7du311133aWjR4/qiy++0F133aU+ffo4XXN4eHiGf3GU+vP+77//Kl++fPaab7vttnTZAgUKSLp2vJIzi+g//vij/aiYN954Q/fdd1+2xr/++uuqV6+ehgwZotdff93+39SyZcv04YcfKjg4WP7+/rp06ZJ2796tRo0aKSkpSVFRUfbPwkuXLuns2bPp3vv6XxoCAAAg61hEBwB4rX/++UcXL160H70iSQkJCbLZbDp8+HCGY5KTk5WYmJjm9eTkZNlsNvuOxAEDBtx07iNHjujYsWPq0aNHuh3BrnDmzB
mNGDFCISEhWrRokf2M85dfflmtWrXS119/re7du6to0aJOzxEXF6dXXnlFnTt3Vq9evZQnT550mVGjRunYsWPq37+/evfubb8+cuRIzZ8/X/Pnz9eLL77o1PzVqlXTF198of3796tdu3aSpLlz59p/WXDq1ClJ184oLly4cLpF9F9++UVxcXFpFqFvxaJFi/Tvv/9KunbkxVNPPaWqVavm+MM4r99x+9lnn6lkyZJprjkjf/789vc4fvy45syZo7vvvvuW3/dGy5YtU1xcnGbMmJHuQYjStf8eBw8erDx58mjJkiW64447JF37b+zRRx/V559/rqioKKf+oiFVVn6OP/roIx08eFBvvfWWnn76afv1d999V19//bUWL16szp07q1SpUvbv0ZYtW7R8+XLVq1fP5Q8W/fzzz2UYhhYuXKhq1ao5zC1YsEDNmjXThAkTFBQUJJvNpieeeEIxMTH666+/0v1lhiulLrRmtANaurbD/MqVK1nOXrx40f7vwMBAvf/++2rfvr1Gjx6t2bNn6+2335ZhGBo7dqz9F1fZ5e/v7/DIrtT3TH3ocmrNGeVvzGbXSy+9pCpVqqhevXoKDg7W1q1bFRYWpsDAQDVp0kTx8fHasmVLunGPPvqo/v77b/vxOTNnzkzzeu3atfXuu+8qODhYP/zwgxYuXKiIiAgNHTpUV69eVfHixe07/l955RWH9XE0FQAAQPaxiA4A8FrTpk3TokWLMnwts+Nczpw5k+71zHavm2HlypW6cuWKXnrppTQPCc2fP78++ugjnT171v6LA2ddunRJ7dq106uvvprh6xcvXtS6detUvHjxdA8g7Nmzp+rWrasyZco4PX/qcSOpO/lr165t3y16/e7+wMBAPfLII5o9e7b2799vPypkxYoV8vf3V5s2bZyuIVVcXJwmTZqksLAwJSQkqHDhwipSpIiWLFmiSpUq6YUXXrjlOXKjY8eOadasWbr//vszfH3jxo2KjY1VRESE5s2bl+a1fPny6dSpU9q5c2eW/kLEkZv9HKekpOj7779XYGCgTp06pY8++sj+2vnz5yVJv//+uzp37ux0Ddl1+vRpLV269KYPAM6TJ49GjRpl/289ICBA9evXV0xMjM6dO5eji+jh4eGSrv3FR0aSk5OVnJyc7Wyqu+66S3379tVHH32kgQMHauPGjerTp4/Tf1UiXft+xcbGOqzh+v+9vuaQkJA02dT7uLHm7Lj+KJXUne1JSUn6999/Hf7i5OzZs5k+/LpEiRL2v0wZM2aMpGv/HcXExKhKlSoqW7asfRF90qRJGf532aNHjwyfAwEAAIDMsYgOAPBagwcP1muvvabQ0FCFhIQoOTlZ/fv314YNGzR9+nQ98MADWr58ueLi4vT000/LMAz17NlTf/zxh32BIfXMa2d3HOaUI0eOSEp/XrakLJ1nnBXBwcEOFx4l6e+//1ZKSooqVaqU7s//r1/McdaNx+Bk9gC/xx57TLNnz9aKFStUpUoVJSQkaP369br//vszXXTKqlGjRum///7TgAEDNHHiRPn7+2vChAlq27atPvroI/uDFD2ZzWbLkWMabDabw9caN27scAFdko4ePSrp2kMqY2JiMsxcf8yHM272c3z+/Hn7Luhp06blSA0Zyez71qlTp5suoEvXHnB540M9U3dOG4ZxawXexO233y7p2i9KbnT16lVduHDB/kuvzLKS9N9//6W7D+naA3t/+ukn/fjjj6pcubJ69ep1SzUXL15ce/fu1cWLF+0L19fXIP1vQTv1c+PYsWPpFu5vzLrK3r17lZKSkuHnelJSki5cuKCKFSve9H02btyoAwcOyM/PT2fPntXUqVOVnJysb775xp4JCwvL8MgmHnINAADgnOwfpAgAgIfIly+fChYsqNDQUEVHR+upp57Shg0bNGrUKPui5xdffGFfFA0ICNBHH32kChUqqHv37powYYKSkpIUGhpqf7CgN7h69aqsVust7ZKUpKJFi6
pYsWJOj7darW775UNkZKQqVqyoFStWSJLWr1+v+Pj4NGeHO2vFihX67rvv1KxZMzVv3tx+vVChQhoyZIiSk5O1Zs2aW54nJyUlJWV4/rErnDhxwuFrN9s1nLrQO3ToUB04cCDDr+uPV3HGzX6OU2uIiIhwWMOCBQtuqYaM3Mr3LVXZsmVdVE32VahQQSEhIYqOjk73WnR0tGw2m/04qQIFCujOO++UxWJJt7h/5swZnThxIsOjp6xWq33BOjY21n5OubNSv6979uxJ99off/wh6X9HmWQleyvHZWVkw4YNkq791c2NUv/7vdlnss1m0wcffKC7775bt912m4oWLarp06fr6tWr6tWrl+Li4iRd+wVF5cqV031t3brVpfcEAADgK1hEBwB4rStXrmjNmjV64YUX1LFjRx0/flxPPfWUChcurI0bN2rTpk1KTExUQECANm3apE2bNmnr1q2KiopSkyZNNG3aND300EN65513tGHDBodHEZgh9ZiGAwcOpHvts88+03333Zdm12FOKFOmjPz9/XXo0KF0u2pPnjyp++67T506dcrRGq7Xrl07HTt2THv27NGKFSsUHh6eZtHbGXv27NGwYcNUtGhRjR49Ot3rrVu31pdffpnmLPY1a9Zo2bJl9q9z587dUg3Z4efnJyn9Ludt27Y53PmclYdPOnrfCxcuaP/+/c6UKul/P8cZPaNg3759mjNnToaLtK5UuHBhFShQQH///Xe6/8avXLmiOXPmaNmyZenG3cr37ejRoy7Z3X6rRzbdipCQEDVo0EB//PFHuofqLlmyRJJUv359+7WmTZvqzJkz+umnn26aTTVq1CidOnVK3bt319mzZ/Xmm2/eUs3NmjWTpHSfjSdOnNCWLVtUtWpV+/n7Dz30kIKCgjRv3jylpKTYs5cvX9bq1atVuHDhDHeMOyshIUELFy5UWFiYHn744XSvpy6i3+wva6ZOnar9+/eneRZF1apVNXToUEVFRdn/ouedd97RihUr0n3dc889LrsnAAAAX8IiOgDAaz3//PPq06ePNm7cqLZt2+rHH3/Ur7/+qt69e+v5559Xt27ddPToUZ0/f17dunVTt27d1KtXL7333nuaNGmSPvjgA/n7+2v27Nk5/vDI7GrZsqVCQkI0Z86cNAuQV65c0cqVKyVJdevWzdEaChQooEaNGun06dPpjsFYvHixW2q4Xtu2beXv76+5c+fql19+UfPmze3nGjvj6NGjeumll5SYmKjx48dneNyElH7xb8yYMXr99dftX6lH77hDao2HDh2yX0tKStK4ceMcjilWrJiCgoLSHbVx9erVdO978ODBNJmJEyfKarU6XW/qcSTff/99mve22WwaPXq03n77bV24cMHp988Kf39/PfLII4qPj9fnn3+e5rXFixfr7bfftu8Qvt6dd94pKf0RJTf7vhmGoffffz/Hj1txhxdeeEH+/v4aNGiQ/vzzT/vDUL/99lvdcccdevDBB+3ZqKgohYWF6c0339SOHTskSevWrdO0adOUN29etWzZMs17//DDD/r+++/Vrl07vfbaa+rQoYNWrVplX3R3RpUqVdSgQQOtX79ekydPVnJysk6fPq1XXnlFycnJeuKJJ+zZ2267zf6Q1jfffFPx8fG6dOmSXn/9dZ0/f16PP/64S49HGj9+vM6dO6f27dsrX7586V5P/WVcZovoGzZs0Keffqrq1aurVatWaV7r0KGDunbtav/FTvHixVWhQoV0X44evAoAAIDMcSgeAMBrDRo0SMuXL1fXrl3tO16nTZumoKAgBQYGys/PT7169VJsbKzmz58vm82mixcvKj4+XtK1nc1NmzbVN998ozvuuEPBwcFm3k4axYoV0+jRozV06FA98cQTat68ufLmzauff/5ZJ06cUFRUlCIiInK8jjfffFP79+/Xxx9/rE2bNqlq1ao6ePCgNm3apFKlSt3yGcbZcfvtt6tOnTr2RbZ27drd0vsVK1ZMtWrVUq
1atbL0y4DU3cY//fRTmvPgu3btqj///POWasmq5s2b64svvtD777+voKAgBQcH65NPPlFAQID9mIobBQYGqk2bNlq8eLH69OmjcuXK6fTp0zp16pTmzJkjSapZs6aKFCmiBQsWqFKlSrrrrru0ePFirVq1SjVq1NCuXbucqjcsLEzvvfee+vXrpyeeeEJNmzZVsWLF9Pvvv2v//v1q0aKFGjRo4PT3I6sGDhyorVu3avLkyfr1119177336t9//9WaNWt02223aeDAgenGlChRQvXq1dN3330nwzBUrFgxHTt2TIGBgZowYYKka7uZQ0ND9dlnn6lYsWIqVqyYZs2apcOHD6tixYpu+7nIKffdd58GDx6ssWPH6pFHHrE/eDc8PFzjxo1L85lZqlQpjRkzRoMHD9YzzzxjzwYFBemDDz5IczTKv//+q1GjRqlIkSIaOnSoJOmNN97QL7/8onfeeUe1a9dWyZIlnar5/fffV9euXTVlyhR9/vnnstlsstlsatKkiZ555pk02cGDB+vIkSNasGCBlixZIsMwdPXqVVWrVk39+/d3av6MzJ8/X7Nnz1bx4sUdvm/q8T+OFtENw9CECRMUFBSkd955R35+fhn+oiYrv7y5fuc9AAAAsoZFdACA10pdAL1e6mJ6qqCgIAUEBNgffJe6uzRVnjx59NJLL2VpvoMHD9rPIk/deZqVIx+c9dhjj6lkyZL69NNPtWbNGl29elUVK1ZU79699eSTT+bYvNcrXry4Fi1apKlTp2r16tXatWuXihQpomeffVZ9+/Z1+1nyjz32mH7//XcVL15c9erVu6X3ypMnjyZOnJjmQXuZLUClnv/u7++fZoyfn58SExNvqZasuvfeezVu3DhNnTpV/fr1U+HChdWqVSv169dPjz76qMNxI0aMUMGCBbVq1SqtX79e+fPnT5PPmzevPv/8c7377rsaM2aMgoODdf/992vRokX65JNPnF5El6RGjRpp3rx5mjp1qjZu3KikpCSVK1dOo0ePdtvPcf78+TV//nx99tlnWrlypebOnatixYqpQ4cO6tOnj8NzqD/66CNNmDBBGzZs0H///afbbrtNUVFR9tfvvPNOTZ06VePGjdOQIUOUN29eNWrUSAsWLHDpIqyZnnvuOdWsWVNff/21jh07pooVK6pnz57pPkslqVWrVrr77rv1xRdf6M8//1SJEiXsZ3OnMgxDgwcP1sWLFzVp0iT78Sr58uXTqFGj1LNnT73++uuaM2eOU5+vRYoU0eLFizV79mz98ssvCgwMVOvWrdW+fXv7Lu1UYWFh+uqrr7Ro0SKtWrVKV69eVaNGjfTss8+65AGcSUlJ+vjjjzVjxgzly5dP06ZNy/BhpefPn9d3330nSQ5/eeDn56eJEydqz549qlSpkiTZf0FwvdR/X3/cy41Sv+cAAADIOj8jN/ytKQDAZyUlJWnKlCkKCQmx7z6/3ty5c2W1WtMtKKSkpMhmsykpKUlPPvmkSpUqddO5Vq9erX79+tn/HRwcrG3btik0NNQ1NwPTWSwWPfHEExoxYoSeffbZNK/9888/On78uGrWrKmQkBD79QsXLujq1asOd4ID8C1JSUn67rvv9Nlnn+mff/5R8eLF9fnnn6tKlSppcgsXLtTo0aPtZ/VXrlxZS5cuzfIvD+677z7dddddmjdvnv3aypUr1b9/f73zzju677770o154403dOTIEe3cufMW7hAAAMD3sBMdAODVkpOTNWPGDAUHBys4ODjDB/EFBwfrq6++SnPNZrMpJSVFCQkJeuCBB7K0iF6/fn2FhYWpcOHCKleunDp06MAC+v83c+ZMXbx4MUvZF154Qfnz58/hipxz5coVSf/bdX690qVLq3Tp0umus6vz1h07dkyLFi3KUrZkyZLq0KFDDlcEOC8gIEAxMTH6559/1KJFC40aNSrDv9p5/PHHNXnyZBUtWtS+Az47u+8TExPT/RVM6r9Tz0S/UVhYmJKTk7
N5RwAAAGAnOgAAuGWNGze2n+l7M2vXrnX6vGPkTlu2bFGXLl2ylK1Tp45mz56dwxUBt8YwDO3Zs0f33ntvjs1x6NAhBQcHq0yZMjk2BwAAAK5hER0AAAAAAAAAAAdy7mloAAAAAAAAAACPde7cOfXu3Vs1atRQ+/bttX///myNnzVrlqKiotJd/+uvvxQVFaUaNWqoS5cuOnnyZJrXlyxZosaNG6tu3boaP368UlJSbuk+chqL6AAAAAAAAADgYwzDUN++fRUbG6tFixYpKipKvXv3ltVqzdL4OXPmaOzYsemuJyYm2p+FtWzZMj3wwAPq27evfaH8l19+0bBhw9SrVy/Nnz9f27Zt05w5c1x6b67GIjoAAAAAAAAA+JidO3dq165deuedd1ShQgU9/vjjKleunNasWXPTsUuXLtXy5cvVoUOHdK+tWrVK586d0zvvvKPSpUurR48eSkhI0M6dOyVJX375pZo3b64OHTqobNmyGjx4sMcvogeaXQAAAAAAAAAAwDlNmjTJ9PW1a9dmeH3fvn0qUaKEKlasaL9Wo0YN7d69W+3atcv0PWvVqqU2bdpo6tSpOnr0aLr3veeee1SoUCH7terVq2v37t2qVauW9u3bp9dff93+2j333KOTJ08qNjZWhQsXznRes3jFIvq5c+c0cuRIbd68WeXKldN7772nKlWqOPVeFovFxdUBAAAAAAAA5oiMjDS7BK+S8u9dZpeQA0o5NSouLk6lS5dOc61AgQKKiYm56diSJUtm+31Pnz5tf71MmTL21wICApQnTx6dOXPGYxfRZXi4lJQUo1OnTkanTp2MP//801i8eLHRqFEj4/Lly069X3R0dLprVqvV2L59u2G1WrM8JrtzkHdf3tGYzPrsafdA3rkx9Nhz866agx67L++OOeixa/PumIMeuzbvjjnosbl5d8xBj12bd8cc3pD3ph67Y47cmDezx+6Yw9fyyB7bqUq57stZ06ZNM1588cU01xYsWGB07do1y+8xadIk49lnn01zbcSIEcZbb72V5tpHH31kDBs2zDAMw4iMjDR2796d5vWGDRsa27dvz075buXxZ6Lfytk8AAAAAAAAAID0ChUqpHPnzqW5dvnyZQUHB+fo++bUvDnJ4xfRMzubBwAAAAAAAACQfdWrV9ehQ4d06dIl+7Xo6Gjdcccdt/S+NWrU0K5du2Sz2TJ83+rVq2vHjh32144cOaLLly/f8rw5yeMX0W92hg4AAAAAAAAAIHsqVaqk8uXLa8KECUpJSdHevXu1evVqNW7cWCkpKbp06VKahfCseuCBB3T16lV98cUXkqT169drz549aty4sSSpbdu2WrBggQ4fPiybzabJkyfr3nvvVZEiRVx6f67k8Q8WDQwMVEhISJproaGhio+Pd/o9bxybkJCQ5n+zMia7c5B3bz6jMTfrs6fdA/nsj6HHnp13xRz02L15d8xBj12bd8cc9Ni1eXfMQY/NzbtjDnrs2rw75vD0vLf12B1z5La82T12xxy+lA8PD8/We/u6FKWYXYLL3cou6TFjxqhHjx5auXKl4uLi1K5dOzVs2FDHjx9XkyZNtHTpUkVERGTrPYOCgjRu3DgNHDhQs2bN0oULF9SnTx9VqFBBktS0aVP99ttvateunfLmzStJmjlz5i3cRc7zMwzDMLuIzCxcuFD/93//p8WLF9uvffnll/r99981bdq0bL+fxWJRYmJitsaEhIRkawx5c/OeWBN51+Y9sSby5s9B3rV5T6yJvPlzkHdt3hNrIu/avCfWRN78Oci7Nu+JNZE3fw5fy9esWTPLWUhX/61485CXCbz9z1saHx8fr23btqlQoUKqVq2ai6qSLly4oB07dqh06dKqVKlSutcPHTqkf/75RzVr1lTBggVdNm9O8PhF9EOHDql9+/bauHGj8ufPL0kaNGiQ8ufPr7feeivb72exWFS+fPk01xISEvTXX3+pbNmyCgsLSzfmyJEj6cZkhry5eU
djMuuzp90DeefG0GPPzbtqDnrsvrxZNdFj5/OeWBM9Nn8Oemxu3hNrosfmz+ENeW/qsSfW5A15M3vsjjl8Lc9O9OxhER3O8PjjXK4/m2fkyJGKiYnR6tWrNXXqVKff09GHS1hYmMPXsvuBRN7cfGZjHPXZ0+6BvPNj6LFn5l05Bz12T94dc9Bj1+bdMQc9dm3eHXPQY3Pz7piDHrs27445vCXvLT12xxy5NW9Wj90xh6/lAeQsj19ElxyfzQMAAAAAAAAAWWUzct+Z6F6xwOvlvOJ7fPfdd2vVqlU5cjYPAAAAAAAAAACOePyZ6K5msVjMLgEAAAAAAABwicjISLNL8CqJp7J3hr83CLnjiNkl5H6Gj4mOjk53zWq1Gtu3bzesVmuWx2R3DvLuyzsak1mfPe0eyDs3hh57bt5Vc9Bj9+XdMQc9dm3eHXPQY9fm3TEHPTY374456LFr8+6Ywxvy3tRjd8yRG/Nm9tgdc/haHtlz5WS5XPeFnOcVx7kAAAAAAAAAwK1KkU8dygEX8Te7AAAAAAAAAAAAPBWL6AAAAAAAAAAAOMAiOgAAAAAAAAAADnAmOgAAAAAAAACfkKIUs0uAF2InOgAAAAAAAAAADrCIDgAAAAAAAACAAyyiAwAAAAAAAADgAGeiAwAAAAAAAPAJNsMwuwR4IT/D8K2fHIvFYnYJAAAAAAAAgEtERkaaXYJXuXSytNkluFz+Ev+YXULuZ/iY6OjodNesVquxfft2w2q1ZnlMducg7768ozGZ9dnT7oG8c2PosefmXTUHPXZf3h1z0GPX5t0xBz12bd4dc9Bjc/PumIMeuzbvjjm8Ie9NPXbHHLkxb2aP3TGHr+WRPRdPlMp1X8h5nIkOAAAAAAAAAIADnIkOAAAAAAAAwCekyKdOtoaLsBMdAAAAAAAAAAAHWEQHAAAAAAAAAMABFtEBAAAAAAAAAHCAM9EBAAAAAAAA+AQbZ6LDCexEBwAAAAAAAADAARbRAQAAAAAAAABwgEV0AAAAAAAAAAAc8DMMw6cOArJYLGaXAAAAAAAAALhEZGSk2SV4lf9OljS7BJe7rcRxs0vI/QwfEx0dne6a1Wo1tm/fblit1iyPye4c5N2XdzQmsz572j2Qd24MPfbcvKvmoMfuy7tjDnrs2rw75qDHrs27Yw56bG7eHXPQY9fm3TGHN+S9qcfumCM35s3ssTvm8LU8sufMiRK57gs5j+NcAAAAAAAAAABwgEV0AAAAAAAAAAAcYBEdAAAAAAAAAAAHAs0uAAAAAAAAAADcwWYYZpcAL8ROdAAAAAAAAAAAHGARHQAAAAAAAAAAB1hEBwAAAAAAAADAAc5EBwAAAAAAAOATUswuAF7JzzB86zR9i8VidgkAAAAAAACAS0RGRppdglc5eaKE2SW4XIk7T5pdQu5n+Jjo6Oh016xWq7F9+3bDarVmeUx25yDvvryjMZn12dPugbxzY+ix5+ZdNQc9dl/eHXPQY9fm3TEHPXZt3h1z0GNz8+6Ygx67Nu+OObwh7009dsccuTFvZo/dMYev5ZE9J47fkeu+kPM4Ex0AAAAAAAAAAAc4Ex0AAAAAAACAT7DJp062houwEx0AAAAAAAAAAAdYRAcAAAAAAAAAwAEW0QEAAAAAAAAAcIAz0QEAAAAAAAD4BBtHosMJ7EQHAAAAAAAAAMABFtEBAAAAAAAAAHCARXQAAAAAAAAAABzwMwzDp04CslgsZpcAAAAAAAAAuERkZKTZJXiVI8fvMLsElytf8pTZJeR+ho+Jjo5Od81qtRrbt283rFZrlsdkdw7y7ss7GpNZnz3tHsg7N4Yee27eVXPQY/fl3TEHPXZt3h1z0GPX5t0xBz02N++OOeixa/PumMMb8t7UY3fMkRvzZvbYHXP4Wh7Zc/jY7bnuCzmP41wAAAAAAAAAAHCARXQAAAAAAAAAABwINLsAAAAAAAAAAHAHm/zMLgFeiJ3oAAAAAAAAAAA4wCI6AAAAAAAAAA
AOsIgOAAAAAAAAAIADLKIDAAAAAAAAAOAADxYFAAAAAAAA4BNSDLMrgDdiJzoAAAAAAAAAAA74GYbhU79/sVgsZpcAAAAAAAAAuERkZKTZJXiVA8dKmF2Cy1UuddLsEnI/w8dER0enu2a1Wo3t27cbVqs1y2OyOwd59+Udjcmsz552D+SdG0OPPTfvqjnosfvy7piDHrs274456LFr8+6Ygx6bm3fHHPTYtXl3zOENeW/qsTvmyI15M3vsjjl8LY/s2f/PHbnuCzmPM9EBAAAAAAAA+ASb/MwuAV6IM9EBAAAAAAAAAHCARXQAAAAAAAAAABxgER0AAAAAAAAAAAc4Ex0AAAAAAACAT+BMdDiDnegAAAAAAAAAADjAIjoAAAAAAAAAAA6wiA4AAAAAAAAAgAN+hmEYZhfhThaLxewSAAAAAAAAAJeIjIw0uwSvsvufUmaX4HL3lj5mdgm5n+FjoqOj012zWq3G9u3bDavVmuUx2Z2DvPvyjsZk1mdPuwfyzo2hx56bd9Uc9Nh9eXfMQY9dm3fHHPTYtXl3zEGPzc27Yw567Nq8O+bwhrw39dgdc+TGvJk9dsccvpZH9vzxd8lc94Wcx3EuAAAAAAAAAAA4wCI6AAAAAAAAAAAOBJpdAAAAAAAAAAC4g01+ZpcAL8ROdAAAAAAAAAAAHGARHQAAAAAAAAAAB1hEBwAAAAAAAADAAc5EBwAAAAAAAOATbOwphhP4qQEAAAAAAAAAwAEW0QEAAAAAAAAAcMDPMAzD7CLcyWKxmF0CAAAAAAAA4BKRkZFml+BVtv1T1uwSXK526b/MLiH3M3xMdHR0umtWq9XYvn27YbVaszwmu3OQd1/e0ZjM+uxp90DeuTH02HPzrpqDHrsv74456LFr8+6Ygx67Nu+OOeixuXl3zEGPXZt3xxzekPemHrtjjtyYN7PH7pjD1/LInt//KpvrvpDzOM4FAAAAAAAAAAAHWEQHAAAAAAAAAMABFtEBAAAAAAAAAHCARXQAAAAAAAAAABwINLsAAAAAAAAAAHAHm/zMLgFeiJ3oAAAAAAAAAAA4wCI6AAAAAAAAAAAOsIgOAAAAAAAAAIADnIkOAAAAAAAAwCfYDPYUI/v8DMMwzC7CnSwWi9klAAAAAAAAAC4RGRlpdgle5de/Kppdgss1KPun2SXkfoaPiY6OTnfNarUa27dvN6xWa5bHZHcO8u7LOxqTWZ897R7IOzeGHntu3lVz0GP35d0xBz12bd4dc9Bj1+bdMQc9NjfvjjnosWvz7pjDG/Le1GN3zJEb82b22B1z+Foe2fPL0Qq57gs5j79fAAAAAAAAAADAAc5EBwAAAAAAAOATUsSeYmQfPzUAAAAAAAAAADjAIjoAAAAAAAAAAA6wiA4AAAAAAAAAgAOciQ4AAAAAAADAJ9jkZ3YJ8ELsRAcAAAAAAAAAwAEW0QEAAAAAAAAAcMDPMAzD7CIkKSUlRf3799ddd92lfv362a9v2LBBY8eO1enTp9WqVSuNGDFCISEhTs9jsVhcUS4AAAAAAABgusjISLNL8Crr/qpsdgku17jsAbNLyP0MD3DlyhXjtddeM+666y5j0qRJ9uv79+83qlatanzyySfGP//8Y/Tt29d47733bmmu6OjodNesVquxfft2w2q1ZnlMducg7768ozGZ9dnT7oG8c2PosefmXTUHPXZf3h1z0GPX5t0xBz12bd4dc9Bjc/PumIMeuzbvjjm8Ie9NPXbHHLkxb2aP3TGHr+WRPauPVMl1X8h5HnGcy1tvvaWgoCDVqFEjzfXZs2crIiJCvXv3VqlSpTR8+HAtXLhQiYmJJlUKAAAAAAAAAPAlHrGI3rNnT7377rsKCgpKc33fvn1q0KCB/d/FixdXoUKFdPDgQXeXCAAAAAAAAADwQR6xiF6mTJkMr8fFxal06dJprhUoUECnT592R1kAAAAAAAAAAB8XaHYBmQkICEj3ENHQ0FDFx8ff0vveOD4hISHN/2
ZlTHbnIO/efEZjbtZnT7sH8tkfQ489O++KOeixe/PumIMeuzbvjjnosWvz7piDHpubd8cc9Ni1eXfM4el5b+uxO+bIbXmze+yOOXwpHx4enq339nUp8jO7BHghP8MwDLOLSBUVFaU6deqoX79+kqSnn35arVu3VlRUlD3Tpk0b9enTRy1btnRqDovFku0z1UNCQrI1hry5eU+sibxr855YE3nz5yDv2rwn1kTe/DnIuzbviTWRd23eE2sib/4c5F2b98SayJs/h6/la9asmeUspFVH7za7BJdrUW6f2SXkeh69iD527FidOnVKEydOlCRZrVbVrVtXc+fOVbVq1Zyaw2KxqHz58mmuJSQk6K+//lLZsmUVFhaWbsyRI0fSjckMeXPzjsZk1mdPuwfyzo2hx56bd9Uc9Nh9ebNqosfO5z2xJnps/hz02Ny8J9ZEj82fwxvy3tRjT6zJG/Jm9tgdc/hanp3o2cMiOpzh0ce5tGnTRh07dtS2bdtUu3ZtTZkyRYUKFVJkZOQtva+jD5ewsDCHr2X3A4m8ufnMxjjqs6fdA3nnx9Bjz8y7cg567J68O+agx67Nu2MOeuzavDvmoMfm5t0xBz12bd4dc3hL3lt67I45cmverB67Yw5fywPIWR69iH733XerX79+6tq1qwoWLKj4+Hh9/PHH8vf3iOehAgAAAAAAAPAiNrGuiOzzqEX02bNnp7vWo0cPtW7dWgcOHNA999yj4sWLm1AZAAAAAAAAAMAXedSZ6O5gsVjMLgEAAAAAAABwiVs99tjXrDia+75frcux3pnjDB8THR2d7prVajW2b99uWK3WLI/J7hzk3Zd3NCazPnvaPZB3bgw99ty8q+agx+7Lu2MOeuzavDvmoMeuzbtjDnpsbt4dc9Bj1+bdMYc35L2px+6YIzfmzeyxO+bwtTyy54cjVXPdF3KeRx3nAgAAAAAAAAA5xWZwJjqyj58aAAAAAAAAAAAcYBEdAAAAAAAAAAAHWEQHAAAAAAAAAMABFtEBAAAAAAAAAHCAB4sCAAAAAAAA8Akp7CmGE/ipAQAAAAAAAADAARbRAQAAAAAAAABwgEV0AAAAAAAAAPBB586dU+/evVWjRg21b99e+/fvz9I4m82msWPHqm7dumrUqJFWrFhhf23y5MmqXLlyuq/GjRtLkgzDUN26ddO8VqtWrRy5P1fxMwzDMLsId7JYLGaXAAAAAAAAALhEZGSk2SV4lcWHa5hdgsu1r7DLqXGGYejpp5+WJL377rvas2ePJk+erO+++0558uTJdOz48eP17bffauLEiQoKClK/fv00bdo0Va1aVYmJiUpMTEyTHzlypAoUKKBRo0bpyJEjat++vdavX6+AgABJkp+fn/Lly+fUfbiF4WOio6PTXbNarcb27dsNq9Wa5THZnYO8+/KOxmTWZ0+7B/LOjaHHnpt31Rz02H15d8xBj12bd8cc9Ni1eXfMQY/NzbtjDnrs2rw75vCGvDf12B1z5Ma8mT12xxy+lkf2fPtn9Vz35azt27cbd911l3Ho0CH7teeff95YunRppuMSExON6tWrG99884392rRp04yhQ4dmmN+3b59Rp04d47///jMMwzAWLVpkdO3a1em6zcBxLgAAAAAAAADgY/bt26cSJUqoYsWK9ms1atTQ7t27Mx139OhRxcfHq0GDBlkaN378eHXt2lWFCxeWJO3cuVN///237r//flWvXl09e/bUsWPHXHBHOSfQ7AIAAAAAAAAAAM5p0qRJpq+vXbs2w+txcXEqXbp0mmsFChRQTExMpu8XFxengIAAlSpVyn4tf/78On36dLpsTEyMdu7cqQkTJtivHT16VA0bNtTzzz+v5ORkvf322xo4cKAWLVqU6bxmYhEdAAAAAAAAgE+wiYM5UgUGBiokJCTNtdDQUMXHx990XHBwcJprYWFhGY6bOXOm2rdvr/z589uvzZ07N03m7bffVtOmTXXkyBGVL18+u7fhFiyiAw
AAAAAAAICXcrTT/GYKFSqkc+fOpbl2+fLldAvkGY1LSEjQ5cuXlTdvXknXdqffOC4uLk6rVq3S//3f/2X6fsWKFZMkHT9+3GMX0fnVCwAAAAAAAAD4mOrVq+vQoUO6dOmS/Vp0dLTuuOOOTMeVKlVKRYoU0Y4dOzId9+OPP+rOO+9U1apV7df+++8/tWrVKs2u9dT3KVGixC3dT05iER0AAAAAAAAAfEylSpVUvnx5TZgwQSkpKdq7d69Wr16txo0bKyUlRZcuXZLNZks3zt/fX61bt9akSZN0+fJlnT9/XrNmzVLjxo3T5NauXasHH3wwzbXbbrtNefPm1bBhw2SxWLR+/XoNHz5c9evXT/OAU0/DIjoAAAAAAAAAn5Bi+Oe6r1sxZswYrV27VvXr11fHjh3Vtm1bNWzYUCdPnlTt2rV18ODBDMe9/PLLCgoKUsOGDdW4cWOFhoaqZ8+e9teTkpK0detW1a1bN93YSZMm6dKlS3rmmWc0YsQItWjRQlOmTLml+8hpnIkOAAAAAAAAAD7o7rvv1qpVq7Rt2zYVKlRI1apVkySVLFlSBw4ccDguX758mjt3rnbs2KGkpCTVqVNHQUFB9teDg4O1a9euDMfecccd+uKLL1x7IzmMRXQAAAAAAAAA8FHh4eF66KGHsj3O399ftWvXzoGKPI+fYRiG2UW4k8ViMbsEAAAAAAAAwCUiIyPNLsGrzP8z9y36dqq4zewScj2f3Il+44dLfHy8YmJiFBERofDw8HR5i8WSrQ8k8ubmHY3JrM+edg/knRtDjz0376o56LH78mbVRI+dz3tiTfTY/Dnosbl5T6yJHps/hzfkvanHnliTN+TN7LE75vC1PLLHxiMi4QR+agAAAAAAAAAAcIBFdAAAAAAAAAAAHGARHQAAAAAAAAAAB3zyTHQAAAAAAAAAvsdm+JldArwQO9EBAAAAAAAAAHCARXQAAAAAAAAAABxgER0AAAAAAAAAAAc4Ex0AAAAAAACAT0hhTzGc4GcYhmF2Ee5ksVjMLgEAAAAAAABwicjISLNL8CpfHapvdgku91ylTWaXkOv55E70Gz9c4uPjFRMTo4iICIWHh6fLWyyWbH0gkTc372hMZn32tHsg79wYeuy5eVfNQY/dlzerJnrsfN4Ta6LH5s9Bj83Ne2JN9Nj8Obwh70099sSavCFvZo/dMYev5QHkPP5+AQAAAAAAAAAAB1hEBwAAAAAAAADAAZ88zgUAAAAAAACA77EZ7ClG9vFTAwAAAAAAAACAAyyiAwAAAAAAAADgAIvoAAAAAAAAAAA4wJnoAAAAAAAAAHxCivzMLgFeiJ3oAAAAAAAAAAA4wCI6AAAAAAAAAAAO+BmGYZhdhDtZLBazSwAAAAAAAABcIjIy0uwSvMqMgw3MLsHlut/1q9kl5Ho+eSb6jR8u8fHxiomJUUREhMLDw9PlLRZLtj6QyJubdzQmsz572j2Qd24MPfbcvKvmoMfuy5tVEz12Pu+JNdFj8+egx+bmPbEmemz+HN6Q96Yee2JN3pA3s8fumMPX8sgem8HBHMg+fmoAAAAAAAAAAHCARXQAAAAAAAAAABxgER0AAAAAAAAAAAd88kx0AAAAAAAAAL7Hxp5iOIGfGgAAAAAAAAAAHGARHQAAAAAAAAAAB1hEBwAAAAAAAADAAc5EBwAAAAAAAOATUgw/s0uAF2InOgAAAAAAAAAADvgZhmGYXYQ7WSwWs0sAAAAAAAAAXCIyMtLsErzKlP2NzS7B5fpWWWd2CbmeTx7ncuOHS3x8vGJiYhQREaHw8PB0eYvFkq0PJPLm5h2NyazPnnYP5J0bQ489N++qOeix+/Jm1USPnc97Yk302Pw56LG5eU+siR6bP4c35L2px55YkzfkzeyxO+bwtTyAnOeTi+gAAAAAAAAAfI+N063hBH5qAAAAAAAAAABwgEV0AAAAAAAAAAAcYBEdAAAAAAAAAAAHOBMdAAAAAAAAgE9IMdhTjOzjpw
YAAAAAAAAAAAdYRAcAAAAAAAAAwAEW0QEAAAAAAAAAcMDPMAzD7CLcyWKxmF0CAAAAAAAA4BKRkZFml+BVPoppbnYJLjcwYrXZJeR6Pvlg0Rs/XOLj4xUTE6OIiAiFh4eny1sslmx9IJE3N+9oTGZ99rR7IO/cGHrsuXlXzUGP3Zc3qyZ67HzeE2uix+bPQY/NzXtiTfTY/Dm8Ie9NPfbEmrwhb2aP3TGHr+WRPTb5mV0CvBDHuQAAAAAAAAAA4ACL6AAAAAAAAAAAOMAiOgAAAAAAAAAADvjkmegAAAAAAAAAfE+KwZ5iZB8/NQAAAAAAAAAAOMAiOgAAAAAAAAAADrCIDgAAAAAAAACAA5yJDgAAAAAAAMAn2ORndgnwQuxEBwAAAAAAAADAAT/DMAyzi3Ani8VidgkAAAAAAACAS0RGRppdglcZs6+12SW43JC7V5hdQq7nk8e53PjhEh8fr5iYGEVERCg8PDxd3mKxZOsDiby5eUdjMuuzp90DeefG0GPPzbtqDnrsvrxZNdFj5/OeWBM9Nn8Oemxu3hNrosfmz+ENeW/qsSfW5A15M3vsjjl8LQ8g5/nkIjoAAAAAAAAA35NicLo1so+fGgAAAAAAAAAAHGARHQAAAAAAAAAAB1hEBwAAAAAAAADAAc5EBwAAAAAAAOATbJyJDifwUwMAAAAAAAAAgAMsogMAAAAAAAAA4ACL6AAAAAAAAAAAOMCZ6AAAAAAAAAB8Qor8zC4BXsjPMAzD7CLcyWKxmF0CAAAAAAAA4BKRkZFml+BV3rK0M7sEl3srcpnZJeR6PrkT/cYPl/j4eMXExCgiIkLh4eHp8haLJVsfSOTNzTsak1mfPe0eyDs3hh57bt5Vc9Bj9+XNqokeO5/3xJrosflz0GNz855YEz02fw5vyHtTjz2xJm/Im9ljd8zha3kAOY8z0QEAAAAAAAAAcMAnd6IDAAAAAAAA8D02gz3FyD5+agAAAAAAAAAAcIBFdAAAAAAAAAAAHGARHQAAAAAAAAAABzgTHQAAAAAAAIBPSDH8zC4BXoid6AAAAAAAAAAAOMAiOgAAAAAAAAAADvgZhmGYXYQkxcTE6K233tLevXsVEhKijh076rXXXpO/v782bNigsWPH6vTp02rVqpVGjBihkJAQp+axWCwurhwAAAAAAAAwR2RkpNkleJVhe9qbXYLLvVttsdkl5HoecSb65cuX9eKLL6p9+/aaPHmyDhw4oL59+6pixYqKjIxUnz591Lt3b7Vp00YffPCBJkyYoCFDhjg9340fLvHx8YqJiVFERITCw8PT5S0WS7Y+kMibm3c0JrM+e9o9kHduDD323Lyr5qDH7subVRM9dj7viTXRY/PnoMfm5j2xJnps/hzekPemHntiTd6QN7PH7pjD1/IAcp5HLKL/+eefatOmjQYNGiRJKlasmGrWrKndu3dr165dioiIUO/evSVJw4cPV6tWrTRo0CCnd6MDAAAAAAAA8D02TreGEzzip6Z69ep644037P+22Ww6fPiwypcvr3379qlBgwb214oXL65ChQrp4MGDZpQKAAAAAAAAAPAhHrGIfqN58+bpypUrat++veLi4lS6dOk0rxcoUECnT582qToAAAAAAAAAgK/wiONcrnfo0CF9+OGHGj16tPLnz6+AgIB0x7aEhoYqPj7e6TluHJuQkJDmf7MyJrtzkHdvPqMxN+uzp90D+eyPoceenXfFHPTYvXl3zEGPXZt3xxz02LV5d8xBj83Nu2MOeuzavDvm8PS8t/XYHXPktrzZPXbHHL6Uz+j5fgBcy88wDMPsIlJduHBBnTp10oMPPqgRI0ZIkp5++mm1bt1aUVFR9lybNm3Up08ftWzZMttzWCwWJSYmZmtMSEhItsaQNzfviTWRd23eE2sib/4c5F2b98SayJs/B3nX5j2xJvKuzXtiTeTNn4O8a/OeWBN58+fwtXzNmjWznIX0+u4OZpfgch/cu9DsEnI9j1lET0
hIUPfu3RUaGqpp06YpMPDaJvmxY8fq1KlTmjhxoiTJarWqbt26mjt3rqpVq5bteSwWi8qXL59u7r/++ktly5ZVWFhYujFHjhxJNyYz5M3NOxqTWZ897R7IOzeGHntu3lVz0GP35c2qiR47n/fEmuix+XPQY3PznlgTPTZ/Dm/Ie1OPPbEmb8ib2WN3zOFreXaiZw+L6HCGRxznYhiGBg4cqPPnz2vWrFlKTExUYmKiAgIC1KZNG3Xs2FHbtm1T7dq1NWXKFBUqVEiRkZFOz+fowyUsLMzha9n9QCJvbj6zMY767Gn3QN75MfTYM/OunIMeuyfvjjnosWvz7piDHrs274456LG5eXfMQY9dm3fHHN6S95Yeu2OO3Jo3q8fumMPX8gBylkcsoh84cEDr16+XJDVo0MB+vU6dOpo9e7b69eunrl27qmDBgoqPj9fHH38sf3+PfCYqAAAAAAAAACAX8YhF9CpVqujAgQMOX+/Ro4dat26tAwcO6J577lHx4sXdWB0AAAAAAACA3CBFbMxF9nnMmejuYrFYzC4BAAAAAAAAcIlbOfLYF726u5PZJbjcuHvnm11CrucRO9Hd7cYPl/j4eMXExCgiIiLDM6csFku2PpDIm5t3NCazPnvaPZB3bgw99ty8q+agx+7Lm1UTPXY+74k10WPz56DH5uY9sSZ6bP4c3pD3ph57Yk3ekDezx+6Yw9fyAHIef78AAAAAAAAAAIADPrkTHQAAAAAAAIDvsRl+ZpcAL8ROdAAAAAAAAAAAHGARHQAAAAAAAAAAB1hEBwAAAAAAAADAAc5EBwAAAAAAAOATUjgTHU5gJzoAAAAAAAAAAA6wiA4AAAAAAAAAgAMsogMAAAAAAAAA4ICfYRiG2UW4k8ViMbsEAAAAAAAAwCUiIyPNLsGr9NvZ2ewSXG7yfd+YXUKu55MPFr3xwyU+Pl4xMTGKiIhQeHh4urzFYsnWBxJ5c/OOxmTWZ0+7B/LOjaHHnpt31Rz02H15s2qix87nPbEmemz+HPTY3Lwn1kSPzZ/DG/Le1GNPrMkb8mb22B1z+FoeQM7jOBcAAAAAAAAAABxgER0AAAAAAAAAAAd88jgXAAAAAAAAAL7HJj+zS4AXYic6AAAAAAAAAAAOsIgOAAAAAAAAAIADLKIDAAAAAAAAAOAAi+gAAAAAAAAAADjAg0UBAAAAAAAA+IQUgweLIvvYiQ4AAAAAAAAAgAN+hmEYZhfhThaLxewSAAAAAAAAAJeIjIw0uwSv0nNHlNkluNxnNWebXUKu55PHudz44RIfH6+YmBhFREQoPDw8Xd5isWTrA4m8uXlHYzLrs6fdA3nnxtBjz827ag567L68WTXRY+fznlgTPTZ/Dnpsbt4Ta6LH5s/hDXlv6rEn1uQNeTN77I45fC0PIOf55CI6AAAAAAAAAN+TYnC6NbKPnxoAAAAAAAAAABxgER0AAAAAAAAAAAdYRAcAAAAAAAAAwAHORAcAAAAAAADgE1LkZ3YJ8ELsRAcAAAAAAAAAwAEW0QEAAAAAAAAAcIBFdAAAAAAAAAAAHPAzDMMwuwh3slgsZpcAAAAAAAAAuERkZKTZJXiVbtu6mV2Cy31Z+0uzS8j1fPLBojd+uMTHxysmJkYREREKDw9Pl7dYLNn6QCJvbt7RmMz67Gn3QN65MfTYc/OumoMeuy9vVk302Pm8J9ZEj82fgx6bm/fEmuix+XN4Q96beuyJNXlD3sweu2MOX8sDyHkc5wIAAAAAAAAAgAMsogMAAAAAAAAA4ACL6AAAAAAAAAB8Qorhn+u+bsW5c+fUu3dv1ahRQ+3bt9f+/fuzNM5ms2ns2LGqW7euGjVqpBUrVqR5/f3331flypXTfK1Zs8b++oYNG9S6dWvVrFlTw4cPV2Ji4i3dR05jER0AAAAAAAAAfIxhGOrbt69iY2O1aNEiRUVFqXfv3rJarTcdO3HiRC1btkyTJ0/WhAkT9N5772nv3r3213ft2qV3331X27
Zts3899NBDkqQDBw6oT58+evTRR7V06VJdvHhREyZMyLH7dAUW0QEAAAAAAADAx+zcuVO7du3SO++8owoVKujxxx9XuXLl0uwYz0hSUpLmzJmjvn37qk6dOqpRo4a6dOmiuXPn2l/ft2+fHnzwQeXPn9/+FRQUJEmaPXu2IiIi1Lt3b5UqVUrDhw/XwoULPXo3eqDZBQAAAAAAAAAAnNOkSZNMX1+7dm2G1/ft26cSJUqoYsWK9ms1atTQ7t271a5dO4fvd/ToUcXHx6tBgwZpxi1fvlyStGfPHknSCy+8oH/++UelS5dW79699cgjj9jnffjhh+1jixcvrkKFCungwYO65557Mr9Zk7CIDgAAAAAAAMAnpBh+ZpfgMeLi4lS6dOk01woUKKCYmJibjgsICFCpUqXs1/Lnz6/Tp09Lkg4dOqSKFStq6NChKlOmjL777ju9+uqrqlixoipXruxw3tOnT7OIDgAAAAAAAABwLUc7zW8mMDBQISEhaa6FhoYqPj7+puOCg4PTXAsLC7OPe/rpp/X000/bX3vhhRe0bt06ff/996pcubICAgKcmtdMnIkOAAAAAAAAAD6mUKFCOnfuXJprly9fTrdAntG4hIQEXb582X4tLi4u03HFihXT8ePHHc57s/Fm8zMMwzC7CHeyWCxmlwAAAAAAAAC4RGRkpNkleJWoLd3NLsHlZted4dS4Q4cOqX379tq4caPy588vSRo0aJDy58+vt956y+G4lJQUNWjQQO+9954eeughSdK8efP09ddfa8WKFRo6dKjq1auntm3bSpKuXr2q5s2bq2XLlnr99dc1duxYnTp1ShMnTpQkWa1W1a1bV3PnzlW1atWcupec5pPHudz44RIfH6+YmBhFREQoPDw8Xd5isWTrA4m8uXlHYzLrs6fdA3nnxtBjz827ag567L68WTXRY+fznlgTPTZ/Dnpsbt4Ta6LH5s/hDXlv6rEn1uQNeTN77I45fC2P7EkRZ6KnqlSpksqXL68JEyZo5MiRiomJ0erVqzV16lSlpKTo8uXLypMnjwICAtKM8/f3V+vWrTVp0iTVrFlTycnJmjVrlpo2bSrp2trr+PHjVahQIRUoUEBffvmlYmNj1bFjR0lSmzZt1LFjR23btk21a9fWlClTVKhQIY/+uffJRXQAAAAAAAAA8HVjxoxRjx49tHLlSsXFxaldu3Zq2LChjh8/riZNmmjp0qWKiIhIN+7ll1/Wiy++qIYNG8owDJUpU0Y9e/aUJD3zzDM6e/asXnvtNV25ckU1a9bUvHnzVLZsWUnS3XffrX79+qlr164qWLCg4uPj9fHHH8vf33NPHmcRHQAAAAAAAAB80N13361Vq1Zp27ZtKlSokP04lZIlS+rAgQMOx+XLl09z587Vjh07lJSUpDp16igoKMj+ev/+/dW/f3+H43v06KHWrVvrwIEDuueee1S8eHHX3VQOYBEdAAAAAAAAAHxUeHi4/Wzz7PD391ft2rWdnrdUqVIqVaqU0+PdyXP3yAMAAAAAAAAAYDJ2ogMAAAAAAADwCSkGDxZF9rETHQAAAAAAAAAAB1hEBwAAAAAAAADAARbRAQAAAAAAAABwgDPRAQAAAAAAAPiEFIM9xcg+P8MwDLOLcCeLxWJ2CQAAAAAAAIBLREZGml2CV+m0uafZJbjc/Ps/M7uEXM8nd6Lf+OESHx+vmJgYRUREKDw8PF3eYrFk6wOJvLl5R2My67On3QN558bQY8/Nu2oOeuy+vFk10WPn855YEz02fw56bG7eE2uix+bP4Q15b+qxJ9bkDXkze+yOOXwtDyDn8fcLAAAAAAAAAAA44JM70QEAAAAAAAD4nhTDz+wS4IXYiQ4AAAAAAAAAgAMsogMAAAAAAAAA4ACL6AAAAAAAAAAAOMCZ6AAAAAAAAAB8Qoo4Ex3Zx050AAAAAAAAAAAcYBEdAAAAAAAAAAAH/AzDMMwuwp0sFovZJQAAAAAAAAAuERkZaXYJXu
WJTb3NLsHlvq0/1ewScj2fPBP9xg+X+Ph4xcTEKCIiQuHh4enyFoslWx9I5M3NOxqTWZ897R7IOzeGHntu3lVz0GP35c2qiR47n/fEmuix+XPQY3PznlgTPTZ/Dm/Ie1OPPbEmb8ib2WN3zOFreWRPisGZ6Mg+jnMBAAAAAAAAAMABFtEBAAAAAAAAAHCARXQAAAAAAAAAABzwyTPRAQAAAAAAAPgezkSHM9iJDgAAAAAAAACAAyyiAwAAAAAAAADgAIvoAAAAAAAAAAA4wJnoAAAAAAAAAHwCZ6LDGexEBwAAAAAAAADAAT/DMAyzi3Ani8VidgkAAAAAAACAS0RGRppdgldp82s/s0twue8aTDa7hFzPJ49zufHDJT4+XjExMYqIiFB4eHi6vMViydYHEnlz847GZNZnT7sH8s6Noceem3fVHPTYfXmzaqLHzuc9sSZ6bP4c9NjcvCfWRI/Nn8Mb8t7UY0+syRvyZvbYHXP4Wh5AzvPJRXQAAAAAAAAAvocz0eEMzkQHAAAAAAAAAMABFtEBAAAAAAAAAHCARXQAAAAAAAAAABxgER0AAAAAAAAAAAd4sCgAAAAAAAAAn5AiHiyK7GMnOgAAAAAAAAAADrCIDgAAAAAAAACAAyyiAwAAAAAAAADggJ9hGIbZRbiTxWIxuwQAAAAAAADAJSIjI80uwau02DDA7BJcbtVDE80uIdfzyQeL3vjhEh8fr5iYGEVERCg8PDxd3mKxZOsDiby5eUdjMuuzp90DeefG0GPPzbtqDnrsvrxZNdFj5/OeWBM9Nn8Oemxu3hNrosfmz+ENeW/qsSfW5A15M3vsjjl8LQ8g53GcCwAAAAAAAAAADrCIDgAAAAAAAACAAz55nAsAAAAAAAAA35Ni+JldArwQO9EBAAAAAAAAAHCARXQAAAAAAAAAABxgER0AAAAAAAAAAAc4Ex0AAAAAAACAT+BMdDjDo3aiJyUlac+ePTpw4IAMwzC7HAAAAAAAAACAj/MzPGS1es+ePerVq5duu+02nTlzRnfeeae+/PJL5c+fXxs2bNDYsWN1+vRptWrVSiNGjFBISIhT81gsFhdXDgAAAAAAAJgjMjLS7BK8SpP1g8wuweXWNppgdgm5nkcc52Kz2TRo0CC98sorat++vaxWq5544gnNnTtXjRo1Up8+fdS7d2+1adNGH3zwgSZMmKAhQ4Y4Pd+NHy7x8fGKiYlRRESEwsPD0+UtFku2PpDIm5t3NCazPnvaPZB3bgw99ty8q+agx+7Lm1UTPXY+74k10WPz56DH5uY9sSZ6bP4c3pD3ph57Yk3ekDezx+6Yw9fyAHKeRxznEhcXpy5duqh9+/aSpDx58qh8+fK6ePGiZs+erYiICPXu3VulSpXS8OHDtXDhQiUmJppcNQAAAAAAAABvkmL45bov5DyPWEQvWLCgunTpYv/3li1btHnzZrVq1Ur79u1TgwYN7K8VL15chQoV0sGDB80oFQAAAAAAAADgQzziOJfrtWnTRgcPHtSAAQNUrVo1xcXFqXTp0mkyBQoU0OnTp3XPPfeYVCUAAAAAAAAAwBd43CL6jBkztGLFCk2YMEHVq1dXQEBAuoeIhoaGKj4+3uk5bhybkJCQ5n+zMia7c5B3bz6jMTfrs6fdA/nsj6HHnp13xRz02L15d8xBj12bd8cc9Ni1eXfMQY/NzbtjDnrs2rw75vD0vLf12B1z5La82T12xxy+lM/o+X4AXMvPMAzD7CIyMnToUF2+fFlnz55V69atFRUVZX+tTZs26tOnj1q2bJnt97VYLNk+Tz0kJCRbY8ibm/fEmsi7Nu+JNZE3fw7yrs17Yk3kzZ+DvGvznlgTedfmPbEm8ubPQd61eU+sibz5c/havmbNmlnOQnp47atml+ByPzcZZ3YJuZ5HLKLv3btXn332mSZNmiQ/v2uH4b/11lu6ePGibr/9dp06dUoTJ06UJFmtVtWtW1dz585VtWrVsj2XxWJR+fLl01
xLSEjQX3/9pbJlyyosLCzdmCNHjqQbkxny5uYdjcmsz552D+SdG0OPPTfvqjnosfvyZtVEj53Pe2JN9Nj8OeixuXlPrIkemz+HN+S9qceeWJM35M3ssTvm8LU8O9Gzh0V0OMMjjnMpV66cdu3apbfeeksvvfSSDh8+rO+++04ffvihbr/9dnXs2FHbtm1T7dq1NWXKFBUqVEiRkZFOz+fowyUsLMzha9n9QCJvbj6zMY767Gn3QN75MfTYM/OunIMeuyfvjjnosWvz7piDHrs274456LG5eXfMQY9dm3fHHN6S95Yeu2OO3Jo3q8fumMPX8gBylkcsooeHh2vGjBl699139eijj6p48eIaMWKEGjduLEnq16+funbtqoIFCyo+Pl4ff/yx/P39Ta4aAAAAAAAAAJDbecQiuiRVqVJFs2fPzvC1Hj16qHXr1jpw4IDuueceFS9e3M3VAQAAAAAAAPB2KfIzuwR4IY84E92dLBaL2SUAAAAAAAAALnErRx77ooZrXzO7BJf7pcmHZpeQ63nMTnR3uvHDJT4+XjExMYqIiMjwzCmLxZKtDyTy5uYdjcmsz552D+SdG0OPPTfvqjnosfvyZtVEj53Pe2JN9Nj8OeixuXlPrIkemz+HN+S9qceeWJM35M3ssTvm8LU8gJzHweIAAAAAAAAAADjAIjoAAAAAAAAAAA745HEuAAAAAAAAAHxPisGDRZF97EQHAAAAAAAAAMABFtEBAAAAAAAAAHCARXQAAAAAAAAAABzgTHQAAAAAAAAAPsHgTHQ4gZ3oAAAAAAAAAAA4wCI6AAAAAAAAAAAO+BmGYZhdhDtZLBazSwAAAAAAAABcIjIy0uwSvMoDP71hdgkut7HZWLNLyPV88kz0Gz9c4uPjFRMTo4iICIWHh6fLWyyWbH0gkTc372hMZn32tHsg79wYeuy5eVfNQY/dlzerJnrsfN4Ta6LH5s9Bj83Ne2JN9Nj8Obwh70099sSavCFvZo/dMYev5ZE9KZyJDidwnAsAAAAAAAAAAA6wiA4AAAAAAAAAgAMsogMAAAAAAAAA4IBPnokOAAAAAAAAwPcYnIkOJ7ATHQAAAAAAAAAAB1hEBwAAAAAAAADAARbRAQAAAAAAAABwgDPRAQAAAAAAAPiEFM5EhxPYiQ4AAAAAAAAAgAN+hmEYZhfhThaLxewSAAAAAAAAAJeIjIw0uwSvUmflULNLcLmtLd8zu4RczyePc7nxwyU+Pl4xMTGKiIhQeHh4urzFYsnWBxJ5c/OOxmTWZ0+7B/LOjaHHnpt31Rz02H15s2qix87nPbEmemz+HPTY3Lwn1kSPzZ/DG/Le1GNPrMkb8mb22B1z+FoeQM7zyUV0AAAAAAAAAL7Ht87kgKtwJjoAAAAAAAAAAA6wiA4AAAAAAAAAgAMsogMAAAAAAAAA4ABnogMAAAAAAADwCSnyM7sEeCF2ogMAAAAAAAAA4ACL6AAAAAAAAAAAOMAiOgAAAAAAAAAADvgZhmGYXYQ7WSwWs0sAAAAAAAAAXCIyMtLsErxKzR+HmV2Cy+1o9a7ZJeR6Pvlg0Rs/XOLj4xUTE6OIiAiFh4eny1sslmx9IJE3N+9oTGZ99rR7IO/cGHrsuXlXzUGP3Zc3qyZ67HzeE2uix+bPQY/NzXtiTfTY/Dm8Ie9NPfbEmrwhb2aP3TGHr+WRPYbBg0WRfRznAgAAAAAAAACAAyyiAwAAAAAAAADgAIvoAAAAAAAAAAA44JNnogMAAAAAAADwPSmciQ4nsBMdAAAAAAAAAAAHWEQHAAAAAAAAAMABFtEBAAAAAAAAAHCAM9EBAAAAAAAA+ATDMLsCeCN2ogMAAAAAAAAA4ICfYfjW718sFovZJQAAAAAAAAAuERkZaXYJXuXe70eYXYLL7X70bbNLyPV88jiXGz9c4uPjFRMTo4iICIWHh6fLWyyWbH0gkTc372hMZn32tHsg79wYeuy5eVfNQY/dlzerJnrsfN4Ta6LH5s
9Bj83Ne2JN9Nj8Obwh70099sSavCFvZo/dMYev5QHkPJ9cRAcAAAAAAADgewzDz+wS4IU4Ex0AAAAAAAAAAAdYRAcAAAAAAAAAwAEW0QEAAAAAAAAAcIAz0QEAAAAAAAD4BM5EhzPYiQ4AAAAAAAAAgAMsogMAAAAAAAAA4ACL6AAAAAAAAAAAOMCZ6AAAAAAAAAB8QgpnosMJfoZhGGYX4U4Wi8XsEgAAAAAAAACXiIyMNLsEr1J12Vtml+Bye9u9ZXYJuZ5P7kS/8cMlPj5eMTExioiIUHh4eLq8xWLJ1gcSeXPzjsZk1mdPuwfyzo2hx56bd9Uc9Nh9ebNqosfO5z2xJnps/hz02Ny8J9ZEj82fwxvy3tRjT6zJG/Jm9tgdc/haHkDO40x0AAAAAAAAAAAc8Mmd6AAAAAAAAAB8j28dbA1XYSc6AAAAAAAAAAAOsIgOAAAAAAAAAIADLKIDAAAAAAAAAOAAi+gAAAAAAAAA4IPOnTun3r17q0aNGmrfvr3279+fpXE2m01jx45V3bp11ahRI61YsSLN65s3b1abNm1UtWpV1a1bVzNnzrS/ZhiG6tatq8qVK9u/atWq5dL7cjUeLAoAAAAAAADAJxiGn9kleAzDMNS3b19J0qJFi7Rnzx717t1b3333nfLkyZPp2IkTJ2rZsmWaPHmygoKC1K9fP5UpU0ZVq1bV8ePH1bdvX7388st69NFH9dtvv+mNN97Q3XffrXr16uno0aNKTEzU77//roCAAEmSn59n94Wd6AAAAAAAAADgY3bu3Kldu3bpnXfeUYUKFfT444+rXLlyWrNmTabjkpKSNGfOHPXt21d16tRRjRo11KVLF82dO1eSdPDgQfXs2VPPPfecbrvtNrVr105lypTR7t27JUm7du1SjRo1VKhQIeXPn1/58+dXvnz5cvx+b4WfYRiG2UW4k8ViMbsEAAAAAAAAwCUiIyPNLsGrRCwZZXYJLhfz+JtOjZs9e7Zmzpyp9evX269NmTJFsbGxGjlypMNxBw4cUNu2bbVmzRqVKlVKkrRt2zaNGjVK33//fbp8XFycHnjgAY0fP17NmjXTsGHDtHnzZiUkJCghIUH16tXTsGHD7O/liXzyOJcbP1zi4+MVExOjiIgIhYeHp8tbLJZsfSCRNzfvaExmffa0eyDv3Bh67Ll5V81Bj92XN6smeux83hNrosfmz0GPzc17Yk302Pw5vCHvTT32xJq8IW9mj90xh6/lgSZNmmT6+tq1azO8HhcXp9KlS6e5VqBAAcXExGT6fnFxcQoICEiz6J0/f36dPn06w/zUqVNVokQJNWrUSJJ09OhRNWzYUM8//7ySk5P19ttva+DAgVq0aFGm82bFnj17tHLlSh04cEBnzpxRQECAihYtqsjISLVs2VKVK1d26n19chEdAAAAAAAAgO/hTPT/CQwMVEhISJproaGhio+Pv+m44ODgNNfCwsIyHLd582bNnj1bX375pQIDry1Fpx77kurtt99W06ZNdeTIEZUvX96ZW9GBAwf09ttv68KFC2rZsqW6d++uokWLymaz6ezZs9q6dav69OmjSpUqadiwYSpZsmS23p9FdAAAAAAAAADwUo52mt9MoUKFdO7cuTTXLl++nG6BPKNxCQkJunz5svLmzSvp2u70G8cdO3ZMgwYN0qBBg1S7dm2H71esWDFJ0vHjx51aRF+4cKEmTpyogQMH6sknn0z3euXKlfXggw+qf//++vLLL/XUU0/pvffeU8OGDbM8Bw8WBQAAAAAAAAAfU716dR06dEiXLl2yX4uOjtYdd9yR6bhSpUqpSJEi2rFjh8NxsbGx6tGjhxo1aqTnn3/efv2///5Tq1at0uxaT32fEiVKZPseDh06pC+//FJz587NcAH9egEBAerevbumTJmid999N8193wyL6AAAAAAAAADgYypVqqTy5ctrwoQJSklJ0d69e7V69Wo1btxYKSkpunTpkmw2W7px/v7+at26tS
ZNmqTLly/r/PnzmjVrlho3bixJSkxMVPfu3VW4cGENHjxYVqtVVqtVSUlJuu2225Q3b14NGzZMFotF69ev1/Dhw1W/fn1VrFjRqXv4/vvvVaZMmSyPqV69un788Uflz58/y2NYRAcAAAAAAADgE4xc+HUrxowZo7Vr16p+/frq2LGj2rZtq4YNG+rkyZOqXbu2Dh48mOG4l19+WUFBQWrYsKEaN26s0ND/x969x9lY7v8ff68ZDAvDyCnlGDWLNQ5pSEjRgSmlA+1yyI5SDhUp1IgQkUh0stMBWwfa0WHbhdS3k5JUbpZDSe0Q2W0MaxqM9fujR/PbY6wx95p71nWvWa/n4zGPvWet93Vdn7uPx/3H5Xbd5XX77bdLkj7++GNt3LhRa9euVXp6us4991yde+65evDBByVJTzzxhA4ePKibbrpJY8eO1eWXX645c+ZEfA0JCfa3uO2O4Ux0AAAAAAAAAIhDTZs21bvvvqu1a9cqJSVFzZs3lySdeeaZ2rJlS9hxlStX1qJFi7Ru3TodOXJEbdq0UdmyZSVJXbp0KXTs6aefrnnz5jl7ISWMTXQAAAAAAAAAiFNer1edOnWyPS4hIaHQF4aWJmyiAwAAAAAAAABiVufOneXxeIqcX7Vqla352UQHAAAAAAAAEBdCoaJvtCJ2DBs2rETn94RCoeKePx9TLMsyXQIAAAAAAADgCL/fb7qEmHL26xNNl+C4rdeNNV1CqReXT6KfeHMJBoMKBALy+Xzyer0F8pZl2bohkTebDzemsD677RrIRzaGHrs379Qa9Dh6eVM10ePI826siR6bX4Mem827sSZ6bH6NWMjHUo/dWFMs5E32OBprxFseQMlLMF0AAAAAAAAAAABuFdGT6Pv27dO7776rLVu2aO/evUpMTFSNGjXk9/t16aWXqkqVKk7XCQAAAAAAAADFE1cHW+Po0aPatWuXTj/9dB08eFDVq1ePaB5bT6Lv27dP9913n7p3766NGzcqLS1NN954o6677jr5fD599tlnuuyyy/Twww/r0KFDERUEAAAAAAAAAECksrOzNWrUKJ177rnq1q2btm/frqlTp6pHjx7au3ev7fmKvIn+8ccf66qrrtIZZ5yh1atXa/LkyerZs6c6deqkzp0764YbbtBjjz2mFStW6NixY+revbs2b95suyAAAAAAAAAAACL1yCOP6LPPPtN9992n48ePS5JuvfVWJSQkaOrUqbbnK9Im+rZt2zR69Gg9+eSTuuuuu1S+fPmw2eTkZI0bN07333+/brvtNh08eNB2UQAAAAAAAAAARGLFihUaO3as+vbtm/fZ2WefreHDh+vjjz+2PV+RzkRv0qSJ/vWvf6lSpUpFnvjSSy9Vu3btbI0BAAAAAAAAgJISCnlMl4AoOHbsmJKSkgp8npOTo1DI/sH4RT7OJTExUWvWrLE1ORvoAAAAAAAAAIBouvjii/Xoo4/mO25869atmjVrli6++GLb8xXpSXRJ2rVrlwYNGqRvvvmmwHdt27ZVhQoVlJiY+P8nLlNGPXr00B133GG7KAAAAAAAAAAAInH//fdr6NCh6tGjhyTp+uuvV25urs477zyNGTPG9nxF3kRPSkpSmTInjx84cEDjxo3L99maNWs0b948NtEBAAAAAAAAAFFTpUoVLViwQF988YW2bt0q6Y8jy9u2bRvRfJ5QEQ+B2blzp6655hp98cUXmjlzppKSknT06FHdeeed8vl8+R6Nl6Rvv/1WixYt0oQJE1SuXLmIiisJlmWZLgEAAAAAAABwhN/vN11CTGn82iTTJTjuu16Zpkso9Yr8JPr/evbZZ9WlSxd98sknGjp0aN7nr776qsqWLaucnBzdeOONat68uWOFOunEm0swGFQgEJDP55PX6y2QtyzL1g2JvNl8uDGF9dlt10A+sjH02L15p9agx9HLm6qJHkeed2NN9Nj8GvTYbN6NNdFj82vEQj6WeuzGmmIhb7LH0Vgj3vIATu7jjz
/WSy+9pB9//FG5ubmqX7+++vTpo86dO9ueq8gvFv1fHo9HTz75pKpUqZL3uyQ98sgj+vvf/66HH344kmkBAAAAAAAAACiWRYsWaeDAgTp06JAuvvhiXXbZZTp69KiGDBmi1157zfZ8ET2J/qc/N8//VLt2bb3++utKT08vzrQAAAAAAAAAAETk2Wef1cCBAzVy5Mh8n8+cOVPPPfecevXqZWu+Im2iv/rqq0XaoT9xUx0AAAAAAAAA3CIUYv8yHmRlZal9+/YFPj///PM1f/582/MV6TiXDRs26OjRo7YnBwAAAAAAAAAgmq699lq98MILOnz4cN5n2dnZWrhwobp162Z7viI9if7QQw9p9+7duvbaa4s0KU+kAwAAAAAAAACi4fbbb8/3eygU0qeffqpOnTopNTVVHo9HW7du1aFDh9SuXTvb8xdpEz0xMbFIG+M//vij/H6/cnNzbRcCAAAAAAAAAIBdKSkpBT7r3r17vt/PPPPMiOeP6MWioVBIQ4cO1f79+/N+l6R//etfSk5O1oEDByIuCAAAAAAAAACAopoyZUqJzh/RJvoNN9ygChUq6Prrr1dCQkLeU+p169aVJFWpUsW5CgEAAAAAAADACbxYNG4Eg0F9//33+v333/M+O3bsmNatW6ehQ4famssT+vMx8lP4+eef1aNHD3355ZcFvktNTVXPnj1POm7ixIm2Clq2bJlmzZql999/X5L0xhtvaPbs2Tp8+LB69eql4cOHKyGhSO9DPSnLsiIeCwAAAAAAALiJ3+83XUJMOeuVyaZLcNz3f7nfdAmu89lnn+nOO+/UoUOHJP3/k1Q8Ho9SUlL06aef2pqvyE+i5+Tk5Nu1/19du3ZVTk5O3lPpoVBIubm5Onr0qK1ifvnlF02aNEmVK1eWJP3f//2fHnjgAT300ENKT0/X6NGjtXDhQvXr18/WvCc68eYSDAYVCATk8/nk9XoL5C3LsnVDIm82H25MYX122zWQj2wMPXZv3qk16HH08qZqoseR591YEz02vwY9Npt3Y0302PwasZCPpR67saZYyJvscTTWiLc8gIKmTp2q7t27q0ePHrrpppv0f//3f/rvf/+rv/71rxo5cqTt+Yq8iV6vXj0tX778pN89/vjjthc+USgU0pgxY1S7dm0dPnxYkvTCCy/osssuy3vKffTo0brvvvuKvYkOAAAAAAAAACidduzYoQcffFDNmzdX/fr19dVXX+mSSy7R0KFD9fTTT+uqq66yNV+Rz0UpW7Zs3pnnJWHhwoXatWuX7rnnnrzPNm3apI4dO+b9npaWpl27dum3334rsToAAAAAAAAAlE6hUOn7QUHVq1dXIBCQJJ177rlav369JKlRo0batWuX7fkierGo03744Qc9/vjjeuGFF5SdnZ33eVZWlurXr5/3e2JioipWrKi9e/eqWrVqJkoFAAAAAAAAALhYnz59NHnyZNWsWVNdunTRiBEjVKFCBX388cc666yzbM9nfBM9NzdXo0aN0i233KLmzZvr888/z/suMTFR5cqVy5cvX7583nEvkQoGg/l+/3Pj/n838E81xu4a5KObP9mYU/XZbddA3v4YeuzuvBNr0OPo5qOxBj12Nh+NNeixs/lorEGPzeajsQY9djYfjTXcno+1HkdjjdKWN93jaKwRT/mTvd8PiHf9+/fXaaedpmrVqql169a64YYbNH/+fKWkpGjq1Km25/OEQmYf+n/yySf10Ucf6e9//7sSExP1+eefa8yYMXr//fd14YUXavz48ercuXNevnXr1nrxxReVlpYW0XqWZSknJ8fWmKSkJFtjyJvNu7Em8s7m3VgTefNrkHc278aayJtfg7yzeTfWRN7ZvBtrIm9+DfLO5t1YE3nza8RbvnXr1kXOQmr08mTTJThu+433my6h1DO+id65c2f95z//UdmyZSX98WR6dna2KlWqJJ/Pp+bNm+vee++VJG3fvl3dunXTJ598ourVq0e0nmVZatSoUb
7PsrOztWPHDjVo0EAVKlQoMGb79u0FxhSGvNl8uDGF9dlt10A+sjH02L15p9agx9HLm6qJHkeed2NN9Nj8GvTYbN6NNdFj82vEQj6WeuzGmmIhb7LH0Vgj3vI8iW5Po0WlcBP9JjbRS5rx41wWLVqkY8eO5f3+zTffaOrUqVq0aJG++eYbjR8/Xtdee60aNGig2bNnq0WLFhFvoP8p3M2lQoUKYb+ze0MibzZf2JhwfXbbNZCPfAw9dmfeyTXocXTy0ViDHjubj8Ya9NjZfDTWoMdm89FYgx47m4/GGrGSj5UeR2ON0po31eNorBFveQAly/gmeu3atfP9vnPnTpUpU0ZnnnmmzjzzTK1du1ZXX321KlWqJEl6/vnnTZQJAAAAAAAAAIhDxjfRT9S2bVu9//77eb+PHz9evXv31k8//aTWrVuratWq5ooDAAAAAAAAALjKmDFjbOWnTJliK2/8TPRosyzLdAkAAAAAAACAI/x+v+kSYkrDv9vbPI0FP/S2t4FcGpX0Jnqxn0T/8ssvlZycrLPPPru4U0XNiTeXYDCoQCAgn8930jOnLMuydUMibzYfbkxhfXbbNZCPbAw9dm/eqTXocfTypmqix5Hn3VgTPTa/Bj02m3djTfTY/BqxkI+lHruxpljIm+xxNNaItzwA+5vidhVrE33Dhg0aNWqUypQpo3nz5unMM890qi4AAAAAAAAAAIyLeBN9y5YtGj58uJ544gn99ttvuvXWW7VgwQJVr17dyfoAAAAAAAAAADAmIZJBO3bs0JAhQ/TII4+oWbNm6tixo4YOHaoBAwYoKyvL6RoBAAAAAAAAoPhCpfAHJc72JvrOnTs1aNAgjR8/Xuedd17e51dccYV69eqlgQMHKjs729EiAQAAAAAAAAAwwdYm+t69ezVw4EDdc8896tChQ4Hve/furY4dO2rw4ME6evSoY0UCAAAAAAAAAGBCkTfRDx8+rAEDBui2227TZZddFjY3dOhQNWrUSPfcc48jBQIAAAAAAAAAYEqRXyxasWJFTZgwQa1atTplduzYsVq7dm2xCgMAAAAAAAAAJ4VCHtMlIAbZOs7lZBvoS5cu1cGDBwt8np6eHnlVAAAAAAAAAAC4gO0Xi/6v3NxcjRkzRrt27XKqHgAAAAAAAAAAXMMTCoVCkQ7Ozc1Vs2bNtHTpUqWmpjpZV4mxLMt0CQAAAAAAAIAj/H6/6RJiSoMFj5guwXE7+o42XUKpV+Qz0UuTE28uwWBQgUBAPp9PXq+3QN6yLFs3JPJm8+HGFNZnt10D+cjG0GP35p1agx5HL2+qJnoced6NNdFj82vQY7N5N9ZEj82vEQv5WOqxG2uKhbzJHkdjjXjLw6aIHydGPCvWcS4AAAAAAAAAALjN0aNHlZOTI0k6fPiwVq5cqc2bN0c0F5voAAAAAAAAAIBS44svvlD79u21Zs0aHT58WFdffbWGDh2qa665Rv/4xz9sz1fsTXSPx1PcKQAAAAAAAAAAcMQjjzyiyy67TK1bt9a7776rnJwcvfvuu+rbt6/mzp1re75ib6IX472kAAAAAAAAAAA4avv27brqqqtUqVIlrV+/Xpdffrnq16+vbt26affu3bbnK9YmemJiojZv3qzU1NTiTAMAAAAAAAAAUeAphT84UbVq1bR582b9/vvv+vTTT9W6dWtJ0i+//KLq1avbno8z0QEAAAAAAAAApcZf/vIXTZ48Wenp6crJyVHHjh21atUqTZ06VVdccYXt+cqUQI0AAAAAAAAAABhx2223qVGjRvr55591+eWXq1KlSvrtt9/0l7/8Rbfeeqvt+dhEBwAAAAAAAACUKpdcckm+33v27BnxXBznAgAAAAAAACA+hErhDwr47rvvHJ3PEwqF4uo/tWVZpksAAAAAAAAAHOH3+02XEFMavDTVdAmO23HzKNMluE5qaqqaNGmijIwMXXHFFapXr16x5ovL41xOvLkEg0EFAgH5fD55vd4CecuybN2QyJ
vNhxtTWJ/ddg3kIxtDj92bd2oNehy9vKma6HHkeTfWRI/Nr0GPzebdWBM9Nr9GLORjqcdurCkW8iZ7HI014i0PoKC3335bH3zwgT744APNmTNH55xzjq644gp169ZNderUsT1fXG6iAwAAAAAAAABKp8aNG6tx48YaOHCgDhw4oI8++kirV6/W3/72NzVs2FAvv/yyrfnYRAcAAAAAAAAQH+LqYGtIUpUqVdS0aVPt2bNHP//8s7Zu3Wp7jog30X/99Vf9+OOP+u9//6vff/9dFSpUUM2aNeX3+5WQwPtKAQAAAAAAAADRd+zYMa1du1arV6/Whx9+qL1796pTp04aMGCAOnXqZHs+25voy5cv15NPPqnvvvtOlStXltfrlcfj0YEDB5Sdna0qVapoyJAh6tevn+1iAAAAAAAAAAAojjZt2ujYsWPq0KGDhg0bps6dO5/0XZhFZWsTfcGCBXrqqac0cuRIXXLJJapSpUq+77///nu99NJLmjJliipVqqRrr7024sIAAAAAAAAAALBr7NixuuSSS1S5cmVH5rO1if7cc89p4sSJuuSSS076/VlnnaUJEybol19+0auvvsomOgAAAAAAAAD3CHlMV4AouOaaaxydz9bh5bm5udqzZ88pc7///ruSk5MjLgoAAAAAAAAAADew9ST69ddfr2nTpiknJ0dXXnmlatasme/7QCCguXPn6quvvtLf//53RwsFAAAAAAAAACDaPKFQKFTUcCgU0hNPPKHnn39eR44cUZUqVZScnKyEhATt2bNH2dnZatq0qUaNGqW2bduWZN0RsyzLdAkAAAAAAACAI/x+v+kSYkqDF6aZLsFxO/56n+kSSj1bT6J7PB7dddddGjBggL766ivt2bNHOTk5KleunKpVq6bU1FSdeeaZJVWrY068uQSDQQUCAfl8vpO+pdWyLFs3JPJm8+HGFNZnt10D+cjG0GP35p1agx5HL2+qJnoced6NNdFj82vQY7N5N9ZEj82vEQv5WOqxG2uKhbzJHkdjjXjLw56iP04M/H9F3kT/97//rbp160qSKlWqpAsvvNDWGAAAAAAAAAAAYk2RXiz63Xff6YYbbtCKFSuKPPGCBQvUt29fHTx4MOLiAAAAAAAAAAAwqUib6I0bN9a8efP0yCOPaNSoUfr111/DZv/973/r9ttv1+uvv65FixYpOTnZsWIBAAAAAAAAAIimIh/n4vP59M477+ipp55SRkaGmjVrplatWqlGjRoKhULau3evPv/8c/34448aMGCA+vfvrzJlbB25DgAAAAAAAAAlhzPREQFbu9zly5fXiBEjNHToUH388cfasmWLvvvuOyUkJKhGjRoaOnSozj//fDbPAQAAAAAAAADGHDhwQO+//7527NihPn36aN26dapTp46aN29ue66IdrvLlSunzp07q3PnzpEMBwAAAAAAAACgRAQCAfXv31+HDh3S8ePH1a1bN61du1avvvqqnnrqKV144YW25ivSmegAAAAAAAAAAMSCiRMnKj09XZ9++qlCoT/O8Bk7dqx69+6txx9/3PZ8bKIDAAAAAAAAiA8hT+n7QQF/PolepUqVfJ9feuml2r59u+352EQHAAAAAAAAAJQaNWrUUCAQKPD5N998o5o1a9qezxP683n2OGFZlukSAAAAAAAAAEf4/X7TJcSU+s89aroEx/048F7TJbjOokWLNGXKFN14441asGCBhg8frv/+979atGiR7rvvPvXu3dvWfBG9WDTWnXhzCQaDCgQC8vl88nq9BfKWZdm6IZE3mw83prA+u+0ayEc2hh67N+/UGvQ4enlTNdHjyPNurIkem1+DHpvNu7Ememx+jVjIx1KP3VhTLORN9jgaa8RbHkBBN910kypUqKA5c+YoFAppxowZqlOnjh588EFdd911tueLy010AAAAAAAAAEDpdc011+iaa67R4cOHFQqFVKlSpYjnYhMdAAAAAAAAQFzwxNXB1pCkihUrFnsOXi
wKAAAAAAAAACg13nzzTf3www+OzccmOgAAAAAAAACg1Jg5c6Y++eQTx+ZjEx0AAAAAAAAAUGpcccUVevfddx2bj010AAAAAAAAAPEhVAp/UMCdd96pihUr6q677tKePXuKPV+Jvlj0yJEjKleuXEkuAQAAAAAAAABAnq5du0qSdu3apdWrV6tGjRr5vl+1apWt+Wxvou/bt08XXXSRvv76a5UpE374sWPH1KdPHw0fPlzt2rWzuwwAAAAAAAAAALYNGzbM0flsb6InJSXp2LFj6t69u0477TSdeeaZaty4sVq3bi2/36+yZctKkiZPnqwtW7aoevXqjhYMAAAAAAAAAEA411xzjaPzeUKhkK2Tc7KyspSenq7HHntMv/76q/bu3avt27fr66+/VigU0rXXXqujR4/qlVde0aOPPqpu3bo5WnBxWZZlugQAAAAAAADAEX6/33QJMaXBs9NNl+C4HYNGmi7B1fbv369QKKSUlJSI5yjSk+ihUEjbt2/XWWedJUnyeDy64oor8mWOHz+umTNn6m9/+5s8Ho/GjRvnug30P514cwkGgwoEAvL5fPJ6vQXylmXZuiGRN5sPN6awPrvtGshHNoYeuzfv1Br0OHp5UzXR48jzbqyJHptfgx6bzbuxJnpsfo1YyMdSj91YUyzkTfY4GmvEWx7Ayb311luaNWuWdu7cKUk644wzdNddd6l79+625yrSJvrnn3+u/v37q3HjxkpPT5f0x0tDf//9d23atElr167VihUrtHv3bv31r3/V8ePH9cwzz+jiiy9WrVq1bBcFAAAAAAAAAEAkli1bpjFjxuiqq67KOx99zZo1Gj16tCTZ3kgv0iZ6WlqaXnzxRW3cuFGffvqpkpKSdN555+no0aOqVq2aLrzwQt122226+OKLVbFiRUnS3r17dffdd+vll1+2VRAAAAAAAAAAAJF66qmnNGjQIN111115n1199dWqXbu25syZUzKb6BUrVpTf79f555+vc889V5s3b5bX69U//vEPrV27VjVq1NCVV16Zl8/JydHIkSPVs2dPLV68WD179rRVFAAAAAAAAAA4ztbbIRGrdu/erTZt2hT4vE2bNpo3b57t+Yq0iR4MBtW5c2dlZGSoSpUq+u677/Tkk0/KsixdfPHFSklJUffu3ZWRkaHevXvrH//4hz766CMNHTpUZ5xxhu2iAAAAAAAAAACIRNOmTfXKK6+oTZs2SkxMlPTHOz1feeUVNW3a1PZ8RdpE93q9Wrx4sV588UUdOXJEiYmJmjJlitatW6eBAwdKkh544AF9+umneu6555SUlKTMzExlZGTYLggAAAAAAAAAgEiNGTNG/fv3V7du3dS6dWtJ0ldffaW9e/fqpZdesj1fQlFCmzdv1po1a5SWlqb//ve/2r17t8qXL6+rrrpKP/30kySpXLlyWrBggcaPH6/ffvtNGzZssF0MAAAAAAAAAADF0aJFCy1evFitWrXShg0b9O2336pVq1ZasmSJmjdvbnu+Ij2JblmWXnrpJZUpU0ZZWVn65Zdf9OuvvyolJUWbN2/Wjz/+mJfNzc1V27ZttXz5cp1//vnq1KmT7aIAAAAAAAAAwHGciR43GjdurKlTpzoyV5E20a+//npdf/31Wrt2rb7//nvNnDlTv//+uyZNmiSv16uZM2fq66+/Vq9evZSUlKSrrrpK1apV05QpU3ThhRfK4/E4UiwAAAAAAAAAAKdy4MAB7d69W6mpqdq5c6c++OADXX755apevbrtuTyhUOiUf/9y/PhxjRs3Th988IH+8pe/yLIsdenSRTNnztSSJUt07Ngxde/eXSNHjtTcuXP1yiuvqE6dOurWrZtGjBihSy+9NKILLQmWZZkuAQAAAAAAAHCE3+83XUJMafD0dNMlOG7HHSNNl+A6Gzdu1MCBA9WmTRvNmjVLGzZsUO/eveX1evXiiy8qNTXV1nxFehI9FAqpYsWKWrZsmd58803l5OTo+uuv14YNGz
RhwgSNHDlSR48eVZ8+fdSzZ08lJSVJkrp166Zt27a5ahNdKnhzCQaDCgQC8vl88nq9BfKWZdm6IZE3mw83prA+u+0ayEc2hh67N+/UGvQ4enlTNdHjyPNurIkem1+DHpvNu7Ememx+jVjIx1KP3VhTLORN9jgaa8RbHkBBjzzyiFq3bq2xY8dKktLS0rR27Vrdd999mjp1ql544QVb8xVpEz0xMVGjR4+WJPXs2VNXXXWVJOmee+7Rzz//rNq1a+vJJ5+UpLwNdEm6+eabVaVKFVsFAQAAAAAAAECJ4Ez0uLBx40Y9++yz+Y5uSUpK0k033aTBgwfbni/B7oCKFSuqWrVqkqTk5GQ1bdpUFStW1EUXXVQgywY6AAAAAAAAACCakpOTtXXr1gKfb926VZUqVbI9X5GeRP/T0aNH9dprr+nqq68udLEjR46oW7duevrpp3X22WfbLgoAAAAAAAAAgEjceOONmj59ug4dOqRzzz1XkvTll19q7ty5uv32223PZ2sTPSEhQZMmTdIll1xS6CZ6uXLltHPnTiUmJtouCAAAAAAAAACASA0aNEiHDh3Sk08+qaNHjyoUCqls2bLq16+fBg0aZHs+W5voiYmJCoVCKlOmjObPn68NGzaoc+fOOv/885WSklIgn5Bg+7QYAAAAAAAAACgZIY/pChAl99xzj+644w599913kqSzzjpLFStWjGguW5vokuTx/PEH7eeff9a7776rt956SwkJCWrcuLHatGmj9PT0vEfkAQAAAAAAAAAwwev1qnnz5sWep8ib6Pv371dWVlbe7/fff79GjRqlHTt2aNOmTdq0aZO+/vprvfbaazp69GjeZjsAAAAAAAAAANFy8OBBzZgxQz169FDLli01depULVmyRHXr1tX06dPVqFEjW/Od8ryV48eP68UXX9Tll1+uWbNm5fsuMTFRZ511lrp3765Ro0bp5Zdf1pdffqnnn3/e3lUBAAAAAAAAAOCAhx56SJ9//rkqV66szz//XPPnz9dNN90kSXr44Ydtz3fKTfSEhAT985//VI8ePTRq1CiFQqGT5nbs2KFXXnlFY8eO1XnnnRc2BwAAAAAAAAAmeEKl7wcFffTRRxo5cqTOOussffzxx+rSpYuGDx+uESNG6Ouvv7Y9nydUhN3uY8eOqUyZP05+SU1N1SeffKLTTjtNX375pVatWqUVK1Zo586dSkpKUqtWrTRhwgRdeumlWr58uRo2bGi7qJJkWZbpEgAAAAAAAABH+P1+0yXElIZzHjNdguN+GHqP6RJcp23btnrsscfUoUMH9e3bV127dlXv3r21Zs0a3X333VqzZo2t+Yp0JvqfG+jHjx+Xx+NRbm6uvvjiC/Xr10+NGjVSRkaGOnXqpObNm6ts2bL2ryrKTry5BINBBQIB+Xw+eb3eAnnLsmzdkMibzYcbU1if3XYN5CMbQ4/dm3dqDXocvbypmuhx5Hk31kSPza9Bj83m3VgTPTa/RizkY6nHbqwpFvImexyNNeItD6CgNm3aaMKECWrevLm++uorTZgwQb/++qtefvlltWzZ0vZ8RX6xqCRlZ2crFAopJydH6enpWrp0qVJTUwvk/ny4PTc313ZBAAAAAAAAAABE6sEHH9S4ceO0bds2ZWZmqmHDhpo8ebI2b96sp59+2vZ8RdpE37Bhg84880ylpKRo7dq1qlSpkjwez0k30CXp6NGj6t69u5KSkmwXBAAAAAAAAABApGrUqKGnnnoq32cjRozQ/fffH9F8RdpEHzdunLZv365LL71UXbt2VUpKyinH3HjjjdqzZ4/+/e9/64ILLoioOAAAAAAAAABwDC/iLHUOHTqkSpUqnTJXvnx522P+dMpN9FAopBEjRmj16tV677339NZbb8nj8eT7vjAej0eBQKDIBQEAAAAAAAAAcCrbtm3TLbfcojlz5qhFixZFGrNy5UpNnDhRb731lpKTk4s05pSb6B6PRx06dFCHDh00ZswYvffee5
o/f76++eYbXXXVVRo+fLhq165dYFxubq6OHTumrKysIhUCAAAAAAAAAEBRNWnSRJMnT9Ydd9yhG2+8UYMGDVK5cuVOmj106JBmzpyp999/X08//XSRN9Almy8WLVOmjDIyMpSRkaEPP/xQkydP1pVXXqlFixbp7LPPzpdNTExUYmIi56IDAAAAAAAAAEpEx44d9cYbb2jatGm68MIL1aVLF7Vq1Uo1atRQKBTS3r179fnnn+vjjz9WRkaGli1bZmsDXbK5if6/OnXqpLZt22rz5s0FNtABAAAAAAAAAIiGWrVq6bHHHtOePXv03nvv6euvv9avv/4qj8ejmjVrqk2bNnrggQdUrVq1iOaPeBNdkv7zn/+oZcuWxZkCAAAAAAAAAIBiq1Wrlvr27ev4vAmRDjx+/LjuvPNO3X333Q6WAwAAAAAAAACAe3hCoVAokoFPPvmkZs+erRkzZigjI8PpukqMZVmmSwAAAAAAAAAc4ff7TZcQUxrOfsx0CY77Ydg9pkso9SI6zmX58uV68skn1bNnT3k8Hn399deqXbu2atasqYSEiB9uj5oTby7BYFCBQEA+n09er7dA3rIsWzck8mbz4cYU1me3XQP5yMbQY/fmnVqDHkcvb6omehx53o010WPza9Bjs3k31kSPza8RC/lY6rEba4qFvMkeR2ONeMvDHk9EjxOXXvv27dODDz6ozz77TA0bNtTkyZOVmpp6ynG5ubmaPn26/vGPf8jr9eree+/N96D1jh07NHbsWFmWpbS0ND3yyCOqU6dO3vdvvPGGZs+ercOHD6tXr14aPny4q/eVbW+iL1iwQI888oiuvvpq9e/fX1dccYU8Ho8kKSEhQTVq1FDt2rXVsmVLDR06VJUqVXK8aAAAAAAAAABA5EKhkIYOHSpJWrJkib799lsNHjxYb731lipWrFjo2Mcff1zLli3T7NmzVbZsWQ0bNkz169dXs2bNlJOTowEDBig1NVXLli3T8uXLNXToUC1ZskQJCQn6v//7Pz3wwAN66KGHlJ6ertGjR2vhwoXq169fNC47IkXe3t+xY4cGDBigKVOmaODAgZoyZUre5vnq1av18ssva+bMmRowYICaNm2qV199VTNmzCixwgEAAAAAAAAAkfnqq6+0fv16TZo0SWeddZauueYaNWzYUCtXrix03JEjR7Rw4UINHTpUbdq0UatWrdSvXz8tWrRIkvTuu+9q3759mjRpkurVq6dBgwYpOztbX331lSTphRde0GWXXaaePXuqQYMGeZvoblakJ9H/8Y9/KDMzU36/X6+88oqaN2+e953H41Ht2rVVu3btfGMSEhL0/vvv68EHH3S2YgAAAAAAAACAJKlLly6Ffr9q1aqTfr5p0ybVqVNHjRs3zvusVatW+uabb3T11VeHne+HH35QMBhUx44d841788038+ZNS0tTSkpK3vctW7bUN998o/POO0+bNm3Sfffdl/ddWlqadu3apd9++03VqlUr/GINKdImenp6up566ilddNFFRZ64Xr166tWrV6R1AQAAAAAAAICzQh7TFbhGVlaW6tWrl++zKlWqKBAInHJcYmKi6tatm/dZcnKy9uzZU+i8//t9/fr1875LTExUxYoVtXfv3tjeRK9bt26+/yhF4eYzbAAAAAAAAACgNAj3pPmplClTRklJSfk+K1++vILB4CnHlStXLt9nFSpUyBuXmJioxMTEAvPu27cv7/sTx5cvX16HDx+O6Dqiochnon///fcn/TwUCqlVq1bq1auXZsyYoR9++MGx4gAAAAAAAAAAzktJScnb2P7ToUOHCmxwn2xcdna2Dh06lPdZVlZW3rhTzRvpuiYVaRN97969uu6669SnTx+tX7++wPdjxoxR8+bN9c477+jKK6/U9OnTFQqFHC8WAAAAAAAAAFB8LVu21LZt23Tw4MG8zzZs2KDTTz+90HF169ZV9erVtW7dupOOa9WqldavX6/c3NyTft+yZct8Y7dv36
5Dhw6dcl2TirSJXrNmTc2YMUPHjh3TTTfdpHvuuUf79+9Xbm6uPB6PevbsqczMTK1YsUL333+/XnrpJY0ePbqkawcAAAAAAACAoguVwp8INWnSRI0aNdKMGTN0/Phxbdy4Ue+99546d+6s48eP6+DBg/k2wv+UkJCgjIwMPfHEEzp06JD++9//6sUXX1Tnzp0lSe3bt9exY8c0b948SdLq1av17bff5n1/1VVX6bXXXtP333+v3NxczZ49Wy1atFD16tUjv5gS5gnZfGR85cqVGj9+vDwej4YNG6bly5fr2Wefzfe4/ZtvvqlRo0Zp/PjxuuGGG04554IFCzRp0qR8n40ZM0b9+/fXt99+q4ceekjbt29X+/btNWnSJFWtWtVOyflYlhXxWAAAAAAAAMBN/H6/6RJiSqPHZ5guwXHb7x4R8dhNmzZp0KBBOnr0qLKysnT11Vdr8uTJ+vnnn9WlSxctXbpUPp+vwLisrCzdeuut2rp1q0KhkOrXr6+FCxeqUqVKkqSPP/5Yw4cPV9myZbV//34NHjxYQ4cOzRs/fvx4LVmyJC///PPPq2nTphFfx5++//57fffdd2rRooVq164t6Y/97CNHjqhZs2b5Xmhqh+1NdEk6cOCAxo4dq/fff1/jx4/X9ddfXyAzYcIEvfPOO1q+fPkp36o6YsQINWzYUDfffHPeZ+XLl9fBgwfVrVs3XXHFFRowYIBeeOEF7d27V3PmzLFbch7LsgrcXILBoAKBgHw+n7xeb5HG2F2DfPTy4cYU1me3XQP5yMbQY/fmnVqDHkcvb6omehx53o010WPza9Bjs3k31kSPza8RC/lY6rEba4qFvMkeR2ONeMvDHjbRCwoGg1q7dq1SUlLUvHnzIo87fvy41q1bpyNHjqhNmzYqW7Zsvu/379+vdevWqV69emrSpEmB8du2bdNPP/2k1q1bF+uhaUk6ePCgxowZo1WrVsnj8ehvf/ubOnToIEm66aab9NVXX8nj8ahTp0567LHHVLFiRVvzl4mkqCpVquiJJ57Q1KlTNW7cOLVp00b16tXLl7ntttu0c+fOAm94PZn169erd+/eSk5Ozvf54sWLVaFCBT3wwAMqW7asRo0apY4dO+qXX37J+5sEAAAAAAAAAEBkvF6vOnXqZHtcQkKC0tPTw35ftWpVdenSJez3TZo0OenmeiTGjx+v7du367nnnlNaWpqqVKmS993ChQu1f/9+ffnll5o4caImTZqkKVOm2Jq/SGeihzNq1Cg999xzBTbQJal27dp65plnTrmr/8svv2jXrl2aMGGC0tLS1KVLF82fP1/SH/+coF27dnl/i5GUlKTU1FR9/fXXxSkbAAAAAAAAQDwyfX65i85EL01Wr16tMWPGqEOHDvk20KU/NvurVaumyy67TJmZmXr33Xdtzx/Rk+j/q127dmG/83g8pxy/efNm1atXTyNGjJDP59Onn36qzMxM1a9fX1lZWUpNTc2Xr1Klivbs2VPcsgEAAAAAAAAApUDlypX166+/njKXlZV10uO8T6XYm+jFddFFF+miiy7K+71Hjx769NNPtWzZMiUmJhY4DqZ8+fIKBoPFWvPE8dnZ2fn+tyhj7K5BPrr5k405VZ/ddg3k7Y+hx+7OO7EGPY5uPhpr0GNn89FYgx47m4/GGvTYbD4aa9BjZ/PRWMPt+VjrcTTWKG150z2OxhrxlI9kQxAobfr06aNJkybp4MGD6tq1q04//fR83x86dEgrV67UlClT1KdPH9vzR/Ri0ZI2ffp0ffHFF6pXr56qV6+u0aNH5313++23Kz09XQMGDIhobsuylJOTY2tMUlKSrTHkzebdWBN5Z/NurIm8+TXIO5t3Y03kza9B3tm8G2si72zejTWRN78GeWfzbqyJvPk14i3funXrImchNZpZCl8sOrx4LxYtLZ599lk988wz+v3331W+fHklJycrMTFRhw8f1sGDBxUKhdSzZ0899NBDSkiwd8q58U302b
NnKykpSbfddlveZzfeeKNq1qypNm3aaOnSpVq8eLEkKRQK6cILL9SYMWOUkZER0XqWZalRo0b5PsvOztaOHTvUoEEDVahQocCY7du3FxhTGPJm8+HGFNZnt10D+cjG0GP35p1agx5HL2+qJnoced6NNdFj82vQY7N5N9ZEj82vEQv5WOqxG2uKhbzJHkdjjXjL8yS6PWfNKH2b6N+PYBP9T4cOHdLHH3+sbdu26bffftORI0fk9XrVoEEDdejQQfXr149oXuPHuTRv3lz33HOP6tevrzPPPFNvvPGGvv76a82fP18NGzbUlClT9M477+iKK67QokWLdPDgQbVv375Ya4a7uVSoUCHsd3ZvSOTN5gsbE67PbrsG8pGPocfuzDu5Bj2OTj4aa9BjZ/PRWIMeO5uPxhr02Gw+GmvQY2fz0VgjVvKx0uNorFFa86Z6HI014i0P4A+VKlVS165d1bVrV0fnNb6J3qlTJw0fPlwPP/yw9u/fr6ZNm2r+/PlKT0+XJE2aNEljxozRlClTtH//fo0fP77AG1YBAAAAAAAAACgJxjfRJal3797q3bv3Sb/r0aOHLrjgAn377bc655xzVLdu3ShXBwAAAAAAAACIV8bPRI82y7JMlwAAAAAAAAA4wu/3my4hpnAmeum0atUqW/kuXbrYyrviSfRoO/HmEgwGFQgE5PP5TnrmlGVZtm5I5M3mw40prM9uuwbykY2hx+7NO7UGPY5e3lRN9DjyvBtrosfm16DHZvNurIkem18jFvKx1GM31hQLeZM9jsYa8ZaHTXH1OHH8GDJkSJGzHo9HgUDA1vxxuYkOAAAAAAAAACgd7D6JblexNtE3btyocePGafPmzcrNzS3wvd0dfQAAAAAAAAAA7DjjjDNKdP5ibaKPGTNGkjR9+nRVq1bNkYIAAAAAAAAAAHCLYm2i//TTT3r66afVrl07p+oBAAAAAAAAgJLBmeiIQEJxBvv9fm3YsMGpWgAAAAAAAAAAcJVibaJPnDhRr732ml566SUdOXLEqZoAAAAAAAAAAHCFYh3nMmDAAAWDQU2ZMkXTpk1TzZo1lZDw//flS/qtqAAAAAAAAAAAlKRibaIPGzbMqToAAAAAAAAAoER5OBMdESjWJvo111zjVB0AAAAAAAAAALiOJxQKxdXfv1iWZboEAAAAAAAAwBF+v990CTGl8aMzTJfguO/uHWG6hFKvWE+id+/eXQMHDtTVV1/tVD1RceLNJRgMKhAIyOfzyev1FshblmXrhkTebD7cmML67LZrIB/ZGHrs3rxTa9Dj6OVN1USPI8+7sSZ6bH4Nemw278aa6LH5NWIhH0s9dmNNsZA32eNorBFveQAlr1ib6CkpKdq2bZtTtQAAAAAAAABAyQl5TFeAGJRQnMF33XWXXn/9dY5IAQAAAAAAAAC43ocffmh7TLGeRP/3v/+tSy+9VDfddJN69uyptLS0fN/36NGjONMDAAAAAAAAAGDLyJEjNXXqVCUmJuZ99t1332nKlCn67LPPtGnTJlvzFWsT/YknnpAkVa9eXatXr9bq1avzvvN4PGyiAwAAAAAAAACi6ttvv9WQIUP0xBNP6NChQ5o1a5aWLFmi888/X0uWLLE9X7E20d9///3iDAcAAAAAAACA6AmZLgDRsGjRIt16663q1auXdu7cqQYNGmjevHk6//zzI5qvWGeiAwAAAAAAAADgJtWrV9eCBQuUnJys8uXL69lnn414A10q5pPoS5cuLfR7jnMBAAAAAAAAAJSktWvXnvTz2267TZMmTVLfvn2VmZmpMmX+2A5PT0+3Nb8jZ6JLUigU0q+//qrc3FxVqFBBKSkpbKIDAAAAAAAAAEpU3759T5n561//KumPd3kGAgFb83tCoZBjJwHl5ubqvffe04wZMzRt2jS1atXKqakdY1mW6RIAAAAAAAAAR/j9ftMlxJQmj8w0XYLjto0ebrqEUq9YT6KfKDExUd26ddPpp5+uSZMmRfSm02
g48eYSDAYVCATk8/nk9XoL5C3LsnVDIm82H25MYX122zWQj2wMPXZv3qk16HH08qZqoseR591YEz02vwY9Npt3Y0302PwasZCPpR67saZYyJvscTTWiLc8gJLn6Cb6n/x+v3744YeSmBoAAAAAAAAAgLAsy9L48eO1efNm5ebmFvje7nEuxdpEP9mB7dnZ2XrjjTd0xhlnFGdqAAAAAAAAAABsu//++yVJ06dPV7Vq1Yo9X7E20U92YHtiYqLOOeccTZ48uThTAwAAAAAAAICzHHs7JNzsp59+0tNPP6127do5Ml+xNtE3b97sSBEAAAAAAAAAADjB7/drw4YNjm2iJzgyCwAAAAAAAAAALjBx4kS99tpreumll3TkyJFiz1esTfSlS5fqwIEDBT6fP3++Bg4cWJypAQAAAAAAAACwbcCAATp06JCmTJmiVq1a6eKLL1aXLl3yfuwq1nEuY8aM0ZIlS1SlSpV8n/t8Pj366KPFmRoAAAAAAAAAANuGDRvm6HzF2kQPhULyeDwFPt+/f3+BjXUAAAAAAAAAMMnDi0XjwjXXXOPofLY30d944w298cYbeb+PHTtWFStWzPs9NzdXmzZt0u233+5MhQAAAAAAAAAA2PDzzz/Lsiz9/vvveZ8dO3ZM69at05QpU2zN5QmFQrb+/uWLL77QF198IUmaM2eOevXqpZo1a+Z9X65cObVo0UJt27a1VUi0WJZlugQAAAAAAADAEX6/33QJMeXsyTNNl+C4rfcPN12C67zzzjsaNWqUQqGQQqGQateurQMHDigYDMrv92vx4sW25rP9JHqbNm3Upk0bSX9sot9www1q1qyZ3WmMOvHmEgwGFQgE5PP55PV6C+Qty7J1QyJvNh9uTGF9dts1kI9sDD12b96pNehx9PKmaqLHkefdWBM9Nr8GPTabd2NN9Nj8GrGQj6Ueu7GmWMib7HE01oi3PICC5syZozvuuEM9e/ZU586d9fe//11JSUnq3bu3MjIybM+XUJxi0tPT8x3lAgAAAAAAAACuFSqFPyhg9+7dateunWrWrKkmTZro66+/VrVq1XTHHXdowYIFtucr1ib6ggUL1KBBg+JMAQAAAAAAAACAY8444wx98sknkqTWrVtr7dq1kqQaNWrot99+sz2f7eNcTrRmzRotW7ZMP/74o6ZMmaK33npLlStX1s0331zcqQEAAAAAAAAAsGXQoEEaNWqU6tatq65du6p///7Kzs7W+vXrIzouqVib6O+8847uueceNW3aVIFAQNnZ2UpJSdG0adOUm5urW265pTjTAwAAAAAAAABgy1VXXaUzzjhDFStWVGpqqkaPHq3XX39dderUUWZmpu35irWJ/uSTT2rw4MG68847lZqaKknq3bu3EhMT9dxzz7GJDgAAAAAAAMA9OEM8brRu3Trv//fp00d9+vSJeK5inYm+c+dOtW/fvsDnjRo10t69e4szNQAAAAAAAAAARZKVlaVly5Zp7ty5Wrx4sfbs2VMgs3v3bk2YMMH23MV6Ev2cc87RW2+9lber7/F4JElvv/123pPpAAAAAAAAAACUlO3bt6tPnz767bffVKFCBWVnZ6tcuXKaMWOGLrnkEv3000969tlntWzZMlWrVk0PPvigrfmLtYk+cuRIDRgwQN988408Ho/mzJmjXbt2aevWrXruueeKMzUAAAAAAAAAAKc0ffp0Va1aVfPnz1fjxo2VlZWlhx56SOPGjdO//vUvLV++XNWrV9eoUaN0ww032J6/WJvobdq00Ztvvqm5c+fK4/Fo586dOvvss/Xoo4+qUaNGxZkaAAAAAAAAABzl4Uz0Uumrr77SuHHj1LhxY0lS5cqVdf/99+uCCy7QV199pfvvv189e/ZUuXLlIprfEwqF4uqPjmVZpksAAAAAAAAAHOH3+02XEFPOmTjTdAmO2zJ2uOkSjEtNTdXixYuVlpZW4POlS5cW++jxYj2JHqtOvLkEg0EFAgH5fD55vd4Cec
uybN2QyJvNhxtTWJ/ddg3kIxtDj92bd2oNehy9vKma6HHkeTfWRI/Nr0GPzebdWBM9Nr9GLORjqcdurCkW8iZ7HI014i0P4A87duxQQkJCgc+3b9+u3NzcfJ81a9bM1tzF2kR/8MEHlZGRofPPP7840wAAAAAAAAAAELH77rvvpJ+PGDFCHo9HkhQKheTxeBQIBGzNXaxN9PXr1+vMM89kEx0AAAAAAAAAYMT8+fNLdP5ibaLffPPNmjt3rv7yl78oOTnZqZoAAAAAAAAAACiSNm3alOj8xdpEv+CCC7Rp0yb17NlTI0aMKHBwe506dYpVHAAAAAAAAAAAJhVrE71z5855//+uu+4q9tkyAAAAAAAAAAC4SbE20VetWuVUHQAAAAAAAABQskKmC0AsKtYm+hlnnOFUHQAAAAAAAAAAuE5CSU5+/PhxtWnTRtu2bSvJZQAAAAAAAAAAKBEluokeCoV08OBB5ebmluQyAAAAAAAAAACUCE8oFCqxk4Byc3PVrFkzLV26VKmpqSW1jC2WZZkuAQAAAAAAAHCE3+83XUJMSX1opukSHLd53HDTJZR6xToTPVadeHMJBoMKBALy+Xzyer0F8pZl2bohkTebDzemsD677RrIRzaGHrs379Qa9Dh6eVM10ePI826siR6bX4Mem827sSZ6bH6NWMjHUo/dWFMs5E32OBprxFseQMmLy010AAAAAAAAAEDpdOTIES1evFhbtmzR0aNHC3w/ZcoUW/OV6JnoAAAAAAAAAABE05gxYzRlyhTt2bPHkfl4Eh0AAAAAAAAAUGp88MEHGj9+vK6//npH5mMTHQAAAAAAAEB8CJkuANFQpUoVpaSkODZfiR7nkpiYqPnz56t+/foluQwAAAAAAAAAAJKkIUOG6NFHH9XevXsdma/YT6KvWbNGy5Yt048//qgpU6borbfeUuXKlXXzzTdLktq0aVPsIgEAAAAAAAAAKIrdu3erfPnyuvzyy9WlSxfVq1dPCQn//3nyoUOH2pqvWJvo77zzju655x41bdpUgUBA2dnZSklJ0bRp05Sbm6tbbrmlONMDAAAAAAAAAGDL559/rsqVK8vv92vPnj35XjDq8Xhsz1esTfQnn3xSgwcP1p133qnU1FRJUu/evZWYmKjnnnuOTXQAAAAAAAAA7sGZ6HFhwYIFjs7nCYVCEf/RadGihZ5//nm1bt1aqampWrp0qVJTU/XFF19o4MCB+vbbb52s1RGWZZkuAQAAAAAAAHCE3+83XUJMSR0303QJjtv80HDTJcSM48ePa926dUpPT7c1rlhPop9zzjl666231Lp1a0n//1H4t99+O+/JdDc68eYSDAYVCATk8/nk9XoL5C3LsnVDIm82H25MYX122zWQj2wMPXZv3qk16HH08qZqoseR591YEz02vwY9Npt3Y0302PwasZCPpR67saZYyJvscTTWiLc8gIL+/e9/68EHH9TXX3+t33//vcD3gUDA1nwJp46EN3LkSL3++uu65ppr5PF4NGfOHF133XV64403dM899xRnagAAAAAAAAAAbHvooYd05MgRDRkyRB6PR88884wefPBBlS1bVpMnT7Y9X7E20du0aaM333xTqamp8vl82rlzp5o0aaJly5apbdu2xZkaAAAAAAAAABzlCZW+HxT09ddf66677tLAgQNVo0YNlSlTRjfeeKMGDhyoJUuW2J6vWMe5SFLDhg01ZcqU4k4DAAAAAAAAAECxJSUlKSsrS5LUsmVLbdy4Ue3bt9cFF1ygefPm2Z6v2JvoAAAAAAAAAAC4RZcuXTRu3Diddtppat++vZ577jm1bNlSb7/9tk477TTb8xXrOJcHH3xQa9asKc4UAAAAAAAAAAA4ZsyYMbr44ov1yy+/qEePHqpatar69eunxYsXa/DgwbbnK9aT6OvXr9eZZ56p888/vzjTAAAAAAAAAEDJ4wzxuFChQgVNnDgx7/dXXnlFW7duVdWqVVW7dm3b8xVrE/
3mm2/W3Llz9Ze//EXJycnFmQoAAAAAAAAAgGLbsGGDli5dqmAweNLv7b7js1ib6BdccIE2bdqknj17asSIEUpLS8v3fZ06dYozPQAAAAAAAAAAtgwePFhVq1ZVs2bN5PF4ij1fsTbRO3funPf/77rrrryCQqGQPB6PAoFA8aoDAAAAAAAAAMCGsmXLauTIkerUqZMj83lCoVDEJwHt3Lmz0O/POOOMSKcuMZZlmS4BAAAAAAAAcITf7zddQkxpmjnTdAmO2zRpuOkSXOfDDz/U2LFj1adPH9WsWbPA9z169LA1X7GeRHfjJnlRnHhzCQaDCgQC8vl88nq9BfKWZdm6IZE3mw83prA+u+0ayEc2hh67N+/UGvQ4enlTNdHjyPNurIkem1+DHpvNu7Ememx+jVjIx1KP3VhTLORN9jgaa8RbHkBBU6dO1W+//aZXXnmlwHcejye6m+i7du0q9HvORAcAAAAAAAAARNP+/fv1zDPPqEOHDo7MV+wz0Qs7mJ0z0QEAAAAAAAAA0XTbbbfp2WefVeXKlVWjRo0C39t9+LtYm+irVq3K+//Hjx/XL7/8on/+859atWqVZs2aVZypAQAAAAAAAMBZEb8dErHkkUcekSTdcMMNeZ95PB6FQiF5PB7bD387eiZ63bp1lZ6errS0NP3tb3/TU089VZzpAQAAAAAAAACw5X8f/nZCsTbRw7n66qs1ZcqUkpgaAAAAAAAAAICwTnz4u7gSHJ1NUk5OjhYtWlTg7c4AAAAAAAAAAJS02267TStXrnRsvmI9iZ6amnrSF4uWLVtWEyZMKM7UAAAAAAAAAOAszkSPC/v379fmzZt1ySWXODJfsTbR58+fX+CzxMRENWrUSCkpKcWZGgAAAAAAAAAA2wYNGqTx48erR48eOvPMM4s9X7E20du0aVPsAgAAAAAAAAAAcEpycrIuueQS9ezZU7feeqvS0tLyfZ+enm5rPk8oFIr4HzHs2rVLNWvWVJky+ffi33nnHe3YsUNDhgyJdOoSY1mW6RIAAAAAAAAAR/j9ftMlxJSm9880XYLjNk0ebroE10lNTQ37ncfjUSAQsDVfsZ5E79Kli5YsWaJmzZrl+7xKlSqaN2+eKzfRpYI3l2AwqEAgIJ/Pd9IXolqWZeuGRN5sPtyYwvrstmsgH9kYeuzevFNr0OPo5U3VRI8jz7uxJnpsfg16bDbvxprosfk1YiEfSz12Y02xkDfZ42isEW952OPhTPS4sHnzZkfnSyjO4HAPsZcrV04JCcWaGgAAAAAAAAAA42w/if7FF1/oiy++yPv91VdfVc2aNfN+z83N1YoVK3TxxRc7UyEAAAAAAAAAAA74+eefbb9s1PYm+s6dO/X555/n/W5ZlipWrJj3e1JSki6//HINHDjQ7tQAAAAAAAAAABTL/v37NX36dH399dfKzs7O+zw3N1e//vqrNm7caGs+25vo11xzja655hpJfxzQPnHixAJnogMAAAAAAAAAYML48eO1bds2nX/++XrllVc0btw4/fTTT3r++ec1evRo2/MV68WiAAAAAAAAABAzeLFoXPjss880Y8YMtW/fXsuXL1fDhg3Vq1cv/f777/roo4/Ur18/W/MV6+2fmzdv5il0AAAAAAAAAICrhEJ//I1JixYt8o5vufzyy/Xll1/anqtYT6LPmTOn0O+HDh1anOkBAAAAAAAAALDlggsu0OTJk/X444/r/PPP19KlS3XZZZdpzZo1+d7vWVTF2kT/3xeMhkIh7d69Wzt37lRKSoqaNGlSnKkBAAAAAAAAALAtMzNT9957rzZu3KjrrrtO8+fPV5cuXSRJd911l+35PKE/n2t3yLfffqv7779fw4cPzyvMjm3btqlnz5566aWX1KJFC0nShx9+qKlTp2rPnj3q1q2bxo4dq6SkpIjqsywronEAAAAAAACA2/j9ftMlxJRmo2aaLsFxG6cON12C6x0+fFhr1qxRSkqKzj33XNvjHd9El6Tt27
frzjvv1Ntvv21r3NGjR3XDDTcoPT1dY8aMkSRt2bJF1113nQYPHqzu3btr2rRpqlOnTt73dlmWVeDmEgwGFQgE5PP55PV6izTG7hrko5cPN6awPrvtGshHNoYeuzfv1Br0OHp5UzXR48jzbqyJHptfgx6bzbuxJnpsfo1YyMdSj91YUyzkTfY4GmvEWx72sImOSBTrxaLhnH766dq9e7ftcU8//bQOHjyou+++O++zBQsWyOfzafDgwapbt64yMzO1ePFi5eTkOFgxAAAAAAAAAKC0eP3119W3b1916tRJ3333nSZOnKjJkyfr2LFjtucq1pnoS5cuLfBZdna23n77bZ199tm25rIsS88++6z69eun5cuX69xzz1WDBg20adMmXXTRRXm5WrVqKSUlRVu3blVaWlpxygcAAAAAAAAAlDIvvfSSpk2bps6dO+vLL7/UsWPH1Lp1a02YMEHly5fXiBEjbM1XrE30J554ouCEZcooNTVV9957b5HnCYVCGjdunCpWrCiPx6MtW7Zo6tSpuuOOO5SVlaV69erly1epUkV79uxhEx0AAAAAAABAkXkcP9gabrRgwQKNGTNGffr0UWpqqiQpIyNDx48f16OPPhrdTfT333+/OMPzrFu3TpZl6ZlnntHFF18sSTrvvPN01113qU6dOgVeIlq+fHkFg8GI1ztxbHZ2dr7/LcoYu2uQj27+ZGNO1We3XQN5+2PosbvzTqxBj6Obj8Ya9NjZfDTWoMfO5qOxBj02m4/GGvTY2Xw01nB7PtZ6HI01SlvedI+jsUY85U/2fj8g3u3bty9v8/x/nXbaaTpw4IDt+Wy9WPSjjz5ScnKyWrRoYXuhwrz11lsaPXq0vv32WyUmJkqS9uzZowsvvFAJCQm6//771bdv37x89+7dNWTIEHXt2tX2WpZl2T5PPSkpydYY8mbzbqyJvLN5N9ZE3vwa5J3Nu7Em8ubXIO9s3o01kXc278aayJtfg7yzeTfWRN78GvGWb926dZGzkPz3lb4Xi1rTeLHoifr376/k5GTNmDFDfr9fy5YtU6NGjTR8+HAdOnRIL774oq35bG2it2/fXuPHj9ell14qSerSpYueeeYZNWnSxNaiJ1q3bp369OmjL7/8UhUrVsz77KabbtK1116r7OxsPf7445Kkw4cPq23btlq0aJGaN29uey3LstSoUaN8n2VnZ2vHjh1q0KCBKlSoUGDM9u3bC4wpDHmz+XBjCuuz266BfGRj6LF7806tQY+jlzdVEz2OPO/Gmuix+TXosdm8G2uix+bXiIV8LPXYjTXFQt5kj6OxRrzleRLdHjbR48PmzZvVv39/JSYm6j//+Y9atGihXbt26ciRI3rppZdO+pR6YWwd55KVlaUaNWrk/b5z504dOXLE1oIn06JFCzVs2FAPPvig7r77bh08eFAPP/ywLrjgAvXt21e9evXS2rVrlZ6erjlz5iglJUV+vz/i9cLdXCpUqBD2O7s3JPJm84WNCddnt10D+cjH0GN35p1cgx5HJx+NNeixs/lorEGPnc1HYw16bDYfjTXosbP5aKwRK/lY6XE01iiteVM9jsYa8ZaHDZyJHhdSU1P1r3/9SwsXLtTWrVslSR06dFCfPn2UkpJiez5bm+jNmjXTwoULlZOTo4SEBEnSpk2bwp7TlJ6eXrQiypTRvHnzNHXqVF1//fU6cuSI2rVrp4kTJ+q0007TsGHD1L9/f1WtWlXBYFCzZs3KWx8AAAAAAAAAgP9VtWpVDR061JG5bG2iT5w4Uffff78GDBigY8eOyePxaOzYsSfNejweBQKBIs99+umn5x3ZcqJBgwYpIyNDW7ZsUVpammrVqmWnbAAAAAAAAABAHNm3b59ef/11/fjjj0pISFCDBg107bXXqlq1arbnsnUm+olSU1P1+uuvq1mzZpFOEXWWZZkuAQAAAAAAAHBEcY48jkf+e0vhmeiPcib6idavX69bbrlFktSoUSOFQiFt375diY
mJeuaZZ4p8gsqfbD2JXlqceHMJBoMKBALy+XwnPXPKsixbNyTyZvPhxhTWZ7ddA/nIxtBj9+adWoMeRy9vqiZ6HHnejTXRY/Nr0GOzeTfWRI/NrxEL+VjqsRtrioW8yR5HY414y8MeD2eix4WHH35Y7dq106OPPqqKFStK+uN9nyNHjtTEiRP15ptv2pqvWAeLz58/Xw0bNizOFAAAAAAAAAAAOOa7775T37598zbQJaly5crq37+/duzYYXu+Ym2it2nThrcFAwAAAAAAAABco3nz5lqzZk2Bzz/66CPbR7lIcXqcCwAAAAAAAACgdEpNTdXf/vY3bdiwQa1atVIoFNLatWu1fv163XzzzXrhhRckSX/961+LNF+xnkQHAAAAAAAAgJgRKoU/JeDDDz9URkaGWrdurczMTOXk5BR57I4dO9S3b1+1atVK/fr1065du/K+C4VCmjlzps4//3w1a9ZMPXr00MaNG/O+f//993XOOefk+3n44Ydt179y5UrVrl1bO3bs0BtvvKGlS5dq586dqlmzppYvX64FCxZo4cKFRZ6PJ9EBAAAAAAAAAJKkLVu2aMiQIRo8eLC6d++uadOmacaMGRozZswpx+bk5GjAgAFKTU3VsmXLtHz5cg0dOlRLlixRQkKC5s2bp3/961+aM2eO6tWrp0mTJmnYsGFatWqVPB6P1q9fr2uvvTbfWuXKlbN9De+//77tMYXhSXQAAAAAAAAAgCRpwYIF8vl8Gjx4sOrWravMzEwtXry4SE+jv/vuu9q3b58mTZqkevXqadCgQcrOztZXX30lSfrll1/02GOP6bzzzlPNmjXVv39/7dy5U7/++qskaf369WrXrp2Sk5PzfsqXL1/k2nft2qVgMJjvsxUrVmjWrFlasmSJsrKybPyX+P94Eh0AAAAAAAAAYlSXLl0K/X7VqlW25tu0aZMuuuiivN9r1aqllJQUbd26VWlpaaccm5aWppSUlLzPWrZsqW+++UbnnXeeMjMz8+W3bdumqlWr6rTTTtPRo0e1YcMGHThwQGPHjlVycrKuu+463XnnnUpIKPxZ8D179ujee+/V2rVr9fe//13nnnuuQqGQ7rrrLq1YsUIVK1ZUTk6OZs2apfnz56thw4a2/puwiQ4AAAAAAAAgPpTQGeKxZvjw4froo49O+l2VKlVUr169Ap/t2bPnlJvoWVlZYceeKDs7W88++6z69eunxMREbdu2TcnJybr99tuVnp6ujRs36t5771WNGjXUu3fvQtfNzMzU7t27NWvWLPl8PknSvHnz9N577+nWW2/ViBEjdPjwYd19992aOnWqnnnmmULnOxGb6AAAAAAAAAAQo+w+aS79semcnZ190u9uueUWJSUl5fusfPnyBY5JOZnExEQlJiYWGLtv374C2YceekgVK1bUwIEDJUmpqan5NvZr1qypvn376s033zzlJvratWs1a9YsderUSZJ06NAhzZ07V+eee67uueceSVKlSpX017/+Ne93OzyhUCiu/v7FsizTJQAAAAAAAACO8Pv9pkuIKWkjZpouwXEbZgx3dL4bb7xRGRkZ6tu3b95n3bt315AhQ9S1a9dCx86cOVPbt2/X7Nmz8z6bNGmSjh8/rgcffDDvs4ULF+qJJ57Q4sWLVb9+/bDzvfzyy5ozZ44++eSTQte96KKL9MADD+jSSy+VJM2ePVtPPfWUXnzxRbVt2zYv995772nChAn6+OOPC53vRHH5JPqJN5dgMKhAICCfzyev11sgb1mWrRsSebP5cGMK67PbroF8ZGPosXvzTq1Bj6OXN1UTPY4878aa6LH5Neix2bwba6LH5teIhXws9diNNcVC3mSPo7FGvOUBp7Vs2VLr1q3L20Q/fPiwfvjhB9WpU+eUY1u1aqXXX39dubm5eU+kb9iwQZdcckleZuXKlZo2bZqeffbZfBvoixcvViAQyLfZvm7dOp1++umnXLdHjx56+OGHtWvXLv3nP//R888/rw
4dOuRtoB86dEibN2/WY489lm9TvagKP5EdAAAAAAAAABA3unfvrpUrV2rt2rWSpDlz5iglJSXvL3dycnJ06NChk45t3769jh07pnnz5kmSVq9erW+//VadO3eW9Mem+N13361Ro0apefPmOnz4sA4fPqzc3FylpaXp9ddfz9tMf+qpp/T222+rX79+p6x56NChuvLKK/Xss8/q+eefV8eOHfXoo4/mfd+nTx/16dNHXq9Xo0ePtv3fJC6fRAcAAAAAAAAQfzymC4gBTZs21bBhw9S/f39VrVpVwWBQs2bNUkLCH89jz507VytXrtSyZcsKjC1btqymT5+u4cOH68UXX9T+/fs1ZMgQnXXWWZKkF154QUePHtWECRM0YcKEvHHz589X27ZtNW3aND322GPatWuXGjVqpNmzZ+cd0VKYMmXKaOTIkRo5cqSOHz+eV+ufhg8frsqVK6tFixYFzmwvCjbRAQAAAAAAAAB5Bg0apIyMDG3ZskVpaWmqVatW3nfDhg3TsGHDwo7t0KGDVqxYoXXr1qlevXpq0qRJ3ndz5swpdN3LL79cl19+ebFqP3EDXVLeC0cjxSY6AAAAAAAAACCfunXrqm7duhGNrVq1qrp06eJwReZwJjoAAAAAAAAAAGHwJDoAAAAAAACA+BAyXQBiEU+iAwAAAAAAAAAQBpvoAAAAAAAAAACE4QmFQnH1jxgsyzJdAgAAAAAAAOAIv99vuoSY0nz4TNMlOO7bmcNNl1DqxeWZ6CfeXILBoAKBgHw+n7xeb4G8ZVm2bkjkzebDjSmsz267BvKRjaHH7s07tQY9jl7eVE30OPK8G2uix+bXoMdm826siR6bXyMW8rHUYzfWFAt5kz2Oxhrxloc9nrh6nBhO4TgXAAAAAAAAAADCYBMdAAAAAAAAAIAw2EQHAAAAAAAAACCMuDwTHQAAAAAAAEAc4kx0RIAn0QEAAAAAAAAACINNdAAAAAAAAAAAwmATHQAAAAAAAACAMDgTHQAAAAAAAEB84Ex0RIAn0QEAAAAAAAAACMMTCoXi6u9fLMsyXQIAAAAAAADgCL/fb7qEmNLizpmmS3DcN08MN11CqReXx7mceHMJBoMKBALy+Xzyer0F8pZl2bohkTebDzemsD677RrIRzaGHrs379Qa9Dh6eVM10ePI826siR6bX4Mem827sSZ6bH6NWMjHUo/dWFMs5E32OBprxFseQMmLy010AAAAAAAAAPHHE1dncsApnIkOAAAAAAAAAEAYbKIDAAAAAAAAABAGm+gAAAAAAAAAAITBmegAAAAAAAAA4gNnoiMCPIkOAAAAAAAAAEAYbKIDAAAAAAAAABAGm+gAAAAAAAAAAITBJjoAAAAAAAAAAGF4QqFQXB2nb1mW6RIAAAAAAAAAR/j9ftMlxJRWQ2aaLsFx658cbrqEUq+M6QJMOPHmEgwGFQgE5PP55PV6C+Qty7J1QyJvNh9uTGF9dts1kI9sDD12b96pNehx9PKmaqLHkefdWBM9Nr8GPTabd2NN9Nj8GrGQj6Ueu7GmWMib7HE01oi3PICSx3EuAAAAAAAAAACEwSY6AAAAAAAAAABhxOVxLgAAAAAAAADiUFy9HRJO4Ul0AAAAAAAAAADCYBMdAAAAAAAAAIAw2EQHAAAAAAAAACAMzkQHAAAAAAAAEBc8nImOCPAkOgAAAAAAAAAAYbCJDgAAAAAAAABAGJ5QKBRX/4jBsizTJQAAAAAAAACO8Pv9pkuIKefeMdN0CY776unhpkso9eLyTPQTby7BYFCBQEA+n09er7dA3rIsWzck8mbz4cYU1me3XQP5yMbQY/fmnVqDHkcvb6omehx53o010WPza9Bjs3k31kSPza8RC/lY6rEba4qFvMkeR2ONeMvDprh6nBhO4TgXAAAAAAAAAADCYBMdAAAAAAAAAIAw2EQHAAAAAAAAACCMuDwTHQAAAAAAAEAc4kx0RIAn0QEAAAAAAAAACINNdAAAAAAAAA
AAwmATHQAAAAAAAACAMDgTHQAAAAAAAEBc8HAmOiLgCYVCcfVHx7Is0yUAAAAAAAAAjvD7/aZLiCmtb5tpugTHrZs73HQJpV5cPol+4s0lGAwqEAjI5/PJ6/UWyFuWZeuGRN5sPtyYwvrstmsgH9kYeuzevFNr0OPo5U3VRI8jz7uxJnpsfg16bDbvxprosfk1YiEfSz12Y02xkDfZ42isEW95ACWPM9EBAAAAAAAAAAgjLp9EBwAAAAAAABCH4upgaziFJ9EBAAAAAAAAAAiDTXQAAAAAAAAAAMJgEx0AAAAAAAAAgDA4Ex0AAAAAAABAXPCEOBQd9vEkOgAAAAAAAAAAYbCJDgAAAAAAAABAGGyiAwAAAAAAAAAQhicUiq+DgCzLMl0CAAAAAAAA4Ai/32+6hJhy3sAZpktw3JfPjTBdQqkXly8WPfHmEgwGFQgE5PP55PV6C+Qty7J1QyJvNh9uTGF9dts1kI9sDD12b96pNehx9PKmaqLHkefdWBM9Nr8GPTabd2NN9Nj8GrGQj6Ueu7GmWMib7HE01oi3PGyKq8eJ4RSOcwEAAAAAAAAAIAw20QEAAAAAAAAACINNdAAAAAAAAAAAwojLM9EBAAAAAAAAxB8PZ6IjAjyJDgAAAAAAAABAGGyiAwAAAAAAAAAQBpvoAAAAAAAAAACEwZnoAAAAAAAAAOIDZ6IjAjyJDgAAAAAAAABAGJ5QKBRXf/9iWZbpEgAAAAAAAABH+P1+0yXElPS/zjBdguPWvjDCdAmlXlwe53LizSUYDCoQCMjn88nr9RbIW5Zl64ZE3mw+3JjC+uy2ayAf2Rh67N68U2vQ4+jlTdVEjyPPu7Ememx+DXpsNu/Gmuix+TViIR9LPXZjTbGQN9njaKwRb3kAJS8uN9EBAAAAAAAAxB9PXJ3JAadwJjoAAAAAAAAAAGGwiQ4AAAAAAAAAQBhsogMAAAAAAAAAEAZnogMAAAAAAACID5yJjgjwJDoAAAAAAAAAAGGwiQ4AAAAAAAAAQBhsogMAAAAAAAAAEIYnFAoZPwnoH//4h8aMGXPS77Zs2aIPP/xQU6dO1Z49e9StWzeNHTtWSUlJEa1lWVZxSgUAAAAAAABcw+/3my4hprTtN8N0CY77fP4I0yWUeq54seiVV16pSy65JN9nTz/9tLZt26YtW7ZoyJAhGjx4sLp3765p06ZpxowZYTfdi+LEm0swGFQgEJDP55PX6y2QtyzL1g2JvNl8uDGF9dlt10A+sjH02L15p9agx9HLm6qJHkeed2NN9Nj8GvTYbN6NNdFj82vEQj6WeuzGmmIhb7LH0Vgj3vIASp4rjnMpV66ckpOT835+//13LV68WGPGjNGCBQvk8/k0ePBg1a1bV5mZmVq8eLFycnJMlw0AAAAAAAAAKOVcsYl+oieffFJdu3bVWWedpU2bNqljx45539WqVUspKSnaunWrwQoBAAAAAAAAAPHAdZvo+/bt07JlyzRgwABJUlZWlurVq5cvU6VKFe3Zs8dEeQAAAAAAAABiVagU/qDEueJM9P+1cOFCtWvXTg0bNpQkJSYmFniJaPny5RUMBiNe48Sx2dnZ+f63KGPsrkE+uvmTjTlVn912DeTtj6HH7s47sQY9jm4+GmvQY2fz0ViDHjubj8Ya9NhsPhpr0GNn89FYw+35WOtxNNYobXnTPY7GGvGUP9n7/QA4yxMKhVzz9xXHjx/XxRdfrAceeECXXXaZJOnGG29URkaG+vbtm5fr3r27hgwZoq5du9pew7Is2+epJyUl2RpD3mzejTWRdzbvxprIm1+DvLN5N9ZE3vwa5J3Nu7Em8s7m3VgTefNrkHc278aayJtfI97yrVu3LnIWUtu+M0yX4LjPF4wwXUKp56pN9E8++UR33XWXPv30U5UrV06SNHXqVO3evVuPP/64JOnw4cNq27atFi1apObNm9tew7IsNWrUKN9n2dnZ2rFjhxo0aKAKFSoUGLN9+/YCYwpD3m
w+3JjC+uy2ayAf2Rh67N68U2vQ4+jlTdVEjyPPu7Ememx+DXpsNu/Gmuix+TViIR9LPXZjTbGQN9njaKwRb3meRLeHTXREwlXHuaxatUpt2rTJ20CX/njqvFevXlq7dq3S09M1Z84cpaSkyO/3R7xOuJtLhQoVwn5n94ZE3my+sDHh+uy2ayAf+Rh67M68k2vQ4+jko7EGPXY2H4016LGz+WisQY/N5qOxBj12Nh+NNWIlHys9jsYapTVvqsfRWCPe8gBKlqs20T/66CP17t0732dNmzbVsGHD1L9/f1WtWlXBYFCzZs1SQoLr3okKAAAAAAAAwMU8rjmTA7HEVZvoK1asOOnngwYNUkZGhrZs2aK0tDTVqlUrypUBAAAAAAAAAOKRq85EjwbLskyXAAAAAAAAADiiOEcex6Pz+5S+M9HXLORM9JLmqifRo+XEm0swGFQgEJDP5zvpmVOWZdm6IZE3mw83prA+u+0ayEc2hh67N+/UGvQ4enlTNdHjyPNurIkem1+DHpvNu7Ememx+jVjIx1KP3VhTLORN9jgaa8RbHkDJi8tNdAAAAAAAAABxKL4O5YBDeDsnAAAAAAAAAABhsIkOAAAAAAAAAEAYbKIDAAAAAAAAABAGZ6IDAAAAAAAAiAsejkRHBHgSHQAAAAAAAACAMNhEBwAAAAAAAAAgDDbRAQAAAAAAAAAIgzPRAQAAAAAAAMQHzkRHBDyhUCiu/uhYlmW6BAAAAAAAAMARfr/fdAkxpd2Nj5kuwXGfvXyP6RJKvbh8Ev3Em0swGFQgEJDP55PX6y2QtyzL1g2JvNl8uDGF9dlt10A+sjH02L15p9agx9HLm6qJHkeed2NN9Nj8GvTYbN6NNdFj82vEQj6WeuzGmmIhb7LH0Vgj3vIASh5nogMAAAAAAAAAEEZcPokOAAAAAAAAIP54jpuuALGIJ9EBAAAAAAAAAAiDTXQAAAAAAAAAAMJgEx0AAAAAAAAAgDA4Ex0AAAAAAABAfAiZLgCxiCfRAQAAAAAAAAAIg010AAAAAAAAAADC8IRCobj6RwyWZZkuAQAAAAAAAHCE3+83XUJMuaDXY6ZLcNynr91juoRSLy7PRD/x5hIMBhUIBOTz+eT1egvkLcuydUMibzYfbkxhfXbbNZCPbAw9dm/eqTXocfTypmqix5Hn3VgTPTa/Bj02m3djTfTY/BqxkI+lHruxpljIm+xxNNaItzzs8cTV48RwCse5AAAAAAAAAAAQBpvoAAAAAAAAAACEwSY6AAAAAAAAAABhsIkOAAAAAAAAAEAYcfliUQAAAAAAAABxKMSbRWEfT6IDAAAAAAAAABAGm+gAAAAAAAAAAITBJjoAAAAAAAAAIM+HH36ojIwMtW7dWpmZmcrJySny2B07dqhv375q1aqV+vXrp127duV9FwqF1LZtW51zzjl5P+edd17e97m5uZo6daratm2riy++WP/85z8dva5IsYkOAAAAAAAAIC54QqXvx2lbtmzRkCFDdOWVV2rp0qU6cOCAZsyYUaSxOTk5GjBggJKTk7Vs2TK1b99eQ4cO1fHjxyVJP/zwg3JycrRmzRqtXbtWa9eu1erVq/PGP/7441q2bJlmz56tGTNmaPLkydq4caPzF2mTJxSKr9P0LcsyXQIAAAAAAADgCL/fb7qEmNLhuummS3Dcx6+PdHS+zMxMbdmyRYsXL5Yk7dmzR926ddNnn32mpKSkQse++eabGjt2rD744AOlpKRIkrp166aJEyfqvPPO0+uvv663335bL7zwQoGxR44cUdu2bXXvvffqpptukiTNnTtXP/74ox5++GFHr9GuMkZXN+TEm0swGFQgEJDP55PX6y2QtyzL1g2JvNl8uDGF9dlt10A+sjH02L15p9agx9HLm6qJHkeed2NN9Nj8GvTYbN6NNdFj82vEQj6WeuzGmmIhb7LH0Vgj3vKA0zZt2qSLLroo7/datWopJSVFW7duVVpa2inHpqWl5W2gS1
LLli31zTff6LzzztNXX32lH3/8Ue3atVN2drbOP/98PfDAA6pbt65++OEHBYNBdezYMW9sq1at9Oabbzp+jXbF5SY6AAAAAAAAAJQGXbp0KfT7VatWFfhs+PDh+uijj06ar1KliurVq1fgsz179pxyEz0rKyvsWOmP41wuvPBC3XLLLTp69KgmTpyo4cOHa8mSJcrKylJiYqLq1q2bNzY5OTlvrElsogMAAAAAAACID3F1sHV4mZmZys7OPul3t9xyS4FjW8qXL69gMHjKeRMTE5WYmFhg7L59+yRJixYtyvfdxIkTdckll2j79u0qU6aMypUrl+/7ChUqFGndksYmOgAAAAAAAADEqJM9aX4qp512WqHf/bnp/aesrKwCG9wnk5KSou3bt+f77NChQ2HH1qxZU5L0888/q379+srOztahQ4dUqVIlW+uWtATTBQAAAAAAAAAA3KFly5Zat25d3u+HDx/WDz/8oDp16pxybKtWrbR+/Xrl5ubmfbZhwwadfvrp+s9//qNu3brle7L8z3Xq1KmjunXrqnr16vnW/nOsaWyiAwAAAAAAAAAkSd27d9fKlSu1du1aSdKcOXOUkpKS98LbnJwcHTp06KRj27dvr2PHjmnevHmSpNWrV+vbb79V586dddppp6lSpUp64IEHZFmWVq9erczMTF1wwQVq3LixEhISlJGRoSeeeEKHDh3Sf//7X7344ovq3LlzdC68EBznAgAAAAAAACAueDgT/ZSaNm2qYcOGqX///qpataqCwaBmzZqlhIQ/nseeO3euVq5cqWXLlhUYW7ZsWU2fPl3Dhw/Xiy++qP3792vIkCE666yzJElPPPGEMjMzddNNNyk5OVndu3fX0KFD88bfeeeduvXWW3XhhRcqFAqpfv36uv3226Nz4YVgEx0AAAAAAAAAkGfQoEHKyMjQli1blJaWplq1auV9N2zYMA0bNizs2A4dOmjFihVat26d6tWrpyZNmuR9d/rpp+c9pX4ylStX1qJFi7Ru3TodOXJEbdq0UdmyZZ25qGJgEx0AAAAAAAAAkE/dunVVt27diMZWrVpVXbp0iWhsQkKC0tPTIxpbUjgTHQAAAAAAAACAMDyhUCiuTgKyLMt0CQAAAAAAAIAj/nzZI4qmY49HTZfguI+W3mu6hFIvLo9zOfHmEgwGFQgE5PP55PV6C+Qty7J1QyJvNh9uTGF9dts1kI9sDD12b96pNehx9PKmaqLHkefdWBM9Nr8GPTabd2NN9Nj8GrGQj6Ueu7GmWMib7HE01oi3PICSx3EuAAAAAAAAAACEwSY6AAAAAAAAAABhxOVxLgAAAAAAAADijyeu3g4Jp/AkOgAAAAAAAAAAYbCJDgAAAAAAAABAGGyiAwAAAAAAAAAQBmeiAwAAAAAAAIgPnImOCPAkOgAAAAAAAAAAYbCJDgAAAAAAAABAGJ5QKBRX/4jBsizTJQAAAAAAAACO8Pv9pkuIKRde9ajpEhz3f2/ea7qEUi8uz0Q/8eYSDAYVCATk8/nk9XoL5C3LsnVDIm82H25MYX122zWQj2wMPXZv3qk16HH08qZqoseR591YEz02vwY9Npt3Y0302PwasZCPpR67saZYyJvscTTWiLc87PHE1ePEcArHuQAAAAAAAAAAEAab6AAAAAAAAAAAhMEmOgAAAAAAAAAAYbCJDgAAAAAAAABAGHH5YlEAAAAAAAAAceg4bxaFfTyJDgAAAAAAAABAGGyiAwAAAAAAAAAQBpvoAAAAAAAAAACE4QmFQnF1EJBlWaZLAAAAAAAAABzh9/tNlxBTOmVMM12C4z78532mSyj14vLFoifeXILBoAKBgHw+n7xeb4G8ZVm2bkjkzebDjSmsz267BvKRjaHH7s07tQY9jl7eVE30OPK8G2uix+bXoMdm826siR6bXyMW8rHUYzfWFAt5kz2OxhrxlgdQ8jjOBQAAAAAAAACAMNhEBwAAAAAAAAAgjLg8zgUAAAAAAABA/P
HE1dsh4RSeRAcAAAAAAAAAIAw20QEAAAAAAAAACINNdAAAAAAAAAAAwuBMdAAAAAAAAADxIcSh6LCPJ9EBAAAAAAAAAAiDTXQAAAAAAAAAAMLwhELx9W8YLMsyXQIAAAAAAADgCL/fb7qEmHJR16mmS3DcB/8aZbqEUi8uz0Q/8eYSDAYVCATk8/nk9XoL5C3LsnVDIm82H25MYX122zWQj2wMPXZv3qk16HH08qZqoseR591YEz02vwY9Npt3Y0302PwasZCPpR67saZYyJvscTTWiLc87PHE1ePEcArHuQAAAAAAAAAAEAab6AAAAAAAAAAAhMEmOgAAAAAAAAAAYcTlmegAAAAAAAAA4hBnoiMCPIkOAAAAAAAAAEAYbKIDAAAAAAAAABAGm+gAAAAAAAAAAITBmegAAAAAAAAA4oInxKHosI8n0QEAAAAAAAAACMMTCsXXX79YlmW6BAAAAAAAAMARfr/fdAkxpfOlj5guwXHvrxhtuoRSLy6Pcznx5hIMBhUIBOTz+eT1egvkLcuydUMibzYfbkxhfXbbNZCPbAw9dm/eqTXocfTypmqix5Hn3VgTPTa/Bj02m3djTfTY/BqxkI+lHruxpljIm+xxNNaItzyAksdxLgAAAAAAAAAAhBGXT6IDAAAAAAAAiEPHTReAWMST6AAAAAAAAAAAhMEmOgAAAAAAAAAAYbCJDgAAAAAAAABAGJyJDgAAAAAAACAueEIh0yUgBvEkOgAAAAAAAAAAYbCJDgAAAAAAAABAGJ5QyPy/YXjvvff02GOPadeuXapRo4b69++vfv36SZI+/PBDTZ06VXv27FG3bt00duxYJSUlRbyWZVlOlQ0AAAAAAAAY5ff7TZcQU7p0nmK6BMeten+M6RJKPeNnov/888964IEHNGPGDKWmpmr9+vW699571aBBA9WqVUtDhgzR4MGD1b17d02bNk0zZszQmDHF+4Nx4s0lGAwqEAjI5/PJ6/UWyFuWZeuGRN5sPtyYwvrstmsgH9kYeuzevFNr0OPo5U3VRI8jz7uxJnpsfg16bDbvxprosfk1YiEfSz12Y02xkDfZ42isEW952GT8cWLEIuPHuWzYsEH16tVTx44dVaNGDV122WU666yztH37di1YsEA+n0+DBw9W3bp1lZmZqcWLFysnJ8d02QAAAAAAAACAOGB8E71Jkybatm2bVq9erezsbL333nv67rvv1KFDB23atEkdO3bMy9aqVUspKSnaunWrwYoBAAAAAAAAAPHC+HEujRs31m233abbb78977Nx48apcePGysrKUr169fLlq1Spoj179igtLS3apQIAAAAAAAAA4ozxTfRAIKDnn39ec+bMUceOHWVZlkaOHKmUlBQlJiYWeIlo+fLlFQwGi7XmieOzs7Pz/W9Rxthdg3x08ycbc6o+u+0ayNsfQ4/dnXdiDXoc3Xw01qDHzuajsQY9djYfjTXosdl8NNagx87mo7GG2/Ox1uNorFHa8qZ7HI014il/svf7oRAhDkWHfZ5QyOyfnEceeUQ///yz5syZk/fZ3Llz9dFHH+nYsWPKyMhQ3759877r3r27hgwZoq5du0a0nmVZts9UT0pKsjWGvNm8G2si72zejTWRN78GeWfzbqyJvPk1yDubd2NN5J3Nu7Em8ubXIO9s3o01kTe/RrzlW7duXeQspC4XTTZdguNWfXC/6RJKPeNPoh87dkz79u3L99m+fft0/PhxtWzZUuvWrcvbRD98+LB++OEH1alTp1hr+ny+fL9nZ2drx44datCggSpUqFAgv3379gJjCkPebD7cmML67LZrIB/ZGHrs3rxTa9Dj6OVN1USPI8+7sSZ6bH4Nemw278aa6LH5NWIhH0s9dmNNsZA32eNorBFveQAlz/gmesuWLbVw4UJNnz5dTZs21datW7Vo0SLdd999Ou+889SrVy+tXbtW6enpmjNnjlJSUuT3+4u1Zrh/5lKhQoWw39n9pzHkzeYLGx
Ouz267BvKRj6HH7sw7uQY9jk4+GmvQY2fz0ViDHjubj8Ya9NhsPhpr0GNn89FYI1bysdLjaKxRWvOmehyNNeItD6BkGd9Ev/LKK/Xbb79p0aJFeumll1S5cmX17dtXffr0UUJCgoYNG6b+/furatWqCgaDmjVrlhISEkyXDQAAAAAAACDGeDgSHREwvokuSf369VO/fv1O+t2gQYOUkZGhLVu2KC0tTbVq1YpydQAAAAAAAACAeGX8xaLRZlmW6RIAAAAAAAAARxT32ON4c0mn0vdi0ZUf8mLRkuaKJ9Gj7cSbSzAYVCAQkM/nO+mZU5Zl2bohkTebDzemsD677RrIRzaGHrs379Qa9Dh6eVM10ePI826siR6bX4Mem827sSZ6bH6NWMjHUo/dWFMs5E32OBprxFseQMmLy010AAAAAAAAAHEovg7lgEN4QycAAAAAAAAAAGGwiQ4AAAAAAAAAQBhsogMAAAAAAAAAEAZnogMAAAAAAACIC57jpitALOJJdAAAAAAAAAAAwmATHQAAAAAAAACAMNhEBwAAAAAAAAAgDDbRAQAAAAAAAAAIwxMKhUKmi4gmy7JMlwAAAAAAAAA4wu/3my4hplzafpLpEhy34pNM0yWUemVMF2DCiTeXYDCoQCAgn88nr9dbIG9Zlq0bEnmz+XBjCuuz266BfGRj6LF7806tQY+jlzdVEz2OPO/Gmuix+TXosdm8G2uix+bXiIV8LPXYjTXFQt5kj6OxRrzlAZQ8jnMBAAAAAAAAACAMNtEBAAAAAAAAAAgjLo9zAQAAAAAAABCH4urtkHAKT6IDAAAAAAAAABAGm+gAAAAAAAAAAITBJjoAAAAAAAAAAGFwJjoAAAAAAACAuOAJcSg67ONJdAAAAAAAAAAAwvCEQvH11y+WZZkuAQAAAAAAAHCE3+83XUJMuazdRNMlOO69z8aaLqHUi8vjXE68uQSDQQUCAfl8Pnm93gJ5y7Js3ZDIm82HG1NYn912DeQjG0OP3Zt3ag16HL28qZroceR5N9ZEj82vQY/N5t1YEz02v0Ys5GOpx26sKRbyJnscjTXiLQ+g5MXlJjoAAAAAAACAOBRfh3LAIZyJDgAAAAAAAABAGGyiAwAAAAAAAAAQBpvoAAAAAAAAAACEwZnoAAAAAAAAAOLDcdMFIBbxJDoAAAAAAAAAAGGwiQ4AAAAAAAAAQBhsogMAAAAAAAAAEAZnogMAAAAAAACIC55QyHQJiEGeUCi+/uRYlmW6BAAAAAAAAMARfr/fdAkx5fL0h0yX4Lh3144zXUKpF5dPop94cwkGgwoEAvL5fPJ6vQXylmXZuiGRN5sPN6awPrvtGshHNoYeuzfv1Br0OHp5UzXR48jzbqyJHptfgx6bzbuxJnpsfo1YyMdSj91YUyzkTfY4GmvEWx5AyeNMdAAAAAAAAAAAwojLJ9EBAAAAAAAAxKH4OtkaDuFJdAAAAAAAAAAAwmATHQAAAAAAAACAMNhEBwAAAAAAAAAgDDbRAQAAAAAAAAAIgxeLAgAAAAAAAIgPvFgUEeBJdAAAAAAAAAAAwmATHQAAAAAAAACAMDyhUHz9GwbLskyXAAAAAAAAADjC7/ebLiGmXN56nOkSHPfuuodMl1DqxeWZ6CfeXILBoAKBgHw+n7xeb4G8ZVm2bkjkzebDjSmsz267BvKRjaHH7s07tQY9jl7eVE30OPK8G2uix+bXoMdm826siR6bXyMW8rHUYzfWFAt5kz2OxhrxlodNx00XgFjEcS4AAAAAAAAAAITBJjoAAAAAAAAAAGGwiQ4AAAAAAAAAQBhxeSY6AAAAAAAAgPjjCYVMl4AYxJPoAAAAAAAAAACEwSY6AAAAAAAAAABhsIkOAAAAAAAAAEAYnIkOAAAAAAAAID5wJjoiwJPoAAAAAAAAAACE4QmF4uuvXyzLMl0CAAAAAAAA4Ai/32+6hJjStcVY0yU47l/fTDRdQqkXl8e5nHhzCQaDCgQC8vl88n
q9BfKWZdm6IZE3mw83prA+u+0ayEc2hh67N+/UGvQ4enlTNdHjyPNurIkem1+DHpvNu7Ememx+jVjIx1KP3VhTLORN9jgaa8RbHkDJ4zgXAAAAAAAAAPEhFCp9PyXgww8/VEZGhlq3bq3MzEzl5OQUeeyOHTvUt29ftWrVSv369dOuXbvyvuvcubPOOeecAj+zZ8+WJL3//vsFvnv44Ycdvz672EQHAAAAAAAAAEiStmzZoiFDhujKK6/U0qVLdeDAAc2YMaNIY3NycjRgwAAlJydr2bJlat++vYYOHarjx49Lkt58802tXbs272flypWqXLmyOnbsKElav369rr322nyZe+65p8SutajYRAcAAAAAAAAASJIWLFggn8+nwYMHq27dusrMzNTixYuL9DT6u+++q3379mnSpEmqV6+eBg0apOzsbH311VeSpEqVKik5OTnvZ+HCherUqZNatmwp6Y9N9Hbt2uXLlC9fviQvt0ji8kx0AAAAAAAAACgNunTpUuj3q1atsjXfpk2bdNFFF+X9XqtWLaWkpGjr1q1KS0s75di0tDSlpKTkfdayZUt98803Ou+88/Jld+/erVdffVXvvPOOJOno0aPasGGDDhw4oLFjxyo5OVnXXXed7rzzTiUkmH0WnE10AAAAAAAAAPGhhM4QjzXDhw/XRx99dNLvqlSponr16hX4bM+ePafcRM/Kygo79kQvvPCCLrnkEp1xxhmSpO+//17Jycm6/fbblZ6ero0bN+ree+9VjRo11Lt3bzuX5zg20QEAAAAAAAAgRtl90lySMjMzlZ2dfdLvbrnlFiUlJeX7rHz58goGg6ecNzExUYmJiQXG7tu3L99nWVlZWrx4sf7+97/nfZaamppvY79mzZrq27ev3nzzTTbRAQAAAAAAAADRc9pppxX63ck2vcuVK3fKeVNSUrR9+/Z8nx06dKjA2H/+858688wz1bRp00Lnq1mzpn7++edTrlvSeLEoAAAAAAAAAEDSH2eYr1u3Lu/3w4cP64cfflCdOnVOObZVq1Zav369cnNz8z7bsGGDTj/99Hy5N954Q927d8/32eLFizVhwoR8n61bt67AWBM8oVB8HQRkWZbpEgAAAAAAAABH+P1+0yXElK7NHjBdguP+tfFhR+fbtGmTevXqpRdeeEHp6emaOnWq3n77bX344YdKSEhQTk6Ojh49qkqVKhUYe/ToUXXs2FG33HKLbrvtNq1evVqDBw/W22+/rbPOOkuS9Ntvv+mCCy7QG2+8IZ/Plzd28+bNuuGGG5SZmSm/36/Vq1friSee0LRp03TVVVc5eo12xeVxLifeXILBoAKBgHw+n7xeb4G8ZVm2bkjkzebDjSmsz267BvKRjaHH7s07tQY9jl7eVE30OPK8G2uix+bXoMdm826siR6bXyMW8rHUYzfWFAt5kz2Oxhrxlgec1rRpUw0bNkz9+/dX1apVFQwGNWvWLCUk/HGoydy5c7Vy5UotW7aswNiyZctq+vTpGj58uF588UXt379fQ4YMydtAl6SPPvpIycnJOuecc/KNTU1N1bRp0/TYY49p165datSokWbPnq1LL720ZC+4COJyEx0AAAAAAAAAcHKDBg1SRkaGtmzZorS0NNWqVSvvu2HDhmnYsGFhx3bo0EErVqzQunXrVK9ePTVp0iTf91dffbWuvvrqk469/PLLdfnllztzEQ5iEx0AAAAAAAAAkE/dunVVt27diMZWrVpVXbp0cbgic9hEBwAAAAAAABAXPPH1ekg4JMF0AQAAAAAAAAAAuBWb6AAAAAAAAAAAhMEmOgAAAAAAAAAAYbCJDgAAAAAAAABAGLxYFAAAAAAAAEB84MWiiABPogMAAAAAAAAAEIYnFIqvv36xLMt0CQAAAAAAAIAj/H6/6RJiSjffGNMlOG55YIrpEkq9uDzO5cSbSzAYVCAQkM/nk9frLZC3LMvWDYm82Xy4MYX12W3XQD6yMfTYvXmn1qDH0cubqo
keR553Y0302Pwa9Nhs3o010WPza8RCPpZ67MaaYiFvssfRWCPe8gBKXlxuogMAAAAAAACIQ8fj6lAOOIQz0QEAAAAAAAAACINNdAAAAAAAAAAAwmATHQAAAAAAAACAMDgTHQAAAAAAAEB8CHEmOuzjSXQAAAAAAAAAAMJgEx0AAAAAAAAAgDDYRAcAAAAAAAAAIAzORAcAAAAAAAAQHzgTHRHwhELx9SfHsizTJQAAAAAAAACO8Pv9pkuIKd2a3Ge6BMct3zbNdAmlXlw+iX7izSUYDCoQCMjn88nr9RbIW5Zl64ZE3mw+3JjC+uy2ayAf2Rh67N68U2vQ4+jlTdVEjyPPu7Ememx+DXpsNu/Gmuix+TViIR9LPXZjTbGQN9njaKwRb3kAJY8z0QEAAAAAAAAACCMun0QHAAAAAAAAEIfi62RrOIQn0QEAAAAAAAAACINNdAAAAAAAAAAAwmATHQAAAAAAAACAMDgTHQAAAAAAAEB8OM6Z6LCPJ9EBAAAAAAAAAAiDTXQAAAAAAAAAAMLwhEIh4/+G4eOPP9ZTTz2lzZs3q06dOrrjjjt0xRVXSJI+/PBDTZ06VXv27FG3bt00duxYJSUlRbyWZVlOlQ0AAAAAAAAY5ff7TZcQU7o1Gmm6BMct3z7ddAmlnvEz0QOBgG6//XaNHj1as2fP1scff6xRo0bp+PHjOvvsszVkyBANHjxY3bt317Rp0zRjxgyNGTOmWGueeHMJBoMKBALy+Xzyer0F8pZl2bohkTebDzemsD677RrIRzaGHrs379Qa9Dh6eVM10ePI826siR6bX4Mem827sSZ6bH6NWMjHUo/dWFMs5E32OBprxFseNoWOm64AMcj4cS6vv/66WrdurT59+ui0007T1Vdfrc6dO+uf//ynFixYIJ/Pp8GDB6tu3brKzMzU4sWLlZOTY7ps4P+1d/fxNdf/H8efZzNjG7O5WC4TuRgzSkhC5iLGXNSXUJbim2tSFPL9+vlWlKRCLhYqEiVymRK5jCS5OrMNy6LMXMzYbLOLc35/uDnf75ojm2OfnbPH/Xbbrfb5vD6f9+vTq3129jrv8/4AAAAAAAAAKAIMb6InJiaqcuXKObZ5eHjI3d1dR48eVcuWLW3bAwIC5Ofnp2PHjhV0mgAAAAAAAACAIsjwJnpQUJB+/PFHXblyRZJ05swZbd++XY899piSk5NVrVq1HPG+vr5KSEgwIlUAAAAAAAAAQBFj+Jroffv21cGDB9WtWzfVr19f+/btk4+Pj8LCwrRgwYJcDxEtUaKEUlNT72jMvx6flpaW45+3c0xexyC+YONvdszf1bmwXQPxeT+GGhfueEeMQY0LNr4gxqDGjo0viDGosWPjC2IMamxsfEGMQY0dG18QYxT2eGercUGM4WrxRte4IMYoSvE3e74fAMcyWa1Wq9FJSFJCQoLMZrOGDh2q6dOnKywsTH369FFoaKj69etniwsLC9OwYcPUsWPHfI1jNpvzvKa6p6dnno4h3tj4wpgT8Y6NL4w5EW/8GMQ7Nr4w5kS88WMQ79j4wpgT8Y6NL4w5EW/8GMQ7NoKZjCYAAC+SSURBVL4w5kS88WMUtfjGjRvfdiykTtVHG52Cw22Me8/oFFxeoWmiS9LYsWMVHx+vzz77TJL09ttvKz4+Xu+//74k6erVq2rWrJk+//xzBQcH52sMs9msGjVq5NiWlpamuLg4Va9eXSVLlsx1zG+//ZbrmFsh3th4e8fcqs6F7RqIz98x1LjwxjtqDGpccPFG5USN8x9fGHOixsaPQY2NjS+MOVFj48dwhnhnqnFhzMkZ4o2scUGMUdTimYmeNzTRkR+GL+dyw5EjR7Rx40atWrXKti0sLEy9evXSvn371KRJE82ePVt+fn4KCgq6o7Hs3VxKlixpd19eb0jEGxt/q2Ps1bmwXQPx+T+GGhfOeEeOQY0LJr4gxqDGjo0viDGosWPjC2IMamxsfEGMQY0dG18QYzhLvLPUuC
DGcNV4o2pcEGMUtXgAd1ehaKJbrVa98cYbCg8PV+3atW3b69WrpxEjRqh///4qU6aMUlNT9cEHH8jNzfDnoQIAAAAAAAAAioBC0UQ3mUz64osvbrpv0KBBCg0NVUxMjBo0aKCAgIACzg4AAAAAAACAS7AUmpWt4UQK1ZroBcFsNhudAgAAAAAAAOAQd7rscVHTqdqLRqfgcBtPvW90Ci6vUMxEL2h/vbmkpqYqKipKgYGBN11zymw25+mGRLyx8faOuVWdC9s1EJ+/Y6hx4Y131BjUuODijcqJGuc/vjDmRI2NH4MaGxtfGHOixsaP4QzxzlTjwpiTM8QbWeOCGKOoxQO4+1hcHAAAAAAAAAAAO4rkTHQAAAAAAAAARVDRWtkaDsJMdAAAAAAAAAAA7KCJDgAAAAAAAACAHTTRAQAAAAAAAACwgzXRAQAAAAAAABQNrImOfGAmOgAAAAAAAAAAdtBEBwAAAAAAAADADproAAAAAAAAAADYYbJai9ZCQGaz2egUAAAAAAAAAIcICgoyOgWn0qnyCKNTcLiNf84yOgWXVyQfLPrXm0tqaqqioqIUGBgoLy+vXPFmszlPNyTijY23d8yt6lzYroH4/B1DjQtvvKPGoMYFF29UTtQ4//GFMSdqbPwY1NjY+MKYEzU2fgxniHemGhfGnJwh3sgaF8QYRS0ewN3Hci4AAAAAAAAAANhBEx0AAAAAAAAAADuK5HIuAAAAAAAAAIogi8XoDOCEmIkOAAAAAAAAAIAdNNEBAAAAAAAAALCDJjoAAAAAAAAAAHawJjoAAAAAAACAosFqNToDOCFmogMAAAAAAAAAYIfJai1ab7+YzWajUwAAAAAAAAAcIigoyOgUnEqne4YanYLDbTw7x+gUXF6RXM7lrzeX1NRURUVFKTAwUF5eXrnizWZznm5IxBsbb++YW9W5sF0D8fk7hhoX3nhHjUGNCy7eqJyocf7jC2NO1Nj4MaixsfGFMSdqbPwYzhDvTDUujDk5Q7yRNS6IMYpaPIC7j+VcAAAAAAAAAACwo0jORAcAAAAAAABQBBWtla3hIMxEBwAAAAAAAADADproAAAAAAAAAADYQRMdAAAAAAAAAAA7WBMdAAAAAAAAQNFgYU105B0z0QEAAAAAAAAAsIMmOgAAAAAAAAAAdtBEBwAAAAAAAADADpPVai1SCwGZzWajUwAAAAAAAAAcIigoyOgUnErHci8YnYLDfXshwugUXF6RfLDoX28uqampioqKUmBgoLy8vHLFm83mPN2QiDc23t4xt6pzYbsG4vN3DDUuvPGOGoMaF1y8UTlR4/zHF8acqLHxY1BjY+MLY07U2PgxnCHemWpcGHNyhngja1wQYxS1eAB3H8u5AAAAAAAAAABgB010AAAAAAAAAADsKJLLuQAAAAAAAAAogixF6vGQcBBmogMAAAAAAAAAYAdNdAAAAAAAAAAA7KCJDgAAAAAAAACAHayJDgAAAAAAAKBosLImOvKOmegAAAAAAAAAANhBEx0AAAAAAAAAADtMVmvR+gyD2Ww2OgUAAAAAAADAIYKCgoxOwal09BtodAoO9+2lBUan4PKK5Jrof725pKamKioqSoGBgfLy8soVbzab83RDIt7YeHvH3KrOhe0aiM/fMdS48MY7agxqXHDxRuVEjfMfXxhzosbGj0GNjY0vjDlRY+PHcIZ4Z6pxYczJGeKNrHFBjFHU4pFHFovRGcAJsZwLAAAAAAAAAAB20EQHAAAAAAAAAMAOmugAAAAAAAAAANhRJNdEBwAAAAAAAFAEWa1GZwAnxEx0AAAAAAAAAADsoIkOAAAAAAAAAIAdNNEBAAAAAAAAALCDNdEBAAAAAAAAFAlWi8XoFOCETFZr0VpN32w2G50CAAAAAAAA4BBBQUFGp+BUHvd51ugUHO67lE+NTsHlFcmZ6H+9uaSmpioqKkqBgYHy8vLKFW82m/N0QyLe2Hh7x9yqzoXtGojP3zHUuPDGO2oMal
xw8UblRI3zH18Yc6LGxo9BjY2NL4w5UWPjx3CGeGeqcWHMyRnijaxxQYxR1OIB3H2siQ4AAAAAAAAAgB000QEAAAAAAAAAsKNILucCAAAAAAAAoAgqWo+HhIMwEx0AAAAAAAAAADtoogMAAAAAAAAAYAdNdAAAAAAAAAAA7GBNdAAAAAAAAABFg4U10ZF3zEQHAAAAAAAAAMAOmugAAAAAAAAAANhhslqtReozDGaz2egUAAAAAAAAAIcICgoyOgWn8njJfkan4HDfpS0xOgWXVyTXRP/rzSU1NVVRUVEKDAyUl5dXrniz2ZynGxLxxsbbO+ZWdS5s10B8/o6hxoU33lFjUOOCizcqJ2qc//jCmBM1Nn4MamxsfGHMiRobP4YzxDtTjQtjTs4Qb2SNC2KMohaPPLJajM4ATojlXAAAAAAAAAAAsIMmOgAAAAAAAAAAdtBEBwAAAAAAAADAjiK5JjoAAAAAAACAosdqsRqdApwQM9EBAAAAAAAAALCDJjoAAAAAAAAAAHbQRAcAAAAAAAAAwA7WRAcAAAAAAABQNFgtRmcAJ8RMdAAAAAAAAAAA7DBZrdYi9Uhas9lsdAoAAAAAAACAQwQFBRmdglPp4NHb6BQcblPmcqNTcHlFcjmXv95cUlNTFRUVpcDAQHl5eeWKN5vNebohEW9svL1jblXnwnYNxOfvGGpceOMdNQY1Lrh4o3KixvmPL4w5UWPjx6DGxsYXxpyosfFjOEO8M9W4MObkDPFG1rggxihq8QDuviLZRAcAAAAAAABQ9FgtRWpRDjgIa6IDAAAAAAAAAGAHTXQAAAAAAAAAAOygiQ4AAAAAAAAAgB000QEAAAAAAAAUDVaL633dBdu3b1doaKgaN26siRMn6tq1a3k6Pj09Xb1799aqVaty7fv6668VEhKiZs2a6d1335XF8t9rSE1N1bhx4/TQQw+pU6dO+umnn+74WhyBJjoAAAAAAAAAQJIUExOjYcOGqUuXLlq9erUuX76sGTNm3PbxV65c0bBhw3TgwIFc+3bs2KHXXntNQ4YM0RdffKF9+/bps88+s+2fOHGiDhw4oMWLF2vcuHF6+eWXdfbsWYdc152giQ4AAAAAAAAAkCQtWbJEgYGBGjp0qKpWraqJEydqxYoVtz0bfeTIkQoODlalSpVy7fv444/VoUMH9ezZU9WrV9e4ceNsTfSEhARt2LBBEyZMUL169dS6dWu1bdv2prPZCxpNdAAAAAAAAACAJOno0aNq2bKl7fuAgAD5+fnp2LFjt3X85MmTNWrUKJlMpr89d4MGDXTmzBklJiYqMjJSHh4eat68uW3/gw8+qEOHDt3B1ThGMaMTAAAAAAAAAADkT9u2bW+5f8uWLbm2jR49Wjt37rxpvK+vr6pVq5ZrW0JCgho0aPC3+dx777129yUnJ+fY7+7uLm9vb507d07Jycm65557VLx4cdv+0qVLKyEh4W/HvNtMVqvVanQSAAAAAAAAAIC8y08T/eLFi0pLS7tp/PPPP6/Ro0erU6dOtm19+/ZV79691bVr19vOKyQkRMOHD9cTTzxh29agQQMtXbpUwcHBtm2tW7fWjBkzdPbsWc2ZM0cbNmyw7duzZ48mTZqkTZs23fa4dwMz0QEAAAAAAADASd2sSf53ypYte8t9Fy5cyLEtOTk5xwzx/PLz88t17pSUFBUvXvym+xw17p1iTXQAAAAAAAAAgCSpUaNG2r9/v+37q1ev6uTJkzd9UOidnvu3335TSkqKKlasqPr16+vq1auKjY217T9y5IgqVqx4x+PeKZroAAAAAAAAAABJUlhYmDZv3qx9+/ZJkmbPni0/Pz8FBQVJkq5du6aUlJR8nbtr16768ssvFRsbq+zsbM2aNUsNGzZUuXLl5Ovrq1atWumdd95RRkaG/vjjD61YsUIhISEOu7b8YjkXAAAAAAAAAIAkqV69ehoxYoT69++vMmXKKDU1VR988IHc3K7Px46IiNDmzZu1Zs
2aPJ+7Xbt22rVrl7p16yYfHx9J0qJFi2z7X3vtNQ0cOFCPPvqo0tLS1LRpU/Xs2dMxF3YHeLAoAAAAAAAAACCH06dPKyYmRg0aNFBAQIBDz338+HGdOnVKjRs3VpkyZXLsy8jI0L59+1S8eHE99NBDMplMDh07P2iiAwAAAAAAAABgB2uiAwAAAAAAAABgB010AAAAAAAAAADsoIkOAAAAAAAAAIAdNNEBAAAAAAAAALCDJjoAAAAAAAAAAHbQRAcAAAAAAAAAwA6a6AAAAAAAAAAA2FGkmugWi8XoFADkk9VqNToFFADq7Nqob9FAnV0fNXZ91Nj1UWPXR40BwLFcvol++fJlXblyRcnJyXJzc/nLLbJuvEDghYLryczMzPE9b4a5Jurs2m7U02QySZKys7ONTAd3QUZGhqTrv4dv1Bmuh3u166PGru+vNeZ3suuhxgBwdxQzOoG7KSYmRiNHjtS9996rkydP6umnn1bjxo3VoEEDo1ODg1y+fFmSdO3aNVWoUIE/3F3M8ePHNW/ePPn6+spisWjkyJHy9/c3Oi04GHV2bbGxsVq2bJlKlCihsmXLqnfv3ipZsqTRacGBjh07pmnTpsnf31/JyckaM2aMqlSpIk9PT6NTgwNxr3Z91Nj1UWPXR40B4O5x2anZqampmjRpktq1a6f33ntPo0ePVlxcnBYuXKitW7canR4cICYmRuHh4Ro5cqRGjRqlt99+W9euXTM6LTjIuXPn1L9/fwUEBKhu3bpKSUlRnz59tHXrVqWkpBidHhyEOru2+Ph49e3bVx4eHrJarTp8+LA6d+6sY8eOGZ0aHCQxMVGDBw9WYGCgwsLCVLFiRY0fP15fffWV4uPjjU4PDsK92vVRY9dHjV0fNQaAu8tlZ6J7eHgoIyNDtWrVkre3t0JDQ1WrVi19++23+vjjj5Wdna127doZnSbyKS0tTf/+97/VokULPfXUU0pLS9PYsWOVmJioZ599VoGBgcxKd3Lx8fEqX768hg8fLi8vL/Xq1Utz5szR3LlzdeHCBXXo0EG+vr5Gp4k7RJ1d040lPaKjo1W9enW9+uqrtn2TJ0/W0KFDNXnyZD3yyCPcq53clStX5OXlpT59+qhSpUpq2bKlVqxYoc2bN+vixYvq0aOHqlatanSauEPx8fGqUKEC92oXdvbsWZUtW5YauzB+jl0fr6sB4O5yyZnoVqtVGRkZunLlik6ePGnbXqtWLT3xxBNq1qyZli9frgMHDhiYJe5ERkaGUlNT1aRJE917772qW7euFi9erKSkJC1cuFAHDx40OkXcoaysLEVHR+v48eO2bUOHDlXnzp21cuVK7dy5UxLr4Ds7q9VKnV3QjTV009PTFR0drdjYWNu+SZMmKTQ0VP/3f/9nu1ezVqfzOX/+vM6cOSOLxaITJ07keL3Vs2dPdevWTVFRUdq4cSOz31xAZmamoqKidOLECds27tWu4fjx41q9erXKlCmjY8eOUWMXduPn+O9ec7EOvvO68ffT/37ijxoDgOO4ZBPdZDLJ29tbzz33nD7//PMcy7dUrlxZ7du3V7FixbR7925J/BJxRt7e3vLw8NCOHTts2/z8/DRt2jSlp6dr4cKFSkxMlMSLfWfyxx9/aOXKlfrhhx8kSY899pjWrVunixcv2mKeffZZtWzZUm+++aYSEhKYxepkrFarfv75Z40bN07Lly/XlStX1Lp1a61bt872MytRZ2d24cIFDRgwQOfPn1fjxo1VqVIl7dy50/bgSUl66aWX1KJFC40cOVIpKSlyd3c3MGPkVWRkpLp166a4uDhVr15d7dq105IlS3T69GlbTGhoqNq0aaPly5fr1KlTBmaL/Przzz915swZSVK1atXUuHFjrV27lnu1C4mOjlaPHj00btw4/f7779TYBSUkJGjHjh365ZdfVLp0abVo0eJvX3O5ublki8BlXbx4UUeOHNGePXtkMpkUEhKi9evX68KFC7YYagwAjuHSd8+uXb
uqU6dOWrRokX766SdJ1xs4tWvXVoMGDbR27VplZGTwS8RJJCUl2V7wmUwmtWjRQlFRUdqzZ48txtfXV1OnTtWRI0cUERFhi0XhFx0drW7dumnx4sUaP368VqxYodKlS2vv3r368ccflZaWZosdNmyYKlSooM8//9zAjJEf69ev16uvvqoLFy5o+fLl2rFjh+rWratdu3Zp165dSk1NtcVSZ+eUlJSkn376SRMmTFDp0qXVvXt3zZ07V0eOHMkRN27cOPn4+GjdunUGZYr8iIqK0jPPPKPu3bvrkUcekZubm7p06aKkpCRt2LBBCQkJtthevXqpcuXK+vTTTw3MGPk1cuRIffbZZ5KkChUqqH379tqzZ4927dqV49MF3KudU1RUlHr16qXevXurc+fOKlWqlLp37277fczrLucXHR2tnj17aubMmRo6dKg2b96sxo0b85rLhcTExKh379564403NGzYMM2dO1dXrlzR4cOHtXv3bmoMAA7m0t3jUqVKadiwYbr//vs1e/Zsbdq0ydZQ9fb2VrFixZSZmWlwlvg7VqtVqampmjx5spYsWaLExES5u7vrmWeekSQtW7Ysx0fWSpcureeff14HDx7M8QcACq+kpCS9+OKLCg8P15o1azR58mRFRkaqQ4cO6ty5syIiIvTdd9/lmFHh7+/PEgFO5ty5c3rrrbc0duxYLViwQMOGDdN3332n/v37q2PHjlq4cKE2bdqkc+fO2Y6hzs6nevXquu+++3T48GH1799f/fv3V9euXTVq1Cj9/PPPtj/oSpQooRIlSuT4pAkKt9OnT6tHjx7q37+/XnnlFWVmZio6OlqNGjVSs2bNdOjQIX311Vc5lnapUKGCMjMz+VSYEwoODlaxYv99fFL79u11zz33aO3atbY172/gXu1cIiMj1bdvX73wwguaOHGi/P39tWTJEvXs2VOPPfaYFixYwOsuJ5eQkKCBAwfqySef1BdffKGxY8dq8eLF6t+/vzp06KCPP/5YmzZtyvHGJzV2LomJiRozZoy6d++uRYsW6f3331epUqVUs2ZN1apVSxEREdq0aRM/xwDgQC77YNEbAgICNHz4cC1btkyvvPKKVq1aJTc3N+3fv19Dhw6Vt7e30Snib5hMJnl5eSk2NlZHjx6Vp6enevTooYCAAE2fPl0vvvii5s6dq7CwMIWEhEi6/qLi4sWLLNXjJDIzM+Xj46MePXpIkjp27Kg9e/bo008/1ZIlS5SWlqavvvpKO3fu1KOPPqqUlBRFR0erb9++BmeOvLBYLKpWrZratm0r6XpD5uOPP9b27dvVunVrHT9+XJs3b9bOnTv1yCOPKDU1lTo7maysLJlMJpUtW1ZdunTRpUuX1L9/fy1atEgVKlTQ66+/rpCQEDVu3FgXLlzQmTNn1LBhQ6PTxm0ym82qV6+eevfuLYvFon79+ik5OVkXL15UmzZtlJaWpvj4eI0fP14hISHKyMjQ7t27NWHCBD4V5oRq166tJUuWKDg4WDt27NDOnTtltVp19uxZJSQkaOfOnWrevLnS0tK4VzuR8+fPq3///goPD9fw4cMlSY888ogWLVok6fqnhN577z1edzm5+Ph4BQYGatSoUZKuP6ti2bJl2rNnjx566CEdPnxYP/zwA6+5nNj58+dVunRphYeHy9vbW61atVJSUpLeeustrVmzRmvWrNGKFSv4OQYAB3L5Jrok25PmW7Vqpe+//15JSUl6++231bx5c6NTw22wWCxyc3NT1apVlZmZqR07dshkMqlHjx6qUqWKZs6cqXfeeUdLly7VnDlzFBgYqI0bN+rFF1/kTRInkZmZqYsXL+rKlSu2bU2bNrUtwzR69Ght3bpV+/bt07x582QymTRixAi1bNnSqJSRT+np6Tp16pRq1aqlpUuX6tdff1VycrIkqVKlSqpatap8fX0VEREhd3d36uxkbsxafeCBB5SYmKgXXnhBU6ZM0eDBgzVgwABZrVadOXNGa9askZ
ubm4YMGaKmTZsanDVuV9OmTbVu3TotWrRIv//+u0qVKqV33nlHZrNZ+/btU2pqqmrUqKHAwECtXr1aJpNJL7/8stq3b2906siHqlWr6sqVKzpw4IBq1aqlvn37Kjk5WS+99JIsFovq16+vjz76iHu1kylRooRmzZqlhx9+2LatRYsWmjp1qhYuXKgBAwbYXnft379f8+bNo8ZOqESJEjp8+LC2bNmitm3b6t1339XRo0e1bt06JScnKyMjQ97e3qpatSqvuZxURkaGDh06pMjISNvPc/PmzZWUlKS1a9dq4MCBql69ug4ePMjPMQA4iMnK52vhBNLT0zVhwgSNGTNG69ev1+bNm9W2bVv16NFDFSpU0JUrV3Ty5EmtW7dOmZmZatWqlVq2bKnixYsbnTpu0+bNm9WgQQMFBARIks6cOaOnn35aCxYsUM2aNSVdXw6kQoUKunr1Km+QOKmTJ0/K399fvr6+OnjwoCwWi4KDg3X8+HF98sknqlSpkkaNGqXLly+rWLFi1NlJRUREaNu2bfr8889lsVj0/PPP6+eff9bQoUM1fPhwJSQkyNPTU2XKlDE6VeRRTEyM+vXrpzJlyujLL7+01fDw4cOaNGmSOnTooCFDhigtLU3u7u78HnZiV69eVUhIiOrXr6+ZM2fKx8dHkvTNN99oypQpWrFihby8vLhXO7ns7Gy5u7trwYIFio2N1b/+9S95eXnZ9icnJ8vNzY0aOxmr1aqIiAjNmzdPderU0cGDB7V27VrVrl1bZ86c0ezZs5WVlaVp06YpKSlJHh4e1NjJpKena+zYsfL29tZTTz2l+++/X1OmTNG3336rBx98UAsXLrTF8nMMAI5RJGaiw/mVKFFCEyZMULly5fTCCy8oIyNDW7ZskSQ98cQTKl++vBo2bMiyAE6sTZs2cnd3l3T90wcmk0lXr15VVlaWpOtNuY0bN2rJkiW2P+ThfO677z5J1/9ob9SokW17YGCgKlSooAMHDshiscjX19egDOEIjz32mLZt2yZJ2rt3ryIjI9WgQQOtWbNGPXv2tL1ZBudTp04djRkzRuvWrVOxYsVsDbjg4GD5+voqKipKklSyZEmDM8WdsFqt8vDwUJUqVeTp6SkfHx9brX18fOTt7S03Nzfu1S7gxmuvxo0ba/78+eratauaN29ue45BqVKljEwP+WQymTRw4EC1bt1au3btUtmyZVW7dm1J1z/55+bmpsjISF27do03tJ1UiRIlNGrUKE2bNk0jRoyQh4eHnnzySU2ZMkWzZs1SQkKCypUrJzc3N36OAcBBaKLDaZQrV05Wq1Umk8m2huOWLVvk5uamHj16qFy5cgZniDtx44846fof76VLl1bp0qXl4+OjTz75RDNnztTy5ctpoLuIG/XOyMiwzVR1c3NTtWrVeAChC/D19VVGRoY+/vhjzZ8/X8OGDVNoaKjmzJmjjIwMo9PDHQoLC1OnTp3k4+Nje6MzPT1dnp6eqlevnsHZwRFMJpOKFy+uZ555RhMmTNDq1avVvXt3Sdc/deDu7i4PDw9jk4RDPfDAA3ryySf14Ycfqnr16qpYsaLRKeEOubu7q27dujp79qxWrFih6Oho1a1bV3FxcTpz5owCAgKUlZUlT09Po1NFPt1///2aPn26Tp8+rfT0dDVu3FixsbE6f/684uLimLQAAA5GEx1OxWQy2dZIHz58uNzc3LRy5Up5eHgoPDxcbm5uRqcIB3B3d5e3t7fKlCmj0aNH6+jRo1q2bJmCgoKMTg0OdOnSJb3++utKS0uTm5ubfvnlF3366ac53lCBc/L391exYsX07rvv6tVXX1W/fv0kSRMnTrStmw7ndWOW+enTp/XNN9/Iw8ND8fHxOnDggF599VWDs4MjdezYUSdPntSECRO0cuVK+fj46MiRI4qIiJC/v7/R6cHB2rVrp23btumnn35S9+7deSCwi2jUqJFq1aqliRMnqnbt2rpw4YIiIyO1ZMkSlvdwAaVLl1
b9+vVt39esWVPBwcEym81q1qyZgZkBgOthTXQ4pRsz0iXpo48+UqdOnVSlShWDs4KjWK1WXbt2Te3bt9fFixf19ddfq06dOkanBQfLysrSnj179O2336pKlSrq0KGDbf17OL9Dhw4pJiZGvXr1MjoV3CV//vmnFi9erF9//VXlypXTyJEjFRgYaHRacLDs7Gzt379fP/74o6pUqaJmzZqpWrVqRqeFu2Tq1Knq27ev7r33XqNTgQP98ccfmj9/vsxms6pXr65hw4bp/vvvNzot3CWzZs1SWFiYqlevbnQqAOBSaKLDad2YkQ7XtWnTJtWsWZPGKgAUYjeW6OEhooDz+t8JKnBNWVlZslgsslqtLOHiovg5BoC7iyY6AAAAAAAAAAB2MI0XAAAAAAAAAAA7aKIDAAAAAAAAAGAHTXQAAAAAAAAAAOygiQ4AAAAAAAAAgB000QEAAAAAAAAAsIMmOgAAAP5WVlZWnrYDAAAAgKugiQ4AAFBEJCQk2P7dYrFo/fr1io2Nva1jn3/+eb311ls5tn3zzTfq2LGjUlNTb3nstWvXJEmxsbFaunSpJOnq1au2/QcOHMiRmyNkZGQoLi7Ooec0yg8//KDBgwcrLS3tlnHnzp1TUlJSwSQFAAAAFCE00QEAAIqIIUOG6JVXXpEkubm5adGiRZozZ87fHpeYmKh9+/bJZDLl2N64cWNdvHhRERERdo9NSEjQ448/ruPHjysmJsY23qRJkzRr1ixJ0rvvvqspU6bk97Ju6rXXXtPChQsdes5bSUlJueNznD17VnFxcfrjjz9yfCUmJmrr1q367rvvcu37/fffdfr0aUnSqlWrNGLECGVmZt5xLgAAAAD+q5jRCQAAABhh7969Cg8PV0xMjNGpFIidO3cqMjJSY8aMsW0bNWqUBg8erN69e6tJkyZ2j924caMsFot69eql9PR0mUwmeXp6KiAgQIMGDVKZMmVssVarVdeuXVOJEiUkSQEBAfrHP/6h//znP+rXr588PDwUHx+vLVu2aMOGDcrOzlZUVJRef/11h13r0qVLFRcXpyVLlti2RUdHa/LkyYqOjlbDhg01depUVaxY0WFjjho1Si1btlT//v3zfY4ZM2bou+++k4eHR659pUqV0htvvJFre3Z2toKCgrRkyRK98MILOnTokN577z3bmyUAAAAA7pzJarVajU4CAACgoKWkpOjkyZNq0KCB0ancddnZ2erVq5dKlSqlTz75JMe+QYMG6cSJE1q1apV8fX1venznzp3l7u6utWvX6s0339TixYtvOV6ZMmW0d+9eSdKuXbt0/PhxZWRkKD4+Xhs3btSwYcMUGRmpwMBANWzYUH369NH3339vGz87O1seHh7y8fHJ87VevHhRXbp00WeffaaaNWvatnXu3Fl16tTRwIED9c0338hsNuvrr79WsWKOmVOSlJSkgQMHqk2bNho2bJhDzpkfiYmJ6tKli5YsWWK7fgAAAAB3hiY6AACAi5s/f75mzpyplStXqm7dujn2JSQkqHv37rrvvvsUERGRq3G9a9cuDRgwQM2bN9cnn3yixMREpaWlqXjx4jcdy2q1KisrS5UqVZIkLVq0SLt371ZKSooOHDigkiVLqmXLlrJYLMrIyFD9+vU1d+7cXOcZNWqUhg4dmudrnTt3ruLj4/Wf//zHtm3GjBlasWKFtmzZIi8vL2VnZ6t9+/YaO3asOnXqlOcx7ElJSdGQIUMUHByssWPH5vs8Fy5cUIsWLf427siRIzetQ0REhP78809Nnjw53zkAAAAA+C/WRAcAAHBhR44c0ezZszV06NBcDXTp+nIr8+fP14kTJ9S7d2+dOHHCts9qteqDDz7IsRa6v7+/PD09dfHiRZUvX972FRcXp7lz5yopKcnWQJeuP5B02rRpysjIUO3ateXn56caNWpoyJAh+uijj7RhwwYNGTJEMTExiomJUfXq1fXOO+/on//8Z76ud9OmTQoLC8uxbc+ePWrbtq
28vLwkSe7u7goJCdGePXvyNYY9Pj4+WrBggY4fP67Jkycrv3NVbiyFs3r1att/l//9Wr16tUwmk903Mjp37qzNmzfLYrHk+1oAAAAA/BdNdAAA4FIyMjL09ttvq3nz5nrooYc0aNAgnTp1Klfc3r17VadOnZueY9y4cRo3bpzOnj2rl156Sc2aNdOff/6Z5zHsqVOnjubOnas2bdqoVatW2r59u7p06aKmTZvqhx9+0OnTp1WnTh0dOHDAdozValWLFi302Wef3fY4cXFxGjJkiOrXr6/WrVvrxIkTio2NzfXl7e2tKVOmKDU1VT169LA9KPTLL7/U0aNHFRoamuO8kyZNyvUg0H379mnp0qW5Grs7d+7UP/7xDwUGBmr48OHKyspSzZo1NWDAAH311Vc6depUjodynj9/XuXLl7/puuB/x2q16vjx42rUqFGO7QkJCblqXaVKFcXFxd3WeS0Wi7Kysm76lZ2dnSPW09NTH374oS5duqTx48fn2n87bjwYdMCAAWrVqlWurwEDBshqtdp9gGjlypXl7u6u8+fP53lsAAAAALnxYFEAAOBSxo0bp927d+uVV17RPffco9mzZ2vgwIHasGFDnhqzSUlJ6tOnj5o0aaIRI0bkWC/cEWOsX79er7/+ul555RW9+OKLevPNN7VixQotX75cERERCg4O1rZt2/TAAw9Iuj6j/NKlS+rYseNtnd9qtepf//qX/Pz8NGjQID355JO3jK9Ro4a++uorjR8/3jbm5s2b1bt3b5UpU0aJiYm22M6dO2vs2LFKSkqyPVT02LFjuu+++1S9enVbXFZWlrZt26bnnntO/fr100cffaQOHTqoa9euKlmypJYuXapy5crp6NGjkqTU1FRdvXpVVapUua1r/KtLly7Jx8cnVw2uXbumUqVK5djm7e2tS5cu3dZ5P/zwQ82ePfum+ypXrqwffvghxzYPDw/NmDFDEydO1OjRo/Xuu+/m6f89Pz8/RUZG/m3crdZzL1++vM6dO6eAgIDbHhcAAADAzdFEBwAALiMuLk4bNmzQtGnT1K1bN0nXlx+ZM2eOLl68qHvuuee2z7V161ZNmDBBzz777F0ZY/DgwXr00UdVo0YN3XfffQoNDdWJEye0b98+Sdcb1atWrdLo0aNt+TRp0kTlypW7rfObTCbNnTtX6enp8vPz0+HDh+Xh4aFWrVopPDxcL7zwgi12woQJOnv2rPz9/TV//nzb9smTJ9/0YaStWrWSyWTS1q1b1aNHD0nS0aNH9eijj+aIK1asmJ599llZLBb98ssvmj59ut5//32dPXtW9erVU9myZRUeHq6RI0cqPT1dsbGx8vLyyrEcTF6YTKabLqHi4eEhd3f3XNvT09Nv67y9e/dWu3btbrrPXnM8IyNDiYmJqly58m0/vDQ2NjbXrP/b8dprryk8PDzHNovFctNrBgAAAJB3NNEBAIDLuDGjuXHjxrZtdevW1cyZM/N8rlq1aqlfv353bYwKFSpIut74/d9/v6FTp056++23FR8fr4oVK2rbtm3q06dPnsbw8fGxPSjU3d1dly5d0vnz53MtbZKQkHDTGcv2mtk+Pj566KGHtHPnTvXo0UMpKSk6deqUmjVrlit29OjROnbsmLKzs+Xh4aEJEyZIuj7r/P3331dISIgqV66s7du36+zZs2rUqFG+m79+fn5KTU3VtWvX5OnpadtetmxZnT17NkdsUlKSSpYseVvnvbHu++1KTk7W4MGD1aRJE7344ou3fdyNhvz27dtv+82Yjh073vQ6biyLAwAAAODOsSY6AABwaVarVb/88kue14cOCgqSm9vtvVTK7xi3EhAQoAcffFBbt25VQkKCjh8/rvbt29/RObds2SIPD48cbwBI0rlz51SxYsU8natp06bau3evJOnQoUOyWq1q0qRJrriVK1dq//79Klu2rGbPnq0DBw5o3rx58vb2VuvWrSVJYWFhWrx4sTZu3KhHHnkkn1d3Xb169W
yz+W+oW7eu9u/fn2NbZGSk7c0LRzp//rz69eun9u3b56mBLuV8E0WSXn31VT3yyCPq2LGj7euxxx7LNeP/r286/Pbbb3J3d7/tTy0AAAAAuDWa6AAAwGXUrVtXkvTLL7/Ytv3xxx96+umndeTIEacZ44bQ0FBt27ZN27ZtU/PmzeXn55fvc129elVz587V448/bpudfkNCQkK+mugXLlxQXFycfv31V9WsWVP+/v43jV22bJkuXboks9msn3/+WTNnzlS/fv3k5eUlSerbt6+OHDmiyMjIv127/e88/vjjWrNmTa5tu3btUlRUlKTr9dqyZcsdN+z/6vTp03rmmWcUHh6u/v375/n4vzbRS5UqpWeeeUbffvut7WvSpEl/uzzM2rVr9fjjj+c6HwAAAID8YTkXAADgMmrUqKGOHTvqrbfeksVi0T333KO5c+eqWrVqevjhh51mjBs6duyo6dOnKy0tTU888US+z3P58mUNGTJEycnJGjNmTI59iYmJunz58m0tH2KxWJSVlaXixYsrODhY33//vapVq6Y9e/bkmN2enZ0tq9Vqa/b26dNHtWvX1u7duzVo0CClpqaqVKlSOnLkiBo0aKCrV6/Ky8tLVqtVSUlJdpvxt6Nnz54KDQ3V4cOHFRwcLElq06aNmjZtqmeffVYhISHatWuXypYtq169euV7nL+Kjo7W0KFDNX78+Hx/YuCv67knJSXpyy+/1KJFi2zbsrOzVaJECbvn+PPPP/XFF19o5cqV+coBAAAAQG400QEAgEuZNm2aZsyYoWnTpik7O1tNmjTRlClTbLOenWUM6fpa3g0bNtT+/fs1Z86cPB9vsVi0adMmvffee7p06ZIiIiJsM85jYmL0zTffaN++fSpWrJjq169v9zxZWVmSrs+07tChw01j9u/fry+//NL2/dSpU22N/+LFi6t27dpat26dSpcurffee08//vij9u/fr/Pnz2vChAnq2rWrYmNjFR4ervnz598yn1spVaqUxo8fr7Fjx2rZsmXy9/eXyWTSvHnz9OGHH2rHjh1q1qyZxowZk2tG/p344IMP9Oabb6p58+b5PofFYsnx/fTp0+3Gnj9/XseOHdO5c+dsy7mkp6fr5Zdf1vPPP5/vh7MCAAAAyM1k/euUFwAAAOSbxWLJ1Qz9X25ubre91vqd2rdvn5577jk9/PDDmjRpkqpWrWrbl56errZt26patWr65z//qZCQELvneeedd3T48GEtWLBAJ06ckKen5y2XCsnMzFT58uVVtmxZbd++XV9//bW2b9+uLl266OWXX1aZMmV0+vRpvfnmm9q9e7fGjBmj8PBwpaSkaPDgwTp48KCmTp2qsLCwfF/7jBkzdO7cOb311lv5PkdBO3HihDp37nxbDxb9/fffFRoaqpo1a2rOnDmqUqWKZs2apbi4OE2fPp2lXAAAAAAHookOAADgQOPGjdPXX39td394eLhee+21Asvn999/17333ntH5/j3v/+tY8eOafny5Xk+NiYmRuvXr9dTTz2lKlWq2LZnZmZq9uzZevLJJ1WtWjXb9uzsbC1fvly9evWSh4dHvnO2Wq26cuWKfH19832Owi4tLU0lS5bM8b27u7uKFy9uYFYAAACA66GJDgAA4EBnzpzR5cuX7e739/dXQEBAAWYEAAAAALgTNNEBAAAAAAAAALCjYBbkBAAAAAAAAADACdFEBwAAAAAAAADADproAAAAAAAAAADYQRMdAAAAAAAAAAA7aKIDAAAAAAAAAGAHTXQAAAAAAAAAAOygiQ4AAAAAAAAAgB000QEAAAAAAAAAsOP/AfQGabMFBGdIAAAAAElFTkSuQmCC",
"text/plain": [
"<Figure size 1600x1200 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import seaborn as sns\n",
"from scipy.stats import spearmanr\n",
"from tqdm import tqdm # 用于显示进度条 (可选)\n",
"\n",
"# 设置 Matplotlib/Seaborn 样式 (可选)\n",
"sns.set_theme(style=\"whitegrid\")\n",
"plt.rcParams['font.sans-serif'] = ['SimHei'] # 或者其他支持中文的字体\n",
"plt.rcParams['axes.unicode_minus'] = False # 解决负号显示问题\n",
"\n",
"def analyze_score_performance_2d(score_df: pd.DataFrame,\n",
" score_col: str = 'score',\n",
" label_col: str = 'label',\n",
" condition1_col: str = 'circ_mv',\n",
" condition2_col: str = 'future_return',\n",
" n_bins: int = 100,\n",
" min_samples_per_bin: int = 30): # 每个格子最少样本数\n",
" \"\"\"\n",
" 分析 score 在两个条件下 (如市值、未来收益) 的二维分箱表现。\n",
"\n",
" Args:\n",
" score_df (pd.DataFrame): 包含分数、标签和条件列的 DataFrame。\n",
" score_col (str): 预测分数所在的列名。\n",
" label_col (str): 目标标签所在的列名 (应为数值或可排序类别)。\n",
" condition1_col (str): 第一个条件列名 (例如 'circ_mv')。\n",
" condition2_col (str): 第二个条件列名 (例如 'future_return')。\n",
" n_bins (int): 每个条件划分的箱数 (分位数数量)。\n",
" min_samples_per_bin (int): 计算指标所需的最小样本数,小于此数目的格子结果将被屏蔽。\n",
"\n",
" Returns:\n",
" tuple: 包含 (performance_pivot, count_pivot, fig)\n",
" performance_pivot: 以二维分箱为索引/列的 Spearman 相关系数矩阵。\n",
" count_pivot: 每个二维分箱的样本数量矩阵。\n",
" fig: 生成的热力图 Matplotlib Figure 对象。\n",
" \"\"\"\n",
" print(f\"开始分析 '{score_col}' 在 '{condition1_col}' 和 '{condition2_col}' 下的表现...\")\n",
"\n",
" required_cols = [score_col, label_col, condition1_col, condition2_col]\n",
" if not all(col in score_df.columns for col in required_cols):\n",
" missing = [col for col in required_cols if col not in score_df.columns]\n",
" raise ValueError(f\"输入 DataFrame 缺少必需列: {missing}\")\n",
"\n",
" # --- 1. 数据准备和清洗 ---\n",
" print(\"准备数据,处理 NaN 值...\")\n",
" # 只保留需要的列,并移除包含 NaN 的行,避免影响分箱和计算\n",
" analysis_df = score_df[required_cols].dropna().copy()\n",
" n_original = len(score_df)\n",
" n_after_drop = len(analysis_df)\n",
" print(f\"原始数据 {n_original} 行,移除 NaN 后剩余 {n_after_drop} 行用于分析。\")\n",
"\n",
" if n_after_drop < min_samples_per_bin * n_bins: # 检查数据量是否过少\n",
" print(f\"警告: 清理 NaN 后数据量 ({n_after_drop}) 可能不足以支持 {n_bins}x{n_bins} 的精细分箱分析。\")\n",
" if n_after_drop < min_samples_per_bin:\n",
" print(\"错误: 有效数据过少,无法进行分析。\")\n",
" return None, None, None\n",
"\n",
" # --- 2. 二维分箱 ---\n",
" print(f\"对 '{condition1_col}' 和 '{condition2_col}' 进行 {n_bins} 分位数分箱...\")\n",
" bin1_col = f'{condition1_col}_bin'\n",
" bin2_col = f'{condition2_col}_bin'\n",
"\n",
" try:\n",
" # 使用 qcut 进行分位数分箱labels=False 返回 0 到 n_bins-1 的整数标签\n",
" # duplicates='drop' 会丢弃导致边界不唯一的重复值所在的箱子,可能导致某些箱号缺失\n",
" # 对于可视化,这通常可以接受,但如果需要严格的等分,需先 rank\n",
" analysis_df[bin1_col] = pd.qcut(analysis_df[condition1_col], q=n_bins, labels=False, duplicates='drop')\n",
" analysis_df[bin2_col] = pd.qcut(analysis_df[condition2_col], q=n_bins, labels=False, duplicates='drop')\n",
" except Exception as e:\n",
" print(f\"错误: 分箱失败,请检查数据分布或减少 n_bins。错误信息: {e}\")\n",
" # 可以尝试先 rank 再 qcut\n",
" # analysis_df[bin1_col] = pd.qcut(analysis_df[condition1_col].rank(method='first'), q=n_bins, labels=False, duplicates='raise')\n",
" # analysis_df[bin2_col] = pd.qcut(analysis_df[condition2_col].rank(method='first'), q=n_bins, labels=False, duplicates='raise')\n",
" return None, None, None\n",
"\n",
" # --- 3. 分组计算表现指标 (Spearman Rank IC) ---\n",
" print(\"按二维分箱分组计算 Spearman Rank IC...\")\n",
"\n",
" def safe_spearmanr(x, y):\n",
" \"\"\"安全计算 Spearman 相关性,处理数据量过少的情况\"\"\"\n",
" if len(x) < max(2, min_samples_per_bin): # 要求至少有 min_samples_per_bin 个点才计算\n",
" return np.nan\n",
" corr, p_value = spearmanr(x, y)\n",
" return corr if not np.isnan(corr) else np.nan # 确保返回 NaN 而不是 None 或其他\n",
"\n",
" # 按两个分箱列分组\n",
" grouped = analysis_df.groupby([bin1_col, bin2_col])\n",
"\n",
" # 计算每个格子的 Spearman 相关系数\n",
" # apply 可能较慢,但计算相关性通常需要 apply\n",
" performance_series = grouped.apply(lambda sub: safe_spearmanr(sub[score_col], sub[label_col]))\n",
"\n",
" # 计算每个格子的样本数量\n",
" count_series = grouped.size()\n",
"\n",
" # --- 4. 结果整理成 Pivot Table (用于绘图) ---\n",
" print(\"整理结果用于绘图...\")\n",
" try:\n",
" # 将 performance_series 转换成二维矩阵\n",
" # index 为 condition1_bin, columns 为 condition2_bin\n",
" performance_pivot = performance_series.unstack(level=0) # level=0 对应第一个 groupby key (bin1_col)\n",
" count_pivot = count_series.unstack(level=0)\n",
"\n",
" # 可选:按列和索引排序,确保顺序正确\n",
" performance_pivot = performance_pivot.sort_index(axis=0).sort_index(axis=1)\n",
" count_pivot = count_pivot.sort_index(axis=0).sort_index(axis=1)\n",
" \n",
" print(performance_pivot)\n",
"\n",
" except Exception as e:\n",
" print(f\"错误: 无法将结果转换为二维矩阵,可能因为分箱不均匀或数据问题: {e}\")\n",
" return None, None, None\n",
"\n",
" # --- 5. 可视化:绘制热力图 ---\n",
" print(\"生成热力图...\")\n",
" fig, ax = plt.subplots(figsize=(16, 12)) # 调整图像大小\n",
"\n",
" # 使用 count_pivot 创建一个 mask屏蔽掉样本量过小的格子\n",
" mask = count_pivot < min_samples_per_bin\n",
"\n",
" # 绘制热力图\n",
" sns.heatmap(performance_pivot,\n",
" annot=False, # 100x100 个格子加注释会太密集\n",
" fmt=\".2f\",\n",
" cmap=\"viridis\", # 选择颜色映射, 'viridis', 'coolwarm', 'RdYlGn' 等都不错\n",
" linewidths=.5,\n",
" linecolor='lightgray',\n",
" # mask=mask, # 应用 mask\n",
" ax=ax,\n",
" cbar_kws={'label': f'Spearman Rank IC ({score_col} vs {label_col})'}) # 颜色条标签\n",
"\n",
" # 设置标题和轴标签\n",
" ax.set_title(f'{score_col} 表现分析 (Rank IC vs {label_col})\\n基于 {condition1_col} 和 {condition2_col} {n_bins}x{n_bins} 分箱', fontsize=16)\n",
" ax.set_xlabel(f'{condition1_col} 分位数 (0 -> 高)', fontsize=12)\n",
" ax.set_ylabel(f'{condition2_col} 分位数 (0 -> 高)', fontsize=12)\n",
"\n",
" # 可选:调整刻度标签,避免显示所有 100 个刻度\n",
" if n_bins > 20:\n",
" tick_interval = n_bins // 10 # 大约显示 10 个刻度\n",
" ax.set_xticks(np.arange(0, n_bins, tick_interval) + 0.5)\n",
" ax.set_yticks(np.arange(0, n_bins, tick_interval) + 0.5)\n",
" ax.set_xticklabels(np.arange(0, n_bins, tick_interval))\n",
" ax.set_yticklabels(np.arange(0, n_bins, tick_interval))\n",
"\n",
" plt.xticks(rotation=45, ha='right')\n",
" plt.yticks(rotation=0)\n",
" plt.tight_layout() # 调整布局\n",
"\n",
" print(\"分析完成。\")\n",
" return performance_pivot, count_pivot, fig\n",
"\n",
"# --- 如何使用 ---\n",
"# 假设你的包含预测结果和所需列的 DataFrame 是 final_predictions_df\n",
"# 确保它包含 'score', 'label', 'circ_mv', 'future_return'\n",
"\n",
"# # 示例调用 (你需要有实际的 score_df)\n",
"try:\n",
" # 确保数据类型正确\n",
" cols_to_numeric = ['score', 'label', 'circ_mv', 'future_return']\n",
" for col in cols_to_numeric:\n",
" if col in score_df.columns:\n",
" score_df[col] = pd.to_numeric(score_df[col], errors='coerce')\n",
"\n",
" # 调用分析函数\n",
" performance_matrix, count_matrix, heatmap_figure = analyze_score_performance_2d(\n",
" score_df,\n",
" n_bins=100, # 你要求的100分箱\n",
" min_samples_per_bin=50 # 每个格子至少需要50个样本才显示IC可以调整\n",
" )\n",
"\n",
" # 显示图像\n",
" if heatmap_figure:\n",
" plt.show()\n",
"\n",
" # 可以查看具体的 performance_matrix 和 count_matrix\n",
" # print(\"\\nPerformance Matrix (Spearman IC):\")\n",
" # print(performance_matrix)\n",
" # print(\"\\nCount Matrix:\")\n",
" # print(count_matrix)\n",
"\n",
"except ValueError as ve:\n",
" print(f\"数据错误: {ve}\")\n",
"except Exception as e:\n",
" print(f\"发生未知错误: {e}\")"
]
},
{
"cell_type": "code",
"execution_count": 32,
"id": "a436dba4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Empty DataFrame\n",
"Columns: [ts_code, trade_date, is_st]\n",
"Index: []\n"
]
}
],
"source": [
"print(df[(df['ts_code'] == '600242.SH') & (df['trade_date'] >= '2023-06-01')][['ts_code', 'trade_date', 'is_st']])"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "new_trader",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}