Commit
Quick bug fix
woldy committed Sep 10, 2024
1 parent 6a1fd50 commit 1c22ed1
Showing 4 changed files with 28 additions and 12 deletions.
2 changes: 1 addition & 1 deletion database/finhack_structure.sql
@@ -255,4 +255,4 @@ CREATE TABLE `rqalpha` (
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

- -- Dump completed on 2024-09-10 1:12:17
+ -- Dump completed on 2024-09-11 2:06:05
20 changes: 17 additions & 3 deletions finhack/factor/default/indicatorCompute.py
@@ -234,7 +234,7 @@ def computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=
single_factors_path=SINGLE_FACTORS_DIR+factor_name+'.csv'
if os.path.isfile(single_factors_path):
has_csv=True
-#df_factor lacks this column, diff_date>100, the code is unchanged, and a csv exists
+#df_factor lacks this column, diff_date>100, the code is unchanged, or no csv exists
if (not factor_name in df_factor.columns or diff_date>100 or has_csv==False) and (not factor_name in c_list):
# if not factor_name in df_all.columns:
if True:
@@ -243,7 +243,11 @@ def computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=
df_all[factor_name]=np.nan
else:
df_all=df_all_tmp

# print(factor_name)
# print(df_all)



#otherwise compute the 250-day data
else:
if df_250 is None or isinstance(df_250, bool):
@@ -254,6 +258,9 @@ def computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=



# print(df_all)
# exit()

if(first_time):
df_factor=df_all
else:
@@ -346,12 +353,16 @@ def computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=

#compute a single factor for a single stock
def computeFactorByStock(ts_code,factor_name,df_price=pd.DataFrame(),where='',db='tushare'):


if(df_price.empty):
df_price=AStock.getStockDailyPriceByCode(code=ts_code,where=where,db='tushare')
#df_result=df_price.copy()
if(df_price.empty):
return pd.DataFrame()

if factor_name in df_price.columns:
return df_price

indicators,func_name,code,return_fileds=indicatorCompute.getFactorInfo(factor_name)

@@ -455,6 +466,8 @@ def computeFactorByStock(ts_code,factor_name,df_price=pd.DataFrame(),where='',db=
#print(suffix)
#exit()



#if multiple columns are returned, apply the same shift calculation to each
if True:
for f in rlist:
Expand All @@ -472,9 +485,10 @@ def computeFactorByStock(ts_code,factor_name,df_price=pd.DataFrame(),where='',db
#print("f=%s,fs=%s" %(f,factor_name))
#print(len(df.columns))

# print("hint")
# print(factor_name)
# print(df)

#exit()
del df_price
del func
return df
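For readers skimming this diff, the substance of the indicatorCompute.py change is twofold: the recompute guard now also fires when no per-factor CSV exists on disk (the old comment said the opposite of what the code checks), and computeFactorByStock gains two early returns (empty price data yields an empty DataFrame; an already-present factor column returns df_price untouched). A minimal standalone sketch of the corrected guard, with simplified hypothetical names -- the real code reads SINGLE_FACTORS_DIR from project config and takes more context:

    import os

    SINGLE_FACTORS_DIR = "./single_factors/"  # hypothetical path, stands in for the configured dir

    def needs_recompute(factor_name, df_factor, diff_date, c_list):
        # Recompute when the factor column is missing, the cached data is
        # more than 100 days stale, or no per-factor CSV exists on disk --
        # unless the factor was already computed in this run (c_list).
        has_csv = os.path.isfile(SINGLE_FACTORS_DIR + factor_name + ".csv")
        missing_or_stale = (
            factor_name not in df_factor.columns
            or diff_date > 100
            or not has_csv
        )
        return missing_or_stale and factor_name not in c_list

This mirrors the boolean condition in computeListByStock above; it is an illustrative reduction, not the project's API.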
9 changes: 1 addition & 8 deletions finhack/factor/default/taskRunner.py
@@ -27,19 +27,13 @@ def runTask(task_list):
if True:
for factor_list_name in task_list:
#factor lists

if os.path.exists(CONFIG_DIR+"/factorlist/indicatorlist/"+factor_list_name):

with open(CONFIG_DIR+"/factorlist/indicatorlist/"+factor_list_name, 'r', encoding='utf-8') as f:
factor_list=[_.rstrip('\n') for _ in f.readlines()]
#print(factor_list)
indicatorCompute.computeList(list_name=factor_list_name,factor_list=factor_list,c_list=c_list)



#continue
#alpha lists

for factor_list_name in task_list:
if os.path.exists(CONFIG_DIR+"/factorlist/alphalist/"+factor_list_name):
with open(CONFIG_DIR+"/factorlist/alphalist/"+factor_list_name, 'r', encoding='utf-8') as f:
@@ -51,9 +45,8 @@ def runTask(task_list):
i=i+1
alpha_name=factor_list_name+'_'+str(i).zfill(3)
mytask=pool.submit(alphaEngine.calc,factor,pd.DataFrame(),alpha_name,False,True)

#alphaEngine.calc(factor,pd.DataFrame(),alpha_name)
#exit()
os.system('mv '+CACHE_DIR+'/single_factors_tmp2/* '+SINGLE_FACTORS_DIR)
factorPkl.save()

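Incidentally, the cleanup keeps the shell call os.system('mv ...') that publishes the temporary per-factor CSVs. A portable sketch of the same step in pure Python, assuming CACHE_DIR and SINGLE_FACTORS_DIR as used above (glob plus shutil avoids depending on a POSIX shell; note that overwrite behavior differs slightly from `mv` across platforms):

    import glob
    import os
    import shutil

    CACHE_DIR = "./cache"               # hypothetical stand-ins for the
    SINGLE_FACTORS_DIR = "./factors/"   # configured finhack directories

    # Move every file out of the tmp staging dir into the published dir.
    for src in glob.glob(os.path.join(CACHE_DIR, 'single_factors_tmp2', '*')):
        shutil.move(src, SINGLE_FACTORS_DIR)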
@@ -0,0 +1,9 @@
UPPER_0
MIDDLE_0
LOWER
DEMA_0
EMA_90_0
HTTRENDLINE_0
MACD
MACDSIGNAL_0
MACDHIST

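The fourth file is new (9 additions) and, judging by its contents, is an indicator list of the kind runTask reads from factorlist/indicatorlist/. The names appear to follow the convention of suffixing a multi-output indicator's return values with _N. A hedged sketch of how such columns are typically produced with TA-Lib -- the parameters below are assumptions, since the list file itself carries none:

    import numpy as np
    import talib

    close = np.random.random(200)  # placeholder close-price series for the sketch

    # BBANDS returns three arrays -> UPPER_0, MIDDLE_0, LOWER
    upper, middle, lower = talib.BBANDS(close, timeperiod=20)
    # MACD returns three arrays -> MACD, MACDSIGNAL_0, MACDHIST
    macd, macdsignal, macdhist = talib.MACD(close, fastperiod=12, slowperiod=26, signalperiod=9)
    dema = talib.DEMA(close, timeperiod=30)       # DEMA_0
    ema_90 = talib.EMA(close, timeperiod=90)      # EMA_90_0
    trendline = talib.HT_TRENDLINE(close)         # HTTRENDLINE_0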