diff --git a/database/finhack_structure.sql b/database/finhack_structure.sql index f5ffcc7..0fb4ce3 100644 --- a/database/finhack_structure.sql +++ b/database/finhack_structure.sql @@ -178,4 +178,4 @@ CREATE TABLE `factors_mining` ( /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; --- Dump completed on 2023-12-27 13:30:47 +-- Dump completed on 2024-01-06 21:56:46 diff --git a/finhack/factor/default/indicators/QIML365.py b/examples/demo-project/indicators/QIML365.py similarity index 100% rename from finhack/factor/default/indicators/QIML365.py rename to examples/demo-project/indicators/QIML365.py diff --git a/finhack/factor/default/indicators/__init__.py b/examples/demo-project/indicators/__init__.py similarity index 100% rename from finhack/factor/default/indicators/__init__.py rename to examples/demo-project/indicators/__init__.py diff --git a/finhack/factor/default/indicators/extend.py b/examples/demo-project/indicators/extend.py similarity index 100% rename from finhack/factor/default/indicators/extend.py rename to examples/demo-project/indicators/extend.py diff --git a/finhack/factor/default/indicators/financial.py b/examples/demo-project/indicators/financial.py similarity index 100% rename from finhack/factor/default/indicators/financial.py rename to examples/demo-project/indicators/financial.py diff --git a/finhack/factor/default/indicators/member.py b/examples/demo-project/indicators/member.py similarity index 100% rename from finhack/factor/default/indicators/member.py rename to examples/demo-project/indicators/member.py diff --git a/finhack/factor/default/indicators/myfactors.py b/examples/demo-project/indicators/myfactors.py similarity index 100% rename from finhack/factor/default/indicators/myfactors.py rename to examples/demo-project/indicators/myfactors.py diff --git a/finhack/factor/default/indicators/ta_lib.py b/examples/demo-project/indicators/ta_lib.py similarity index 100% rename from 
finhack/factor/default/indicators/ta_lib.py rename to examples/demo-project/indicators/ta_lib.py diff --git a/finhack/factor/default/indicators/volumeprice.py b/examples/demo-project/indicators/volumeprice.py similarity index 100% rename from finhack/factor/default/indicators/volumeprice.py rename to examples/demo-project/indicators/volumeprice.py diff --git a/examples/demo-project/loader/testmodule_loader.py b/examples/demo-project/loader/testmodule_loader.py index 5876d94..fe88ade 100644 --- a/examples/demo-project/loader/testmodule_loader.py +++ b/examples/demo-project/loader/testmodule_loader.py @@ -7,7 +7,7 @@ def testaction(self): Log.logger.debug("loading "+self.module_name) Log.logger.debug("testarg1 is:"+str(self.args.testarg1)) Log.logger.debug("testarg2 is:"+str(self.args.testarg2)) - obj=self.klass() + obj=self.klass obj.args=self.args obj.run() diff --git a/examples/demo-project/testmodule/default/default_testmodule.py b/examples/demo-project/testmodule/default/default_testmodule.py index d2735b6..f49d0ba 100644 --- a/examples/demo-project/testmodule/default/default_testmodule.py +++ b/examples/demo-project/testmodule/default/default_testmodule.py @@ -1,8 +1,10 @@ import finhack.library.log as Log from runtime.constant import * import runtime.global_var as global_var - +from finhack.market.astock.astock import AStock import time +from finhack.factor.default.factorManager import factorManager +from finhack.factor.default.factorAnalyzer import factorAnalyzer class DefaultTestmodule(): def __init__(self): pass @@ -18,4 +20,10 @@ def run(self): def run2(self): print(self.args) - print('run2') \ No newline at end of file + print('run2') + stock_list=AStock.getStockCodeList(strict=False, db='tushare') + print(stock_list) + + + def run3(self): + factorAnalyzer.alphalens("pe_0") \ No newline at end of file diff --git a/finhack/__init__.py b/finhack/__init__.py index f9250db..f65329a 100644 --- a/finhack/__init__.py +++ b/finhack/__init__.py @@ -1 +1 @@ 
-__version__ = '0.0.1.dev4' +__version__ = '0.0.1.dev5' diff --git a/finhack/collector/tushare/tushare_collector.py b/finhack/collector/tushare/tushare_collector.py index 28f659f..080f912 100755 --- a/finhack/collector/tushare/tushare_collector.py +++ b/finhack/collector/tushare/tushare_collector.py @@ -69,6 +69,9 @@ def run(self): table=list(v.values())[0] tsSHelper.setIndex(table,db) + + def save(): + pass def getAStockBasic(self): diff --git a/finhack/core/loader/collector_loader.py b/finhack/core/loader/collector_loader.py index 00f30cc..5585bef 100644 --- a/finhack/core/loader/collector_loader.py +++ b/finhack/core/loader/collector_loader.py @@ -5,7 +5,7 @@ def run(self): # print(self.module_path) # print(self.user_module_path) # print(self.klass) - collector=self.klass() + collector=self.klass collector.run() pass diff --git a/finhack/core/loader/factor_loader.py b/finhack/core/loader/factor_loader.py index f25bc93..78f8b03 100644 --- a/finhack/core/loader/factor_loader.py +++ b/finhack/core/loader/factor_loader.py @@ -5,7 +5,7 @@ def run(self): # print(self.module_path) # print(self.user_module_path) # print(self.klass) - factor=self.klass() + factor=self.klass factor.run() pass diff --git a/finhack/core/loader/helper_loader.py b/finhack/core/loader/helper_loader.py index 2c79608..7a44486 100644 --- a/finhack/core/loader/helper_loader.py +++ b/finhack/core/loader/helper_loader.py @@ -5,7 +5,7 @@ def run(self): # print(self.module_path) # print(self.user_module_path) # print(self.klass) - helper=self.klass() + helper=self.klass helper.run() pass diff --git a/finhack/core/loader/trader_loader.py b/finhack/core/loader/trader_loader.py index d14266d..f1143c2 100644 --- a/finhack/core/loader/trader_loader.py +++ b/finhack/core/loader/trader_loader.py @@ -5,7 +5,7 @@ def run(self): # print(self.module_path) # print(self.user_module_path) # print(self.klass) - trader=self.klass() + trader=self.klass trader.args=self.args trader.run() diff --git 
a/finhack/factor/default/alphaEngine.py b/finhack/factor/default/alphaEngine.py index 197ab08..d62a1a0 100755 --- a/finhack/factor/default/alphaEngine.py +++ b/finhack/factor/default/alphaEngine.py @@ -786,9 +786,6 @@ def calc(formula='',df=pd.DataFrame(),name="alpha",check=False,replace=False): df=df.sort_index() - - - if diff_date>0 and diff_date<100: dt=datetime.datetime.strptime(str(max_date),'%Y%m%d') start_date=dt-datetime.timedelta(days=700) diff --git a/finhack/factor/default/default_factor.py b/finhack/factor/default/default_factor.py index 6e4d4a0..cf8274b 100644 --- a/finhack/factor/default/default_factor.py +++ b/finhack/factor/default/default_factor.py @@ -6,5 +6,12 @@ from finhack.market.astock.astock import AStock from finhack.factor.default.taskRunner import taskRunner class DefaultFactor: + def __init__(self): + pass + def run(self): - taskRunner.runTask() \ No newline at end of file + pass + taskRunner.runTask() + + def test(self): + print(self.args) \ No newline at end of file diff --git a/finhack/factor/default/factorAnalyzer.py b/finhack/factor/default/factorAnalyzer.py index 6d066b7..3a35f60 100755 --- a/finhack/factor/default/factorAnalyzer.py +++ b/finhack/factor/default/factorAnalyzer.py @@ -3,9 +3,18 @@ import traceback import numpy as np import pandas as pd - +import pandas as pd +import alphalens as al +from alphalens.utils import get_clean_factor_and_forward_returns +from alphalens.tears import create_full_tear_sheet +import warnings +warnings.simplefilter(action='ignore', category=FutureWarning) +warnings.simplefilter(action='ignore', category=RuntimeWarning) +warnings.simplefilter(action='ignore', category=UserWarning) from finhack.library.mydb import mydb from finhack.factor.default.factorManager import factorManager +from finhack.market.astock.astock import AStock +from scipy.stats import zscore class factorAnalyzer(): @@ -13,6 +22,114 @@ class factorAnalyzer(): def all_corr(): pass + + + def alphalens(factor_name): + + + 
df_industry=AStock.getStockIndustry() + + + # df_all.index=df_all['date'] + # price.index = pd.to_datetime(price.index) + # assets = df_all.set_index( [df_all.index,df_all['symbol']], drop=True,append=False, inplace=False) + df=factorManager.getFactors(factor_list=['close',factor_name]) + # 假设 df 是您提供的 DataFrame,我们首先重置索引 + df = df.reset_index().merge(df_industry, on='ts_code') + df['industry'] = df['industry'].fillna('其他') + + df['trade_date'] = pd.to_datetime(df['trade_date'], format='%Y%m%d') + + df[factor_name] = df.groupby(['trade_date', 'industry'])[factor_name].transform(zscore) + + # 确保因子值没有 NaN,如果有 NaN,可以选择填充或者去除对应的行 + df = df.dropna(subset=[factor_name,'industry']) + + # 重置索引,准备进行 Alphalens 分析 + df = df.set_index(['trade_date', 'ts_code']) + + # 创建价格 DataFrame + prices = df['close'].unstack() + + # 获取行业中性化后的因子数据 + factor = df[factor_name] + + + unique_industries = df['industry'].unique() + # 创建 groupby_labels 字典,将每个行业标签映射到自己,确保没有遗漏 + groupby_labels = {ind: ind for ind in unique_industries} + + + # 检查 groupby_labels 是否包含所有 unique_industries 中的行业 + missing_labels = [ind for ind in unique_industries if ind not in groupby_labels] + if missing_labels: + print(f"Missing industry labels in groupby_labels: {missing_labels}") + # 您可以选择添加缺失的行业标签到 groupby_labels 中 + for missing in missing_labels: + groupby_labels[missing] = '其他' # 或者将其映射到 '其他' + + # 使用 Alphalens 进行因子分析 + factor_data = al.utils.get_clean_factor_and_forward_returns( + factor=factor, + prices=prices, + periods=(1, 5, 10), + groupby=df['industry'], # 指定行业分组 + groupby_labels=groupby_labels, # 指定行业标签 + ) + + + # 因子收益率分析 + mean_return_by_qt, std_err_by_qt = al.performance.mean_return_by_quantile(factor_data) + #aal.plotting.plot_quantile_returns_bar(mean_return_by_qt) + #aplt.show() + + # 因子信息比率 + ic_by_day = al.performance.factor_information_coefficient(factor_data) + #al.plotting.plot_information_coefficient(ic_by_day) + #plt.show() + + # 分位数平均收益率 + quantile_returns = 
al.performance.mean_return_by_quantile(factor_data)[0].apply(al.utils.rate_of_return, axis=0, base_period='1D') + #al.plotting.plot_quantile_returns_violin(quantile_returns) + #plt.show() + + # 分位数累积收益 + #cumulative_returns_by_qt = al.performance.cumulative_returns_by_quantile(factor_data, period=1) + #al.plotting.plot_cumulative_returns_by_quantile(cumulative_returns_by_qt, period=1) + #plt.show() + + # 分位数收益率的全面统计 + #full_tear_sheet = al.tears.create_full_tear_sheet(factor_data, long_short=True, group_neutral=False, by_group=False) + + # 因子自相关性分析 + autocorrelation = al.performance.factor_rank_autocorrelation(factor_data) + #al.plotting.plot_autocorrelation(autocorrelation) + #plt.show() + + # 因子收益率和分位数收益率的IC分析 + mean_monthly_ic = al.performance.mean_information_coefficient(factor_data, by_time='M') + #al.plotting.plot_monthly_ic_heatmap(mean_monthly_ic) + #plt.show() + + print("\nmean_return_by_qt") + print(mean_return_by_qt) + print("\nic_by_day") + print(ic_by_day) + print("\nquantile_returns") + print(quantile_returns) + print("\nautocorrelation") + print(autocorrelation) + print("\nmean_monthly_ic") + print(mean_monthly_ic) + + # print('---') + # print(full_tear_sheet) + + #al.plotting.plot_quantile_returns_bar(mean_return_by_qt) + pass + + + def analys(factor_name,df=pd.DataFrame(),days=[1,2,3,5,8,13,21],pool='all',start_date='20000101',end_date='20100101',formula="",relace=False,table='factors_analysis'): try: diff --git a/finhack/factor/default/factorManager.py b/finhack/factor/default/factorManager.py index fc51ff3..2e44e7b 100755 --- a/finhack/factor/default/factorManager.py +++ b/finhack/factor/default/factorManager.py @@ -164,7 +164,7 @@ def getAlphaList(listname): def getIndicatorsList(): return_fileds=[] - path = os.path.dirname(__file__)+"/indicators/" + path = INDICATORS_DIR for subfile in os.listdir(path): if not '__' in subfile: indicators=subfile.split('.py') diff --git a/finhack/factor/default/indicatorCompute.py 
b/finhack/factor/default/indicatorCompute.py index c865533..9008c16 100755 --- a/finhack/factor/default/indicatorCompute.py +++ b/finhack/factor/default/indicatorCompute.py @@ -12,7 +12,7 @@ from functools import lru_cache from importlib import import_module from multiprocessing import cpu_count - +import importlib from runtime.constant import * from finhack.library.mydb import mydb from finhack.library.config import Config @@ -66,7 +66,9 @@ def split_list_n_list(origin_list, n): for code_list in code_lists: with ProcessPoolExecutor(max_workers=n) as pool: for ts_code in code_list: - mytask=pool.submit(indicatorCompute.computeListByStock,ts_code,list_name,'',factor_list,c_list) + #computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=[],pure=True,check=True,df_price=pd.DataFrame(),db='tushare'): + + mytask=pool.submit(indicatorCompute.computeListByStock,ts_code,list_name,'',factor_list,c_list,false,false) tasklist.append(mytask) wait(tasklist,return_when=ALL_COMPLETED) @@ -79,7 +81,7 @@ def split_list_n_list(origin_list, n): #计算单支股票的一坨因子 #pure=True时,只保留factor_list中的因子 - def computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=[],pure=True,check=False,df_price=pd.DataFrame(),db='tushare'): + def computeListByStock(ts_code,list_name='all',where='',factor_list=None,c_list=[],pure=True,check=True,df_price=pd.DataFrame(),db='tushare'): try: Log.logger.info('computeListByStock---'+ts_code) @@ -344,7 +346,22 @@ def computeFactorByStock(ts_code,factor_name,df_price=pd.DataFrame(),where='',db df_price=indicatorCompute.computeFactorByStock(ts_code,f,df_price,db) factor=factor_name.split('_') - module = getattr(import_module('finhack.factor.default.indicators.'+indicators), indicators) + #module = getattr(import_module('finhack.factor.default.indicators.'+indicators), indicators) + + + + # 定义文件路径 + file_path = INDICATORS_DIR+indicators+".py" + + # 获取文件名和类名 + module_name = indicators + class_name = indicators + + # 加载模块 + spec = 
importlib.util.spec_from_file_location(module_name, file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + func=getattr(module,func_name,lambda x,y:x) shift="0" @@ -424,7 +441,7 @@ def computeFactorByStock(ts_code,factor_name,df_price=pd.DataFrame(),where='',db def getFactorInfo(factor_name): factor=factor_name.split('_') factor_filed=factor[0] - path = os.path.dirname(__file__)+"/indicators/" + path = INDICATORS_DIR for subfile in os.listdir(path): if not '__' in subfile: indicators=subfile.split('.py') diff --git a/finhack/factor/default/preCheck.py b/finhack/factor/default/preCheck.py index f62136f..830cbf6 100755 --- a/finhack/factor/default/preCheck.py +++ b/finhack/factor/default/preCheck.py @@ -56,7 +56,7 @@ def checkIndicatorsChange(): factor=factor_name.split('_') factor_filed=factor[0] return_fileds=[] - path = os.path.dirname(__file__)+"/indicators/" + path = INDICATORS_DIR for subfile in os.listdir(path): if find: continue diff --git a/finhack/market/astock/astock.py b/finhack/market/astock/astock.py index 8ed7ebc..adbd9ab 100755 --- a/finhack/market/astock/astock.py +++ b/finhack/market/astock/astock.py @@ -15,431 +15,32 @@ from concurrent.futures import ThreadPoolExecutor,ProcessPoolExecutor, wait, ALL_COMPLETED -class AStock: - - def getStockCodeList(strict=True,db='tushare'): - # - if strict: - sql="select ts_code from astock_price_daily GROUP BY ts_code" - else: - sql = "select ts_code from astock_basic;" - try: - df_code=mydb.selectToDf(sql,'tushare') - return df_code - except Exception as e: - print("MySQL getStockCodeList Error:%s" % str(e)) - return False - - - - def getIndexMember(index='000300.SH',trade_date='20221031'): - if trade_date=='': - sql = "select con_code from astock_index_weight where ts_code='%s'" % (index) - else: - sql = "select con_code from astock_index_weight where ts_code='%s' and trade_date='%s' " % (index,trade_date); - - - try: - df=mydb.selectToDf(sql,'tushare') - return 
df['con_code'].tolist() - except Exception as e: - print("MySQL getStockCodeList Error:%s" % str(e)) - return False - - - def getIndexPrice(ts_code='000300.SH',start_date=None,end_date=None): - c1="" - c2="" - if not start_date==None: - c1=" and trade_date>='%s' " % (start_date) - if not end_date==None: - c2=" and trade_date<='%s' " % (end_date) - - sql = "select trade_date,close from astock_index_daily where ts_code='%s' %s %s order by trade_date asc" % (ts_code,c1,c2) - - #print(sql) - - try: - df=mydb.selectToDf(sql,'tushare') - return df - except Exception as e: - print("MySQL getStockCodeList Error:%s" % str(e)) - return False - return df - - - - - def getTableDataByCode(table,ts_code,where="",db='tushare'): - sql="select * from "+table+" where ts_code='"+ts_code+"' "+where - result=mydb.select(sql,'tushare') - df_date = pd.DataFrame(list(result)) - df_date=df_date.reset_index(drop=True) - return df_date - - - def getTableData(table,where="",db='tushare'): - sql="select * from "+table+" where 1=1 "+where - #print(sql) - result=mydb.select(sql,'tushare') - df_date = pd.DataFrame(list(result)) - df_date=df_date.reset_index(drop=True) - return df_date - - - - #获取股票日线行情 - def getStockDailyPrice(code_list=[],where="",startdate='',enddate='',fq='hfq',db='tushare',cache=True): - df=[] - - result=[] - if len(code_list)==0: - df_code=AStock.getStockCodeList() - code_list=df_code['ts_code'].tolist() - - - - hashstr=','.join(code_list)+'-'+where+'-'+startdate+'-'+enddate+'-'+fq+'-'+db - md5=hashlib.md5(hashstr.encode(encoding='utf-8')).hexdigest() - cache_path=PRICE_CACHE_DIR+md5 - if os.path.isfile(cache_path): - #print('read cache---'+cache_path) - #print(hashstr) - t = time.time()-os.path.getmtime(cache_path) - if t<60*60*12 and cache: #缓存时间为12小时 - df=pd.read_pickle(cache_path) - return df - - - with ProcessPoolExecutor(max_workers=5) as pool: - def get_result(task): - exception = task.exception() - if exception: - # 如果exception获取到了值,说明有异常.exception就是异常类 - 
print(exception) - else: - result.append(task.result()) - tasklist=[] - for code in code_list: - mytask=pool.submit(AStock.getStockDailyPriceByCode,code=code,where=where,startdate=startdate,enddate=enddate,fq=fq) - mytask.add_done_callback(get_result) - tasklist.append(mytask) - wait(tasklist, return_when=ALL_COMPLETED) - - print('all completed') - if len(result)==0: - return pd.DataFrame() - df=pd.concat(result) - - df=df.sort_values(by=['ts_code','trade_date'], ascending=[True,True]) - - df.to_pickle(cache_path) - return df - - - def checkLimit(): - pass - - def getStockDailyPriceByCode(code,where="",startdate='',enddate='',fq='hfq',db='tushare',cache=True): - try: - - datewhere1='' - datewhere2='' - if startdate!='': - datewhere1=' and trade_date>='+startdate+' ' - if enddate!='': - datewhere2=' and trade_date<='+enddate+' ' - datewhere=datewhere1+datewhere2 - - where=where+datewhere - - #print('getStockDailyPriceByCode---'+code) - hashstr=code+'-'+where+'-'+startdate+'-'+enddate+'-'+fq - md5 = hashlib.md5(hashstr.encode(encoding='utf-8')).hexdigest() - cache_path=PRICE_CACHE_DIR+code+'_'+md5 - - - try: - if os.path.isfile(cache_path): - #print('read cache---'+code+','+cache_path) - t = time.time()-os.path.getmtime(cache_path) - if t<60*60*12 and cache: #缓存时间为12小时 - df=pd.read_pickle(cache_path) - return df - except Exception as e: - print(str(e)) - print(cache_path) - - - if where.strip()!="" and where.strip().lower()[0:3] !='and': - where=' and '+where - - - df_price=AStock.getTableDataByCode('astock_price_daily',code,where) - df_price.drop_duplicates(subset='trade_date',keep='first',inplace=True) - #df_price['ts_code']=code - if(df_price.empty): - return df_price - - - - - - - calendar=AStock.getTableData('astock_trade_cal',datewhere.replace('trade_date','cal_date')+' and is_open=1') - calendar.rename(columns={'cal_date':'trade_date'}, inplace = True) - calendar=calendar[['trade_date']] - last_date=max(df_price['trade_date']) - 
calendar=calendar[calendar.trade_date<=last_date] - - df_adj = AStock.getTableDataByCode('astock_price_adj_factor',code,'') - - # print(111) - # print(df_adj) - - - - - first_adj=float(df_adj.iloc[0].adj_factor) - last_adj=float(df_adj.iloc[-1].adj_factor) - - df_adj = pd.merge(calendar,df_adj, on='trade_date',how='left') - - - - - df_adj = df_adj.ffill() - df_adj=df_adj.drop('ts_code',axis=1) - - # print(df_adj) - - - df_name=AStock.getTableDataByCode('astock_namechange',code,datewhere.replace('trade_date','ann_date')) - - - - if df_name.empty: - name=AStock.getTableDataByCode('astock_basic',code,'') - df_price['name']=name['name'].values[0] - else: - df_name.rename(columns={'start_date':'trade_date'}, inplace = True) - df_name=df_name[['trade_date','name']] - df_price = pd.merge(df_price,df_name,how = 'outer',on=['trade_date']) - df_price=df_price.sort_values('trade_date') - df_price['name']=df_price['name'].ffill() - df_price=df_price.dropna(subset=['ts_code']) - - - - - df_updown=AStock.getTableDataByCode('astock_price_stk_limit',code,'') - - - if df_updown.empty or df_price.empty: - return pd.DataFrame() - df_price = pd.merge(df_price,df_updown, on='trade_date',how='left') - - df_price.rename(columns={'ts_code_x':'ts_code'}, inplace = True) - - - df_price['name']=df_price['name'].bfill() - df_price['name']=df_price['name'].fillna("") - - - df=df_price - df["adj_factor"]=1 - df["open"]=df["open"].astype(float) - df["high"]=df["high"].astype(float) - df["low"]=df["low"].astype(float) - df["close"]=df["close"].astype(float) - df["pre_close"]=df["pre_close"].astype(float) - df["change"]=df["change"].astype(float) - df["pct_chg"]=df["pct_chg"].astype(float) - df["vol"]=df["vol"].astype(float) - df['amount']=df['amount'].astype(float) - df["vwap"]=(df['amount'].astype(float)*1000)/(df['vol'].astype(float)*100+1) - df["stop"]=pd.isna(df['close']).astype(int) - #df["lh_limit"]=pd.isna(df['high']==df['low']).astype(int) - 
df.rename(columns={'vol':'volume','pct_chg':'returns'}, inplace = True) - - - df["upLimit"]=df['close'].shift(1) - df["downLimit"]=df['close'].shift(1) - - def updown(x,t="up"): - - if x.ts_code[0:3]=='300': - limit=0.20 - if x.trade_date<"2020824": - limit=0.10 - elif x.ts_code[0:3]=='688': - limit=0.20 - elif x.ts_code[0:1]=='7' or x.ts_code[0:1]=='8': - limit=0.30 - else: - limit=0.10 - if "ST" in x['name'] or "st" in x['name']: - limit=0.05 - - - if t=="up": - if pd.isnull(x.up_limit) and not pd.isnull(x.upLimit): - return round(x.upLimit*(1+limit),2) - else: - return x.up_limit - else: - if pd.isnull(x.down_limit) and not pd.isnull(x.downLimit) : - return round(x.downLimit*(1-limit),2) - else: - return x.down_limit - - - # df_price.at[0,'upLimit']=9999 - # df_price.at[0,'downLimit']=0 - - - - - df["upLimit"]= df.apply(lambda x:updown(x,"up") , axis=1) - df["downLimit"]= df.apply(lambda x:updown(x,"down") , axis=1) - - - - - - if(df_adj.empty): - df=df_price - df["adj_factor"]=1 - else: - df=df.drop("adj_factor",axis=1) - df = pd.merge(df,df_adj,how = 'right',on=['trade_date']) - df=df.dropna(subset=['ts_code']) - - if fq=="no": - df["open"]=df["open"].astype(float) - df["high"]=df["high"].astype(float) - df["low"]=df["low"].astype(float) - df["close"]=df["close"].astype(float) - df["pre_close"]=df["pre_close"].astype(float) - - df["upLimit"]=df["upLimit"].astype(float) - df["downLimit"]=df["downLimit"].astype(float) - - elif fq=="qfq": - #前复权价格 = close * adj_factor / last_adj - - df["open"]=df["open"].astype(float)*df["adj_factor"].astype(float)/last_adj - df["high"]=df["high"].astype(float)*df["adj_factor"].astype(float)/last_adj - df["low"]=df["low"].astype(float)*df["adj_factor"].astype(float)/last_adj - df["close"]=df["close"].astype(float)*df["adj_factor"].astype(float)/last_adj - df["pre_close"]=df["pre_close"].astype(float)*df["adj_factor"].astype(float)/last_adj - - df["upLimit"]=df["upLimit"].astype(float)*df["adj_factor"].astype(float)/last_adj - 
df["downLimit"]=df["downLimit"].astype(float)*df["adj_factor"].astype(float)/last_adj - - - else: - #后复权价格 = close × (first_adj × adj_factor / last_adj) - - df["open"]=df["open"].astype(float)*df["adj_factor"].astype(float)/first_adj - df["high"]=df["high"].astype(float)*df["adj_factor"].astype(float)/first_adj - df["low"]=df["low"].astype(float)*df["adj_factor"].astype(float)/first_adj - df["close"]=df["close"].astype(float)*df["adj_factor"].astype(float)/first_adj - df["pre_close"]=df["pre_close"].astype(float)*df["adj_factor"].astype(float)/first_adj - - df["upLimit"]=df["upLimit"].astype(float)*df["adj_factor"].astype(float)/first_adj - df["downLimit"]=df["downLimit"].astype(float)*df["adj_factor"].astype(float)/first_adj - - - df["change"]=df["change"].astype(float) - df["vwap"]=(df['amount'].astype(float)*1000)/(df['volume'].astype(float)*100+1) - df["stop"]=pd.isna(df['close']).astype(int) - - - df=df.drop('ts_code_y', axis=1) - df=df.drop(labels='up_limit', axis=1) - df=df.drop(labels='down_limit', axis=1) - - - df=df.ffill() - df=df.dropna(subset=['adj_factor','ts_code']) - df.drop_duplicates('trade_date',inplace = True) - df=df.sort_values(by='trade_date', ascending=True) - df=df.reset_index(drop=True) - del df_adj - del calendar - - - #print(df.columns) - - df.to_pickle(cache_path ) - return df - except Exception as e: - print("error") - print("err exception is %s" % traceback.format_exc()) - traceback.print_exc() - return pd.DataFrame() - - - - - def alignStockFactors(df,table,date,filed,conv=0,db='tushare'): - df=df.copy() - df=df.reset_index() - ts_code=df['ts_code'].tolist()[0] - df.drop_duplicates('trade_date',inplace = True) - - - if(filed=='*'): - df_factor=mydb.selectToDf("select * from "+table+" where ts_code='"+ts_code+"'",'tushare') - filed=mydb.selectToDf("select COLUMN_NAME from information_schema.COLUMNS where table_name = '"+table+"'",'tushare') - filed=filed['COLUMN_NAME'].tolist() - filed=",".join(filed) - else: - 
df_factor=mydb.selectToDf("select "+date+","+filed+" from "+table+" where ts_code='"+ts_code+"'",'tushare') - - - if isinstance(df_factor, bool) or df_factor.empty: - return pd.DataFrame() - - #去重 - try: - df_factor = df_factor[~df_factor[date].duplicated()] - except Exception as e: - print(df_factor) - - #财务报表中的时间,需要+1处理 - if conv==3: - df_factor[date]=df_factor[date].astype(str) - df_factor[date]=pd.to_datetime(df_factor[date],format='%Y%m%d',errors='coerce') - df_factor[date]=df_factor[date]+timedelta(days=1) - df_factor[date]=df_factor[date].astype(str) - df_factor[date]=df_factor[date].map(lambda x: x.replace('-','')) - df_factor['trade_date']=df_factor[date].map(lambda x: x.replace('-','')) - - - if not 'pandas' in str(type(df_factor)) or df_factor.empty: - df_res=df - for f in filed.split(','): - df[f]=0 - return df_res - - #转换时间,将yyyy-mm-dd转为yyyymmdd - if conv==1: - df_factor[date]=df_factor[date].astype(str) - df_factor['trade_date']=df_factor[date].map(lambda x: x.replace('-','')) - - df_res=pd.merge(df, df_factor, how='left', on='trade_date',validate="one_to_many", copy=True, indicator=False) - df_res.drop_duplicates('trade_date',inplace = True) - - if conv==2: #不填充 - pass - else: - df_res=df_res.fillna(method='ffill') # conv=0向下填充 - - df_res=df_res.set_index('index') - - del df_factor - return df_res - - +import importlib +from types import FunctionType + +class AStockMeta(type): + def __new__(cls, name, bases, dct): + # 创建新类时动态添加方法 + def create_dynamic_method(method_name): + @staticmethod + def dynamic_method(*args, **kwargs): + # 动态导入对应的模块 + source="tushare" + module_name = f"finhack.market.astock.{source}.astock" + module = importlib.import_module(module_name) + # 获取模块中的函数并调用 + func = getattr(module, method_name) + return func(*args, **kwargs) + return dynamic_method + + # 假设我们知道所有可能的方法名称列表 + # 或者可以从某处动态获取它们 + method_names = ['getStockCodeList', 'getStockIndustry', 'getIndexMember', 'getIndexPrice', 'getTableDataByCode', 'getTableData', 
'getStockDailyPrice', 'checkLimit', 'getStockDailyPriceByCode', 'alignStockFactors'] # 示例方法列表 + for method_name in method_names: + # 为每个方法名创建一个动态方法并添加到类字典中 + dct[method_name] = create_dynamic_method(method_name) + + return type.__new__(cls, name, bases, dct) + +class AStock(metaclass=AStockMeta): + pass diff --git a/finhack/market/astock/tushare/__init__.py b/finhack/market/astock/tushare/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/finhack/market/astock/tushare/astock.py b/finhack/market/astock/tushare/astock.py new file mode 100755 index 0000000..58dfade --- /dev/null +++ b/finhack/market/astock/tushare/astock.py @@ -0,0 +1,450 @@ +# coding=utf-8 +import pandas as pd +import sys +import datetime +import os +import traceback +import numpy as np +import hashlib +import time +import threading +# 股票信息获取模块 +from datetime import timedelta +from runtime.constant import * +from finhack.library.mydb import mydb +from concurrent.futures import ThreadPoolExecutor,ProcessPoolExecutor, wait, ALL_COMPLETED + + + +def getStockCodeList(strict=True,db='tushare'): + # + if strict: + sql="select ts_code from astock_price_daily GROUP BY ts_code" + else: + sql = "select ts_code from astock_basic;" + try: + df_code=mydb.selectToDf(sql,'tushare') + return df_code + except Exception as e: + print("MySQL getStockCodeList Error:%s" % str(e)) + return False + + + +def getStockIndustry(): + df=getTableData("astock_basic") + df=df[['ts_code','industry']] + return df + + +def getIndexMember(index='000300.SH',trade_date='20221031'): + if trade_date=='': + sql = "select con_code from astock_index_weight where ts_code='%s'" % (index) + else: + sql = "select con_code from astock_index_weight where ts_code='%s' and trade_date='%s' " % (index,trade_date); + + + try: + df=mydb.selectToDf(sql,'tushare') + return df['con_code'].tolist() + except Exception as e: + print("MySQL getStockCodeList Error:%s" % str(e)) + return False + + +def 
getIndexPrice(ts_code='000300.SH',start_date=None,end_date=None): + c1="" + c2="" + if not start_date==None: + c1=" and trade_date>='%s' " % (start_date) + if not end_date==None: + c2=" and trade_date<='%s' " % (end_date) + + sql = "select trade_date,close from astock_index_daily where ts_code='%s' %s %s order by trade_date asc" % (ts_code,c1,c2) + + #print(sql) + + try: + df=mydb.selectToDf(sql,'tushare') + return df + except Exception as e: + print("MySQL getStockCodeList Error:%s" % str(e)) + return False + return df + + + + +def getTableDataByCode(table,ts_code,where="",db='tushare'): + sql="select * from "+table+" where ts_code='"+ts_code+"' "+where + result=mydb.select(sql,'tushare') + df_date = pd.DataFrame(list(result)) + df_date=df_date.reset_index(drop=True) + return df_date + + +def getTableData(table,where="",db='tushare'): + sql="select * from "+table+" where 1=1 "+where + #print(sql) + result=mydb.select(sql,'tushare') + df_date = pd.DataFrame(list(result)) + df_date=df_date.reset_index(drop=True) + return df_date + + + + #获取股票日线行情 +def getStockDailyPrice(code_list=[],where="",startdate='',enddate='',fq='hfq',db='tushare',cache=True): + df=[] + + result=[] + if len(code_list)==0: + df_code=getStockCodeList() + code_list=df_code['ts_code'].tolist() + + + + hashstr=','.join(code_list)+'-'+where+'-'+startdate+'-'+enddate+'-'+fq+'-'+db + md5=hashlib.md5(hashstr.encode(encoding='utf-8')).hexdigest() + cache_path=PRICE_CACHE_DIR+md5 + if os.path.isfile(cache_path): + #print('read cache---'+cache_path) + #print(hashstr) + t = time.time()-os.path.getmtime(cache_path) + if t<60*60*12 and cache: #缓存时间为12小时 + df=pd.read_pickle(cache_path) + return df + + + with ProcessPoolExecutor(max_workers=5) as pool: + def get_result(task): + exception = task.exception() + if exception: + # 如果exception获取到了值,说明有异常.exception就是异常类 + print(exception) + else: + result.append(task.result()) + tasklist=[] + for code in code_list: + 
# NOTE(review): the chunk above this point held the tail of getStockDailyPrice
# (thread fan-out over codes, result concat and cache write); its definition
# starts outside this view and is not reproduced here.


def checkLimit():
    # Placeholder: limit-up/limit-down handling currently lives inline in
    # getStockDailyPriceByCode()'s updown() helper.
    pass


def getStockDailyPriceByCode(code, where="", startdate='', enddate='', fq='hfq', db='tushare', cache=True):
    """Build the daily OHLCV frame for one stock code.

    Merges raw daily prices with the trading calendar, adjustment factors,
    name-change history (for ST detection) and the exchange's price-limit
    table, then applies the requested price adjustment:

        fq='no'   raw prices
        fq='qfq'  forward-adjusted  (price * adj_factor / last_adj)
        fq='hfq'  backward-adjusted (price * adj_factor / first_adj)

    The result is pickled under PRICE_CACHE_DIR (12h TTL when cache=True).
    Returns an empty DataFrame on any error or when source data is missing.
    """
    try:
        # Date filters; the values are interpolated into SQL downstream.
        # NOTE(review): string-built SQL — acceptable only while code/dates
        # come from trusted internal callers.
        datewhere1 = ''
        datewhere2 = ''
        if startdate != '':
            datewhere1 = ' and trade_date>=' + startdate + ' '
        if enddate != '':
            datewhere2 = ' and trade_date<=' + enddate + ' '
        datewhere = datewhere1 + datewhere2
        where = where + datewhere

        # Cache key covers every argument that changes the result.
        hashstr = code + '-' + where + '-' + startdate + '-' + enddate + '-' + fq
        md5 = hashlib.md5(hashstr.encode(encoding='utf-8')).hexdigest()
        cache_path = PRICE_CACHE_DIR + code + '_' + md5

        try:
            if os.path.isfile(cache_path):
                t = time.time() - os.path.getmtime(cache_path)
                if t < 60 * 60 * 12 and cache:  # cache TTL: 12 hours
                    df = pd.read_pickle(cache_path)
                    return df
        except Exception as e:
            print(str(e))
            print(cache_path)

        if where.strip() != "" and where.strip().lower()[0:3] != 'and':
            where = ' and ' + where

        df_price = getTableDataByCode('astock_price_daily', code, where)
        df_price.drop_duplicates(subset='trade_date', keep='first', inplace=True)
        if (df_price.empty):
            return df_price

        # Trading calendar restricted to days up to the last quoted date.
        calendar = getTableData('astock_trade_cal', datewhere.replace('trade_date', 'cal_date') + ' and is_open=1')
        calendar.rename(columns={'cal_date': 'trade_date'}, inplace=True)
        calendar = calendar[['trade_date']]
        last_date = max(df_price['trade_date'])
        calendar = calendar[calendar.trade_date <= last_date]

        df_adj = getTableDataByCode('astock_price_adj_factor', code, '')
        first_adj = float(df_adj.iloc[0].adj_factor)
        last_adj = float(df_adj.iloc[-1].adj_factor)

        # Spread adjustment factors over every trading day (ffill the gaps).
        df_adj = pd.merge(calendar, df_adj, on='trade_date', how='left')
        df_adj = df_adj.ffill()
        df_adj = df_adj.drop('ts_code', axis=1)

        # Name history is needed to spot "ST" names (5% price limit).
        df_name = getTableDataByCode('astock_namechange', code, datewhere.replace('trade_date', 'ann_date'))
        if df_name.empty:
            name = getTableDataByCode('astock_basic', code, '')
            df_price['name'] = name['name'].values[0]
        else:
            df_name.rename(columns={'start_date': 'trade_date'}, inplace=True)
            df_name = df_name[['trade_date', 'name']]
            df_price = pd.merge(df_price, df_name, how='outer', on=['trade_date'])
            df_price = df_price.sort_values('trade_date')
            df_price['name'] = df_price['name'].ffill()
            df_price = df_price.dropna(subset=['ts_code'])

        df_updown = getTableDataByCode('astock_price_stk_limit', code, '')
        if df_updown.empty or df_price.empty:
            return pd.DataFrame()
        df_price = pd.merge(df_price, df_updown, on='trade_date', how='left')
        df_price.rename(columns={'ts_code_x': 'ts_code'}, inplace=True)
        df_price['name'] = df_price['name'].bfill()
        df_price['name'] = df_price['name'].fillna("")

        df = df_price
        df["adj_factor"] = 1
        for col in ["open", "high", "low", "close", "pre_close", "change", "pct_chg", "vol", "amount"]:
            df[col] = df[col].astype(float)
        # +1 in the denominator avoids division by zero on zero-volume days.
        df["vwap"] = (df['amount'].astype(float) * 1000) / (df['vol'].astype(float) * 100 + 1)
        df["stop"] = pd.isna(df['close']).astype(int)  # suspended days
        df.rename(columns={'vol': 'volume', 'pct_chg': 'returns'}, inplace=True)

        # Seed the limits with the previous close; updown() turns them into
        # real limit prices wherever the exchange table has no value.
        df["upLimit"] = df['close'].shift(1)
        df["downLimit"] = df['close'].shift(1)

        def updown(x, t="up"):
            # Daily price-limit ratio by board:
            #   300xxx (ChiNext) 20% since 2020-08-24, 10% before
            #   688xxx (STAR)    20%
            #   7xx/8xx (BSE)    30%
            #   others           10%; any name containing ST -> 5%
            if x.ts_code[0:3] == '300':
                limit = 0.20
                # BUGFIX: was "2020824" (7 chars), which compares incorrectly
                # against 8-char YYYYMMDD strings and mis-assigned the limit
                # for the whole Aug-Dec 2020 window.
                if x.trade_date < "20200824":
                    limit = 0.10
            elif x.ts_code[0:3] == '688':
                limit = 0.20
            elif x.ts_code[0:1] == '7' or x.ts_code[0:1] == '8':
                limit = 0.30
            else:
                limit = 0.10
            if "ST" in x['name'] or "st" in x['name']:
                limit = 0.05

            if t == "up":
                if pd.isnull(x.up_limit) and not pd.isnull(x.upLimit):
                    return round(x.upLimit * (1 + limit), 2)
                else:
                    return x.up_limit
            else:
                if pd.isnull(x.down_limit) and not pd.isnull(x.downLimit):
                    return round(x.downLimit * (1 - limit), 2)
                else:
                    return x.down_limit

        df["upLimit"] = df.apply(lambda x: updown(x, "up"), axis=1)
        df["downLimit"] = df.apply(lambda x: updown(x, "down"), axis=1)

        if (df_adj.empty):
            df = df_price
            df["adj_factor"] = 1
        else:
            df = df.drop("adj_factor", axis=1)
            df = pd.merge(df, df_adj, how='right', on=['trade_date'])
            df = df.dropna(subset=['ts_code'])

        price_cols = ["open", "high", "low", "close", "pre_close", "upLimit", "downLimit"]
        if fq == "no":
            for col in price_cols:
                df[col] = df[col].astype(float)
        else:
            # qfq divides by the latest adj_factor, hfq by the first one.
            base = last_adj if fq == "qfq" else first_adj
            for col in price_cols:
                df[col] = df[col].astype(float) * df["adj_factor"].astype(float) / base

        df["change"] = df["change"].astype(float)
        df["vwap"] = (df['amount'].astype(float) * 1000) / (df['volume'].astype(float) * 100 + 1)
        df["stop"] = pd.isna(df['close']).astype(int)

        df = df.drop('ts_code_y', axis=1)
        df = df.drop(labels='up_limit', axis=1)
        df = df.drop(labels='down_limit', axis=1)

        df = df.ffill()
        df = df.dropna(subset=['adj_factor', 'ts_code'])
        df.drop_duplicates('trade_date', inplace=True)
        df = df.sort_values(by='trade_date', ascending=True)
        df = df.reset_index(drop=True)
        del df_adj
        del calendar

        df.to_pickle(cache_path)
        return df
    except Exception as e:
        print("error")
        print("err exception is %s" % traceback.format_exc())
        traceback.print_exc()
        return pd.DataFrame()


def alignStockFactors(df, table, date, filed, conv=0, db='tushare'):
    """Left-join per-stock factor rows from `table` onto the price frame.

    conv=0: join on trade_date and forward-fill gaps.
    conv=1: `date` is yyyy-mm-dd; converted to yyyymmdd before the join.
    conv=2: join without filling.
    conv=3: financial-report dates; shifted +1 day so report data only
            becomes visible after the announcement (avoids look-ahead bias).
    """
    df = df.copy()
    df = df.reset_index()
    ts_code = df['ts_code'].tolist()[0]
    df.drop_duplicates('trade_date', inplace=True)

    # NOTE(review): string-built SQL — table/field names come from internal
    # indicator code, not user input; keep it that way.
    if (filed == '*'):
        df_factor = mydb.selectToDf("select * from " + table + " where ts_code='" + ts_code + "'", 'tushare')
        filed = mydb.selectToDf("select COLUMN_NAME from information_schema.COLUMNS where table_name = '" + table + "'", 'tushare')
        filed = filed['COLUMN_NAME'].tolist()
        filed = ",".join(filed)
    else:
        df_factor = mydb.selectToDf("select " + date + "," + filed + " from " + table + " where ts_code='" + ts_code + "'", 'tushare')

    if isinstance(df_factor, bool) or df_factor.empty:
        return pd.DataFrame()

    # De-duplicate on the date column.
    try:
        df_factor = df_factor[~df_factor[date].duplicated()]
    except Exception as e:
        print(df_factor)

    # Financial-report dates: +1 day to avoid look-ahead bias.
    if conv == 3:
        df_factor[date] = df_factor[date].astype(str)
        df_factor[date] = pd.to_datetime(df_factor[date], format='%Y%m%d', errors='coerce')
        df_factor[date] = df_factor[date] + timedelta(days=1)
        df_factor[date] = df_factor[date].astype(str)
        df_factor[date] = df_factor[date].map(lambda x: x.replace('-', ''))
        df_factor['trade_date'] = df_factor[date].map(lambda x: x.replace('-', ''))

    if not 'pandas' in str(type(df_factor)) or df_factor.empty:
        df_res = df
        for f in filed.split(','):
            df[f] = 0
        return df_res

    # Convert yyyy-mm-dd to yyyymmdd so it matches trade_date.
    if conv == 1:
        df_factor[date] = df_factor[date].astype(str)
        df_factor['trade_date'] = df_factor[date].map(lambda x: x.replace('-', ''))

    df_res = pd.merge(df, df_factor, how='left', on='trade_date', validate="one_to_many", copy=True, indicator=False)
    df_res.drop_duplicates('trade_date', inplace=True)

    if conv == 2:  # no filling
        pass
    else:
        # conv=0/1/3: forward-fill (fillna(method=...) is deprecated).
        df_res = df_res.ffill()

    df_res = df_res.set_index('index')
    del df_factor
    return df_res
CACHE_DIR=DATA_DIR+"cache/" @@ -15,4 +15,5 @@ FACTORS_CACHE_DIR=CACHE_DIR+"factors/" PRICE_CACHE_DIR=CACHE_DIR+"price/" CHOICE_CACHE_DIR=CACHE_DIR+"choice/" -KV_CACHE_DIR=CACHE_DIR+"kv/" \ No newline at end of file +KV_CACHE_DIR=CACHE_DIR+"kv/" +INDICATORS_DIR=BASE_DIR+"/indicators/" \ No newline at end of file diff --git a/finhack/widgets/templates/empty_project/data/config/constant.conf b/finhack/widgets/templates/empty_project/data/config/constant.conf index 8d4013f..eaf9de5 100644 --- a/finhack/widgets/templates/empty_project/data/config/constant.conf +++ b/finhack/widgets/templates/empty_project/data/config/constant.conf @@ -15,4 +15,5 @@ SINGLE_FACTORS_DIR=FACTORS_DIR+"single_factors/" FACTORS_CACHE_DIR=CACHE_DIR+"factors/" PRICE_CACHE_DIR=CACHE_DIR+"price/" CHOICE_CACHE_DIR=CACHE_DIR+"choice/" -KV_CACHE_DIR=CACHE_DIR+"kv/" \ No newline at end of file +KV_CACHE_DIR=CACHE_DIR+"kv/" +INDICATORS_DIR=BASE_DIR+"/indicators/" \ No newline at end of file diff --git a/finhack/widgets/templates/empty_project/indicators/QIML365.py b/finhack/widgets/templates/empty_project/indicators/QIML365.py new file mode 100644 index 0000000..3e67004 --- /dev/null +++ b/finhack/widgets/templates/empty_project/indicators/QIML365.py @@ -0,0 +1,41 @@ +# import numpy as np + +# class QIML365: +# #QIML365 2022-01-16 +# #累计震动升降指标 +# #当ASI为正,说明趋势会继续,当ASI为负,说明趋势会终结 +# def ASI(df,p): +# def SI(N): +# df_c=df.copy() +# df_c=df_c.shift(N) +# close_1=df_c['close'].shift(1) +# open_1=df_c['open'].shift(1) +# low_1=df_c['low'].shift(1) +# A=abs(df_c.high-close_1) +# B=abs(df_c.low-close_1) +# C=abs(df_c.high-low_1) +# D=abs(close_1-open_1) +# E=df_c.close-close_1 +# F=df_c.close-df_c.open +# G=close_1-open_1 +# X=E+0.5+G +# K=max(A,B) +# R=np.where(A>B and A>C,A+0.5*B+0.25*D,np.where(B>A and B>C,B+0.5*A+0.25*D,C+0.25*D)) +# SI=16*X/R+K +# return SI + +# if len(p)==1: +# p=[p,20] +# N=p[1] + +# ASI=0 +# for i in range(0,n): +# ASI=ASI+SI + +# df['ASI']=ASI +# return df + +# def is_ASI(df,p): +# 
is_ASI=np.where(df['ASI']>0,True,False) +# df['isASI']=ASI +# return df \ No newline at end of file diff --git a/finhack/widgets/templates/empty_project/indicators/__init__.py b/finhack/widgets/templates/empty_project/indicators/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/finhack/widgets/templates/empty_project/indicators/extend.py b/finhack/widgets/templates/empty_project/indicators/extend.py new file mode 100755 index 0000000..078e577 --- /dev/null +++ b/finhack/widgets/templates/empty_project/indicators/extend.py @@ -0,0 +1,206 @@ +import pandas as pd +import numpy as np + +class extend: + + # Pivot Points, Supports and Resistances + def PPSR(df,p): + PP = pd.Series((df['high'] + df['low'] + df['close']) / 3) + R1 = pd.Series(2 * PP - df['low']) + S1 = pd.Series(2 * PP - df['high']) + R2 = pd.Series(PP + df['high'] - df['low']) + S2 = pd.Series(PP - df['high'] + df['low']) + R3 = pd.Series(df['high'] + 2 * (PP - df['low'])) + S3 = pd.Series(df['low'] - 2 * (df['high'] - PP)) + psr = {'PP':PP, 'R1':R1, 'S1':S1, 'R2':R2, 'S2':S2, 'R3':R3, 'S3':S3} + PSR = pd.DataFrame(psr) + + if PSR.empty: + return df + + df = df.join(PSR) + + df['PPSR']=df.PP + df['PPR1']=df.R1 + df['PPS1']=df.S1 + df['PPR2']=df.R2 + df['PPS2']=df.S2 + df['PPR3']=df.R3 + df['PPS3']=df.S3 + return df + + + # Stochastic oscillator %K + def STOK(df): + SOk = pd.Series((df['close'] - df['low']) / (df['high'] - df['low']), name = 'SO%k') + df = df.join(SOk) + return df + + + # Stochastic Oscillator, EMA smoothing, nS = slowing (1 if no slowing) + def STO(df, nK, nD, nS=1): + SOk = pd.Series((df['close'] - df['low'].rolling(nK).min()) / (df['high'].rolling(nK).max() - df['low'].rolling(nK).min()), name = 'SO%k'+str(nK)) + SOd = pd.Series(SOk.ewm(ignore_na=False, span=nD, min_periods=nD-1, adjust=True).mean(), name = 'SO%d'+str(nD)) + SOk = SOk.ewm(ignore_na=False, span=nS, min_periods=nS-1, adjust=True).mean() + SOd = SOd.ewm(ignore_na=False, span=nS, min_periods=nS-1, 
adjust=True).mean() + df = df.join(SOk) + df = df.join(SOd) + return df + + + # Stochastic Oscillator, SMA smoothing, nS = slowing (1 if no slowing) + def STO(df, nK, nD, nS=1): + SOk = pd.Series((df['close'] - df['low'].rolling(nK).min()) / (df['high'].rolling(nK).max() - df['low'].rolling(nK).min()), name = 'SO%k'+str(nK)) + SOd = pd.Series(SOk.rolling(window=nD, center=False).mean(), name = 'SO%d'+str(nD)) + SOk = SOk.rolling(window=nS, center=False).mean() + SOd = SOd.rolling(window=nS, center=False).mean() + df = df.join(SOk) + df = df.join(SOd) + return df + + + # Mass Index + def MassI(df): + Range = df['high'] - df['low'] + EX1 = pd.ewma(Range, span = 9, min_periods = 8) + EX2 = pd.ewma(EX1, span = 9, min_periods = 8) + Mass = EX1 / EX2 + MassI = pd.Series(pd.rolling_sum(Mass, 25), name = 'Mass Index') + df = df.join(MassI) + return df + + + # Vortex Indicator: http://www.vortexindicator.com/VFX_VORTEX.PDF + def Vortex(df, n): + i = 0 + TR = [0] + while i < df.index[-1]: + Range = max(df.get_value(i + 1, 'high'), df.get_value(i, 'close')) - min(df.get_value(i + 1, 'low'), df.get_value(i, 'close')) + TR.append(Range) + i = i + 1 + i = 0 + VM = [0] + while i < df.index[-1]: + Range = abs(df.get_value(i + 1, 'high') - df.get_value(i, 'low')) - abs(df.get_value(i + 1, 'low') - df.get_value(i, 'high')) + VM.append(Range) + i = i + 1 + VI = pd.Series(pd.rolling_sum(pd.Series(VM), n) / pd.rolling_sum(pd.Series(TR), n), name = 'Vortex_' + str(n)) + df = df.join(VI) + return df + + + # KST Oscillator + def KST(df, r1, r2, r3, r4, n1, n2, n3, n4): + M = df['close'].diff(r1 - 1) + N = df['close'].shift(r1 - 1) + ROC1 = M / N + M = df['close'].diff(r2 - 1) + N = df['close'].shift(r2 - 1) + ROC2 = M / N + M = df['close'].diff(r3 - 1) + N = df['close'].shift(r3 - 1) + ROC3 = M / N + M = df['close'].diff(r4 - 1) + N = df['close'].shift(r4 - 1) + ROC4 = M / N + KST = pd.Series(pd.rolling_sum(ROC1, n1) + pd.rolling_sum(ROC2, n2) * 2 + pd.rolling_sum(ROC3, n3) * 3 + 
pd.rolling_sum(ROC4, n4) * 4, name = 'KST_' + str(r1) + '_' + str(r2) + '_' + str(r3) + '_' + str(r4) + '_' + str(n1) + '_' + str(n2) + '_' + str(n3) + '_' + str(n4)) + df = df.join(KST) + return df + + + # True Strength Index + def TSI(df, r, s): + M = pd.Series(df['close'].diff(1)) + aM = abs(M) + EMA1 = pd.Series(pd.ewma(M, span = r, min_periods = r - 1)) + aEMA1 = pd.Series(pd.ewma(aM, span = r, min_periods = r - 1)) + EMA2 = pd.Series(pd.ewma(EMA1, span = s, min_periods = s - 1)) + aEMA2 = pd.Series(pd.ewma(aEMA1, span = s, min_periods = s - 1)) + TSI = pd.Series(EMA2 / aEMA2, name = 'TSI_' + str(r) + '_' + str(s)) + df = df.join(TSI) + return df + + + # Accumulation/Distribution + def ACCDIST(df, n): + ad = (2 * df['close'] - df['high'] - df['low']) / (df['high'] - df['low']) * df['Volume'] + M = ad.diff(n - 1) + N = ad.shift(n - 1) + ROC = M / N + AD = pd.Series(ROC, name = 'Acc/Dist_ROC_' + str(n)) + df = df.join(AD) + return df + + + # Force Index + def FORCE(df, n): + F = pd.Series(df['close'].diff(n) * df['Volume'].diff(n), name = 'Force_' + str(n)) + df = df.join(F) + return df + + + # Ease of Movement + def EOM(df, p): + if len(p)==2: + p[1]=10 + EoM = (df['high'].diff(1) + df['low'].diff(1)) * (df['high'] - df['low']) / (2 * df['volume']) + Eom_ma = pd.Series(EoM.rolling(p[1]).mean(), name = 'EoM_' + str(p[1])) + + + + #df = df.join(Eom_ma) + a=2 + + df['EMO']=Eom_ma + + + + return df + + + # Coppock Curve + def COPP(df, n): + M = df['close'].diff(int(n * 11 / 10) - 1) + N = df['close'].shift(int(n * 11 / 10) - 1) + ROC1 = M / N + M = df['close'].diff(int(n * 14 / 10) - 1) + N = df['close'].shift(int(n * 14 / 10) - 1) + ROC2 = M / N + Copp = pd.Series(pd.ewma(ROC1 + ROC2, span = n, min_periods = n), name = 'Copp_' + str(n)) + df = df.join(Copp) + return df + + + # Keltner Channel + def KELCH(df, n): + KelChM = pd.Series(pd.rolling_mean((df['high'] + df['low'] + df['close']) / 3, n), name = 'KelChM_' + str(n)) + KelChU = pd.Series(pd.rolling_mean((4 * 
df['high'] - 2 * df['low'] + df['close']) / 3, n), name = 'KelChU_' + str(n)) + KelChD = pd.Series(pd.rolling_mean((-2 * df['high'] + 4 * df['low'] + df['close']) / 3, n), name = 'KelChD_' + str(n)) + df = df.join(KelChM) + df = df.join(KelChU) + df = df.join(KelChD) + return df + + + # Donchian Channel + def DONCH(low, high, timeperiod: int = 20): + if len(high) != len(low): + return [], [] + dc_low = [] + dc_high = [] + for i in range(0, len(high)): + if i < timeperiod - 1: + dc_low.append(np.nan) + dc_high.append(np.nan) + else: + min_list = low.ix[i - (timeperiod - 1): i] + max_list = high.ix[i - (timeperiod - 1): i] + if len(min_list) == 0 or len(max_list) == 0: + dc_low.append(np.nan) + dc_high.append(np.nan) + else: + dc_min = min(min_list) + dc_max = max(max_list) + dc_low.append(dc_min) + dc_high.append(dc_max) + return dc_low, dc_high \ No newline at end of file diff --git a/finhack/widgets/templates/empty_project/indicators/financial.py b/finhack/widgets/templates/empty_project/indicators/financial.py new file mode 100755 index 0000000..58edb10 --- /dev/null +++ b/finhack/widgets/templates/empty_project/indicators/financial.py @@ -0,0 +1,125 @@ +import numpy as np +from finhack.market.astock.astock import AStock + +class financial: + #财务指标,anndate需要加1,防止未来函数 + def financeIndicator(df,p): + df_fi=AStock.alignStockFactors(df,'astock_finance_indicator','ann_date',filed='*',conv=3,db='tushare') + df=df.reset_index(drop=0) + df_fi=df_fi.reset_index(drop=0) + + if df_fi.empty: + return df_fi + + if(len(df_fi)!=len(df)): + print('len(df_fi)!=len(df)!') + + + df['eps']=df_fi['eps'] + df['dtEps']=df_fi['dt_eps'] + df['totalRevenuePs']=df_fi['total_revenue_ps'] + df['revenuePs']=df_fi['revenue_ps'] + df['capitalResePs']=df_fi['capital_rese_ps'] + df['surplusResePs']=df_fi['surplus_rese_ps'] + df['undistProfitPs']=df_fi['undist_profit_ps'] + df['extraItem']=df_fi['extra_item'] + df['profitDedt']=df_fi['profit_dedt'] + df['grossMargin']=df_fi['gross_margin'] + 
df['currentRatio']=df_fi['current_ratio'] + df['quickRatio']=df_fi['quick_ratio'] + df['cashRatio']=df_fi['cash_ratio'] + df['arTurn']=df_fi['ar_turn'] + df['caTurn']=df_fi['ca_turn'] + df['faTurn']=df_fi['fa_turn'] + df['assetsTurn']=df_fi['assets_turn'] + df['opIncome']=df_fi['op_income'] + df['ebit']=df_fi['ebit'] + df['ebitda']=df_fi['ebitda'] + df['fcff']=df_fi['fcff'] + df['fcfe']=df_fi['fcfe'] + df['currentExint']=df_fi['current_exint'] + df['noncurrentExint']=df_fi['noncurrent_exint'] + df['interestdebt']=df_fi['interestdebt'] + df['netdebt']=df_fi['netdebt'] + df['tangibleAsset']=df_fi['tangible_asset'] + df['workingCapital']=df_fi['working_capital'] + df['networkingCapital']=df_fi['networking_capital'] + df['investCapital']=df_fi['invest_capital'] + df['retainedEarnings']=df_fi['retained_earnings'] + df['diluted2Eps']=df_fi['diluted2_eps'] + df['bps']=df_fi['bps'] + df['ocfps']=df_fi['ocfps'] + df['retainedps']=df_fi['retainedps'] + df['cfps']=df_fi['cfps'] + df['ebitPs']=df_fi['ebit_ps'] + df['fcffPs']=df_fi['fcff_ps'] + df['fcfePs']=df_fi['fcfe_ps'] + df['netprofitMargin']=df_fi['netprofit_margin'] + df['grossprofitMargin']=df_fi['grossprofit_margin'] + df['cogsOfSales']=df_fi['cogs_of_sales'] + df['expenseOfSales']=df_fi['expense_of_sales'] + df['profitToGr']=df_fi['profit_to_gr'] + df['saleexpToGr']=df_fi['saleexp_to_gr'] + df['adminexpOfGr']=df_fi['adminexp_of_gr'] + df['finaexpOfGr']=df_fi['finaexp_of_gr'] + df['impaiTtm']=df_fi['impai_ttm'] + df['gcOfGr']=df_fi['gc_of_gr'] + df['opOfGr']=df_fi['op_of_gr'] + df['ebitOfGr']=df_fi['ebit_of_gr'] + df['roe']=df_fi['roe'] + df['roeWaa']=df_fi['roe_waa'] + df['roeDt']=df_fi['roe_dt'] + df['roa']=df_fi['roa'] + df['npta']=df_fi['npta'] + df['roic']=df_fi['roic'] + df['roeYearly']=df_fi['roe_yearly'] + df['roa2Yearly']=df_fi['roa2_yearly'] + df['debtToAssets']=df_fi['debt_to_assets'] + df['assetsToEqt']=df_fi['assets_to_eqt'] + df['dpAssetsToEqt']=df_fi['dp_assets_to_eqt'] + 
df['caToAssets']=df_fi['ca_to_assets'] + df['ncaToAssets']=df_fi['nca_to_assets'] + df['tbassetsToTotalassets']=df_fi['tbassets_to_totalassets'] + df['intToTalcap']=df_fi['int_to_talcap'] + df['eqtToTalcapital']=df_fi['eqt_to_talcapital'] + df['currentdebtToDebt']=df_fi['currentdebt_to_debt'] + df['longdebToDebt']=df_fi['longdeb_to_debt'] + df['ocfToShortdebt']=df_fi['ocf_to_shortdebt'] + df['debtToEqt']=df_fi['debt_to_eqt'] + df['eqtToDebt']=df_fi['eqt_to_debt'] + df['eqtToInterestdebt']=df_fi['eqt_to_interestdebt'] + df['tangibleassetToDebt']=df_fi['tangibleasset_to_debt'] + df['tangassetToIntdebt']=df_fi['tangasset_to_intdebt'] + df['tangibleassetToNetdebt']=df_fi['tangibleasset_to_netdebt'] + df['ocfToDebt']=df_fi['ocf_to_debt'] + df['turnDays']=df_fi['turn_days'] + df['roaYearly']=df_fi['roa_yearly'] + df['roaDp']=df_fi['roa_dp'] + df['fixedAssets']=df_fi['fixed_assets'] + df['profitToOp']=df_fi['profit_to_op'] + df['qSaleexpToGr']=df_fi['q_saleexp_to_gr'] + df['qGcToGr']=df_fi['q_gc_to_gr'] + df['qRoe']=df_fi['q_roe'] + df['qDtRoe']=df_fi['q_dt_roe'] + df['qNpta']=df_fi['q_npta'] + df['qOcfToSales']=df_fi['q_ocf_to_sales'] + df['basicEpsYoy']=df_fi['basic_eps_yoy'] + df['dtEpsYoy']=df_fi['dt_eps_yoy'] + df['cfpsYoy']=df_fi['cfps_yoy'] + df['opYoy']=df_fi['op_yoy'] + df['ebtYoy']=df_fi['ebt_yoy'] + df['netprofitYoy']=df_fi['netprofit_yoy'] + df['dtNetprofitYoy']=df_fi['dt_netprofit_yoy'] + df['ocfYoy']=df_fi['ocf_yoy'] + df['roeYoy']=df_fi['roe_yoy'] + df['bpsYoy']=df_fi['bps_yoy'] + df['assetsYoy']=df_fi['assets_yoy'] + df['eqtYoy']=df_fi['eqt_yoy'] + df['trYoy']=df_fi['tr_yoy'] + df['orYoy']=df_fi['or_yoy'] + df['qSalesYoy']=df_fi['q_sales_yoy'] + df['qOpQoq']=df_fi['q_op_qoq'] + df['equityYoy']=df_fi['equity_yoy'] + del df_fi + return df + diff --git a/finhack/widgets/templates/empty_project/indicators/member.py b/finhack/widgets/templates/empty_project/indicators/member.py new file mode 100644 index 0000000..ec3941c --- /dev/null +++ 
import pandas as pd
import numpy as np


class extend:
    """Index-membership indicators (member.py).

    NOTE(review): this class lives in member.py but is named `extend`, which
    duplicates the class name used by extend.py — looks like a copy-paste
    leftover; confirm how the indicator loader resolves it before renaming.
    """

    def ZZ1000(df, p):
        # Placeholder for a CSI 1000 membership indicator; currently a no-op.
        return df


class myfactors:
    """Hand-written example factors (myfactors.py)."""

    def close(df, p):
        # Round-trips the close column through a copy; effectively a no-op
        # factor kept as a minimal working example.
        snapshot = df.copy()
        df['close'] = snapshot['close']
        return df

    def basic(df, p):
        # Daily valuation/liquidity factors from the tushare daily-basic table.
        # Deferred project-local import (originally module-level) keeps this
        # module importable in isolation; behavior at call time is unchanged.
        from finhack.market.astock.astock import AStock
        df_basic = AStock.alignStockFactors(df, 'astock_price_daily_basic', 'trade_date', filed='turnover_rate,turnover_rate_f,volume_ratio,pe,pe_ttm,pb,ps,ps_ttm,dv_ratio,dv_ttm,total_share,float_share,free_share,total_mv,circ_mv', conv=0, db='tushare')
        # Source column -> factor name (note the irregular 'turnoverRatef').
        renames = (
            ('turnover_rate', 'turnoverRate'),
            ('turnover_rate_f', 'turnoverRatef'),
            ('volume_ratio', 'volumeRatio'),
            ('pe', 'pe'),
            ('pe_ttm', 'peTtm'),
            ('pb', 'pb'),
            ('ps', 'ps'),
            ('ps_ttm', 'psTtm'),
            ('dv_ratio', 'dvRatio'),
            ('dv_ttm', 'dvTtm'),
            ('total_share', 'totalShare'),
            ('float_share', 'floatShare'),
            ('free_share', 'freeShare'),
            ('total_mv', 'totalMv'),
            ('circ_mv', 'circMv'),
        )
        for src, dst in renames:
            df[dst] = df_basic[src]
        return df

    def xx123(df, p):
        # Toy factor demonstrating parameter plumbing: p[0] and p[1] become
        # constant columns, plus a copy of open.
        df['xx1'] = p[0]
        df['xx2'] = p[1]
        df['xx3'] = df['open']
        return df

    # A commented-out residual-income (RIM) factor draft followed here in the
    # original, reading name/industry/value columns from stock_finhack_rim
    # via alignStockFactors(conv=1); left disabled.
df['rimve']=df_rim['value_end'] + # df['rimvm']=df_rim['value_max'] + # df['rimvp']=df_rim['vp'] + # df['rimvep']=df_rim['vep'] + # df['rimvmp']=df_rim['vmp'] + # df['rimrc']=df_rim['rcount'] + # return df \ No newline at end of file diff --git a/finhack/widgets/templates/empty_project/indicators/ta_lib.py b/finhack/widgets/templates/empty_project/indicators/ta_lib.py new file mode 100755 index 0000000..8ef488a --- /dev/null +++ b/finhack/widgets/templates/empty_project/indicators/ta_lib.py @@ -0,0 +1,671 @@ +import talib as ta +import numpy as np +from finhack.market.astock.astock import AStock + +class ta_lib: + def BBANDS(df,p): + if len(p)<=2: + p=[p,90] + + #print(p) + df['UPPER'], df['MIDDLE'], df['LOWER'] = ta.BBANDS(df.close, timeperiod=p[1], nbdevup=2, nbdevdn=2, matype=0) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def DEMA(df,p): + if len(p)<=2: + p=[p,90] + df['DEMA']=ta.DEMA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def EMA(df,p): + if len(p)<=2: + p=[p,90] + df['EMA'] =ta.EMA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def HT_TRENDLINE(df,p): + df['HTTRENDLINE'] = ta.HT_TRENDLINE(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MA(df,p): + if len(p)<=2: + p=[p,90] + df['MA']=ta.MA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MAMA(df,p): + df['MAMA'], df['FAMA'] = ta.MAMA(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MIDPOINT(df,p): + if len(p)<=2: + p=[p,90] + df['MIDPOINT'] = ta.MIDPOINT(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MIDPRICE(df,p): + if len(p)<=2: + p=[p,90] + df['MIDPRICE'] = ta.MIDPRICE(df.high, df.low, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def SAR(df,p): + if len(p)<=2: + p=[p,0,0] + df['SAR']=ta.SAR(df.high, df.low, acceleration=p[1], maximum=p[2]) + df = df.replace([np.inf, -np.inf], np.nan) + 
return df + + def SAREXT(df,p): + if len(p)<=2: + p=[p,0,0,0] + df['SAREXT']=ta.SAREXT(df.high, df.low, p[1], p[2], p[3]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def SMA(df,p): + if len(p)<=2: + p=[p,90] + df['SMA']=ta.SMA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def T3(df,p): + if len(p)<=2: + p=[p,90] + df['T3'] =ta.T3(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TEMA(df,p): + if len(p)<=2: + p=[p,90] + df['TEMA']=ta.TEMA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TRIMA(df,p): + if len(p)<=2: + p=[p,90] + df['TRIMA'] =ta.TRIMA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def WMA(df,p): + if len(p)<=2: + p=[p,90] + df['WMA']=ta.WMA(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ADX(df,p): + if len(p)<=2: + p=[p,90] + df['ADX']=ta.ADX(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ADXR(df,p): + if len(p)<=2: + p=[p,90] + df['ADXR']=ta.ADXR(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def APO(df,p): + if len(p)<=2: + p=[p,30,90] + df['APO']=ta.APO(df.close, p[1], p[2]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def AROON(df,p): + if len(p)<=2: + p=[p,90] + df['AROONDOWN'], df['ARRONUP'] = ta.AROON(df.high, df.low, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def AROONOSC(df,p): + if len(p)<=2: + p=[p,90] + df['AROONOSC'] = ta.AROONOSC(df.high, df.low, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def BOP(df,p): + if len(p)<=2: + p=[p,90] + df['BOP']=ta.BOP(df.open, df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def CCI(df,p): + if len(p)<=2: + p=[p,90] + df['CCI']=ta.CCI(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def 
CMO(df,p): + if len(p)<=2: + p=[p,90] + df['CMO']=ta.CMO(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def DX(df,p): + if len(p)<=2: + p=[p,90] + df['DX'] =ta.DX(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MACD(df,p): + if len(p)<=2: + p=[p,12,26,9] + df['MACD'], df['MACDSIGNAL'], df['MACDHIST'] = ta.MACD(df.close, fastperiod=p[1], slowperiod=p[2], signalperiod=p[3]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MACDEXT(df,p): + if len(p)<=2: + p=[p,12,26,9] + df['MACDX'], df['MACDSIGNALX'], df['MACDHISTX'] = ta.MACDEXT(df.close, fastperiod=p[1], slowperiod=p[2], signalperiod=p[3]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MACDFIX(df,p): + if len(p)<=2: + p=[p,90] + df['MACDFIX'], df['MACDSIGNALFIX'], df['MACDHISTFIX'] = ta.MACDFIX(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MFI(df,p): + if len(p)<=2: + p=[p,90] + df['MFI']=ta.MFI(df.high, df.low, df.close, df.volume, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MINUS_DI(df,p): + if len(p)<=2: + p=[p,90] + df['MINUSDI'] = ta.MINUS_DI(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MINUS_DM(df,p): + if len(p)<=2: + p=[p,90] + df['MINUSDM'] = ta.MINUS_DM(df.high, df.low, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MOM(df,p): + if len(p)<=2: + p=[p,90] + df['MOM']=ta.MOM(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def PLUS_DI(df,p): + if len(p)<=2: + p=[p,90] + df['PLUSDI'] = ta.PLUS_DI(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def PLUS_DM(df,p): + if len(p)<=2: + p=[p,90] + df['PLUSDM'] = ta.PLUS_DM(df.high, df.low, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def PPO(df,p): + if len(p)<=2: + p=[p,30,90] + df['PPO']=ta.PPO(df.close, p[1], p[2]) + df 
= df.replace([np.inf, -np.inf], np.nan) + return df + + def ROC(df,p): + if len(p)<=2: + p=[p,90] + df['ROC']=ta.ROC(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ROCR(df,p): + if len(p)<=2: + p=[p,90] + df['ROCR']=ta.ROCR(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ROCR100(df,p): + if len(p)<=2: + p=[p,90] + df['ROCR100'] = ta.ROCR100(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def RSI(df,p): + if len(p)<=2: + p=[p,90] + df['RSI']=ta.RSI(df.close,p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def STOCH(df,p): + if len(p)<=2: + p=[p,90] + df['SLOWK'], df['SLOWD'] = ta.STOCH(df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def STOCHF(df,p): + if len(p)<=2: + p=[p,90] + df['FASTK'], df['FASTD'] = ta.STOCHF(df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def STOCHRSI(df,p): + if len(p)<=2: + p=[p,90] + df['FASTKRSI'], df['FASTDRSI'] = ta.STOCHRSI(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TRIX(df,p): + if len(p)<=2: + p=[p,90] + df['TRIX']=ta.TRIX(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ULTOSC(df,p): + if len(p)<=2: + p=[p,90] + df['ULTOSC']=ta.ULTOSC(df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def WILLR(df,p): + if len(p)<=2: + p=[p,90] + df['WILLR'] =ta.WILLR(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def AD(df,p): + if len(p)<=2: + p=[p,90] + df['AD'] =ta.AD(df.high, df.low, df.close, df.volume) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ADOSC(df,p): + if len(p)<=2: + p=[p,90] + df['ADOSC'] =ta.ADOSC(df.high, df.low, df.close, df.volume) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def OBV(df,p): + if len(p)<=2: + p=[p,90] + df['OBV']=ta.OBV(df.close, 
df.volume) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ATR(df,p): + if len(p)<=2: + p=[p,90] + df['ATR']=ta.ATR(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def NATR(df,p): + if len(p)<=2: + p=[p,90] + df['NATR']=ta.NATR(df.high, df.low, df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TRANGE(df,p): + df['TRANGE']=ta.TRANGE(df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def AVGPRICE(df,p): + df['AVGPRICE'] = ta.AVGPRICE(df.open, df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MEDPRICE(df,p): + df['MEDPRICE'] = ta.MEDPRICE(df.high, df.low) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TYPPRICE(df,p): + df['TYPPRICE'] = ta.TYPPRICE(df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def WCLPRICE(df,p): + df['WCLPRICE'] = ta.WCLPRICE(df.high, df.low, df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def HT_DCPERIOD(df,p): + df['HTDCPERIOD'] = ta.HT_DCPERIOD(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def HT_DCPHASE(df,p): + df['HTDCPHASE'] = ta.HT_DCPHASE(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def HT_PHASOR(df,p): + df['INPHASE'], df['QUADRATURE'] = ta.HT_PHASOR(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def HT_SINE(df,p): + df['SINE'] , df['LEADSINE'] = ta.HT_SINE(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def HT_TRENDMODE(df,p): + df['HTTRENDMODE'] = ta.HT_TRENDMODE(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def BETA(df,p): + if len(p)<=2: + p=[p,90] + df['BETA']=ta.BETA(df.high, df.low, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def CORREL(df,p): + if len(p)<=2: + p=[p,90] + df['CORREL']=ta.CORREL(df.high, df.low, p[1]) + df = 
df.replace([np.inf, -np.inf], np.nan) + return df + + def LINEARREG(df,p): + if len(p)<=2: + p=[p,90] + df['LINEARREG'] = ta.LINEARREG(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def LINEARREG_ANGLE(df,p): + if len(p)<=2: + p=[p,90] + df['LINEARREGANGLE'] = ta.LINEARREG_ANGLE(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def LINEARREG_INTERCEPT(df,p): + if len(p)<=2: + p=[p,90] + df['LINEARREGINTERCEPT'] = ta.LINEARREG_INTERCEPT(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def LINEARREG_SLOPE(df,p): + if len(p)<=2: + p=[p,90] + df['LINEARREGSLOPE'] = ta.LINEARREG_SLOPE(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def STDDEV(df,p): + #print(p) + if len(p)<=2: + p=[p,90] + df['STDDEV']=ta.STDDEV(df.close, p[1], 1) + + #print(df) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TSF(df,p): + if len(p)<=2: + p=[p,90] + df['TSF']=ta.TSF(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def VAR(df,p): + if len(p)<=2: + p=[p,90] + df['VAR']=ta.VAR(df.close, p[1], 1) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ACOS(df,p): + df['ACOS']=ta.ACOS(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ASIN(df,p): + df['ASIN']=ta.ASIN(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ATAN(df,p): + df['ATAN']=ta.ATAN(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def CEIL(df,p): + df['CEIL']=ta.CEIL(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def COS(df,p): + df['COS']=ta.COS(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def COSH(df,p): + df['COSH']=ta.COSH(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def EXP(df,p): + df['EXP']=ta.EXP(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def FLOOR(df,p): + df['FLOOR'] 
=ta.FLOOR(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def LN(df,p): + df['LN'] =ta.LN(df.close) # Log + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def LOG10(df,p): + df['LOG10'] =ta.LOG10(df.close) # Log + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def SIN(df,p): + df['SIN']=ta.SIN(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def SINH(df,p): + df['SINH']=ta.SINH(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def SQRT(df,p): + df['SQRT']=ta.SQRT(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TAN(df,p): + df['TAN']=ta.TAN(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def TANH(df,p): + df['TANH']=ta.TANH(df.close) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def ADD(df,p): + df['ADD']=ta.ADD(df.high, df.low) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def DIV(df,p): + df['DIV']=ta.DIV(df.high, df.low) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MAX(df,p): + if len(p)<=2: + p=[p,90] + df['MAX']=ta.MAX(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MAXINDEX(df,p): + if len(p)<=2: + p=[p,90] + df['MAXINDEX'] = ta.MAXINDEX(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MIN(df,p): + if len(p)<=2: + p=[p,90] + df['MIN']=ta.MIN(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MININDEX(df,p): + if len(p)<=2: + p=[p,90] + df['MININDEX'] = ta.MININDEX(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MINMAXINDEX(df,p): + if len(p)<=2: + p=[p,90] + df['MINIDX'], df['MAXIDX'] = ta.MINMAXINDEX(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def MULT(df,p): + df['MULT']=ta.MULT(df.high, df.low) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + def SUB(df,p): + df['SUB']=ta.SUB(df.high, df.low) + df = 
df.replace([np.inf, -np.inf], np.nan) + return df + + def SUM(df,p): + if len(p)<=2: + p=[p,90] + df['SUM']=ta.SUM(df.close, p[1]) + df = df.replace([np.inf, -np.inf], np.nan) + return df + + + + + + + def kline(df,p): + df['2CROWS'] = ta.CDL2CROWS(df.open, df.high, df.low, df.close) + df['3BLACKCROWS'] = ta.CDL3BLACKCROWS(df.open, df.high, df.low, df.close) + df['3INSIDE'] = ta.CDL3INSIDE(df.open, df.high, df.low, df.close) + df['3LINESTRIKE'] = ta.CDL3LINESTRIKE(df.open, df.high, df.low, df.close) + df['3OUTSIDE'] = ta.CDL3OUTSIDE(df.open, df.high, df.low, df.close) + df['3STARSINSOUTH'] = ta.CDL3STARSINSOUTH(df.open, df.high, df.low, df.close) + df['3WHITESOLDIERS'] = ta.CDL3WHITESOLDIERS(df.open, df.high, df.low, df.close) + df['ABANDONEDBABY'] = ta.CDLABANDONEDBABY(df.open, df.high, df.low, df.close) + df['ADVANCEBLOCK'] = ta.CDLADVANCEBLOCK(df.open, df.high, df.low, df.close) + df['BELTHOLD'] = ta.CDLBELTHOLD(df.open, df.high, df.low, df.close) + df['BREAKAWAY'] = ta.CDLBREAKAWAY(df.open, df.high, df.low, df.close) + df['CLOSINGMARUBOZU'] = ta.CDLCLOSINGMARUBOZU(df.open, df.high, df.low, df.close) + df['CONCEALBABYSWALL'] = ta.CDLCONCEALBABYSWALL(df.open, df.high, df.low, df.close) + df['COUNTERATTACK'] = ta.CDLCOUNTERATTACK(df.open, df.high, df.low, df.close) + df['DARKCLOUDCOVER'] = ta.CDLDARKCLOUDCOVER(df.open, df.high, df.low, df.close) + df['DOJI'] = ta.CDLDOJI(df.open, df.high, df.low, df.close) + df['DOJISTAR'] = ta.CDLDOJISTAR(df.open, df.high, df.low, df.close) + df['DRAGONFLYDOJI'] = ta.CDLDRAGONFLYDOJI(df.open, df.high, df.low, df.close) + df['ENGULFING'] = ta.CDLENGULFING(df.open, df.high, df.low, df.close) + df['DOJISTAR'] = ta.CDLDOJISTAR(df.open, df.high, df.low, df.close) + df['EVENINGSTAR'] = ta.CDLEVENINGSTAR(df.open, df.high, df.low, df.close) + df['GAPSIDESIDEWHITE'] = ta.CDLGAPSIDESIDEWHITE(df.open, df.high, df.low, df.close) + df['GRAVESTONEDOJI'] = ta.CDLGRAVESTONEDOJI(df.open, df.high, df.low, df.close) + df['HAMMER'] = 
ta.CDLHAMMER(df.open, df.high, df.low, df.close) + df['HANGINGMAN'] = ta.CDLHANGINGMAN(df.open, df.high, df.low, df.close) + df['HARAMI'] = ta.CDLHARAMI(df.open, df.high, df.low, df.close) + df['HARAMICROSS'] = ta.CDLHARAMICROSS(df.open, df.high, df.low, df.close) + df['HIGHWAVE'] = ta.CDLHIGHWAVE(df.open, df.high, df.low, df.close) + df['HIKKAKE'] = ta.CDLHIKKAKE(df.open, df.high, df.low, df.close) + df['HIKKAKEMOD'] = ta.CDLHIKKAKEMOD(df.open, df.high, df.low, df.close) + df['HOMINGPIGEON'] = ta.CDLHOMINGPIGEON(df.open, df.high, df.low, df.close) + df['IDENTICAL3CROWS'] = ta.CDLIDENTICAL3CROWS(df.open, df.high, df.low, df.close) + df['INNECK'] = ta.CDLINNECK(df.open, df.high, df.low, df.close) + df['INVERTEDHAMMER'] = ta.CDLINVERTEDHAMMER(df.open, df.high, df.low, df.close) + df['KICKING'] = ta.CDLKICKING(df.open, df.high, df.low, df.close) + df['KICKINGBYLENGTH'] = ta.CDLKICKINGBYLENGTH(df.open, df.high, df.low, df.close) + df['LADDERBOTTOM'] = ta.CDLLADDERBOTTOM(df.open, df.high, df.low, df.close) + df['LONGLEGGEDDOJI'] = ta.CDLLONGLEGGEDDOJI(df.open, df.high, df.low, df.close) + df['LONGLINE'] = ta.CDLLONGLINE(df.open, df.high, df.low, df.close) + df['MARUBOZU'] = ta.CDLMARUBOZU(df.open, df.high, df.low, df.close) + df['MATCHINGLOW'] = ta.CDLMATCHINGLOW(df.open, df.high, df.low, df.close) + df['MATHOLD'] = ta.CDLMATHOLD(df.open, df.high, df.low, df.close) + df['MORNINGDOJISTAR'] = ta.CDLMORNINGDOJISTAR(df.open, df.high, df.low, df.close) + df['MORNINGSTAR'] = ta.CDLMORNINGSTAR(df.open, df.high, df.low, df.close) + df['ONNECK'] = ta.CDLONNECK(df.open, df.high, df.low, df.close) + df['PIERCING'] = ta.CDLPIERCING(df.open, df.high, df.low, df.close) + df['RICKSHAWMAN'] = ta.CDLRICKSHAWMAN(df.open, df.high, df.low, df.close) + df['RISEFALL3METHODS'] = ta.CDLRISEFALL3METHODS(df.open, df.high, df.low, df.close) + df['SEPARATINGLINES'] = ta.CDLSEPARATINGLINES(df.open, df.high, df.low, df.close) + df['SHOOTINGSTAR'] = ta.CDLSHOOTINGSTAR(df.open, df.high, df.low, 
df.close) + df['SHORTLINE'] = ta.CDLSHORTLINE(df.open, df.high, df.low, df.close) + df['SPINNINGTOP'] = ta.CDLSPINNINGTOP(df.open, df.high, df.low, df.close) + df['STALLEDPATTERN'] = ta.CDLSTALLEDPATTERN(df.open, df.high, df.low, df.close) + df['STICKSANDWICH'] = ta.CDLSTICKSANDWICH(df.open, df.high, df.low, df.close) + df['TAKURI'] = ta.CDLTAKURI(df.open, df.high, df.low, df.close) + df['TASUKIGAP'] = ta.CDLTASUKIGAP(df.open, df.high, df.low, df.close) + df['THRUSTING'] = ta.CDLTHRUSTING(df.open, df.high, df.low, df.close) + df['TRISTAR'] = ta.CDLTRISTAR(df.open, df.high, df.low, df.close) + df['UNIQUE3RIVER'] = ta.CDLUNIQUE3RIVER(df.open, df.high, df.low, df.close) + df['UPSIDEGAP2CROWS'] = ta.CDLUPSIDEGAP2CROWS(df.open, df.high, df.low, df.close) + df['XSIDEGAP3METHODS'] = ta.CDLXSIDEGAP3METHODS(df.open, df.high, df.low, df.close) + df['EVENINGDOJISTAR']=ta.CDLEVENINGDOJISTAR(df.open, df.high, df.low, df.close) + return df + + + diff --git a/finhack/widgets/templates/empty_project/indicators/volumeprice.py b/finhack/widgets/templates/empty_project/indicators/volumeprice.py new file mode 100755 index 0000000..aaacc6f --- /dev/null +++ b/finhack/widgets/templates/empty_project/indicators/volumeprice.py @@ -0,0 +1,50 @@ +import numpy as np +from finhack.market.astock.astock import AStock + +class volumeprice: + + def moneyflow(df,p): + df_vp=AStock.alignStockFactors(df,'astock_price_moneyflow','trade_date',filed='*',conv=0,db='tushare') + if df_vp.empty: + return df_vp + df['buySmVol']=df_vp['buy_sm_vol'] + df['buySmAmount']=df_vp['buy_sm_amount'] + df['sellSmVol']=df_vp['sell_sm_vol'] + df['sellSmAmount']=df_vp['sell_sm_amount'] + df['buyMdVol']=df_vp['buy_md_vol'] + df['buyMdAmount']=df_vp['buy_md_amount'] + df['sellMdVol']=df_vp['sell_md_vol'] + df['sellMdAmount']=df_vp['sell_md_amount'] + df['buyLgVol']=df_vp['buy_lg_vol'] + df['buyLgAmount']=df_vp['buy_lg_amount'] + df['sellLgVol']=df_vp['sell_lg_vol'] + df['sellLgAmount']=df_vp['sell_lg_amount'] + 
df['buyElgVol']=df_vp['buy_elg_vol'] + df['buyElgAmount']=df_vp['buy_elg_amount'] + df['sellElgVol']=df_vp['sell_elg_vol'] + df['sellElgAmount']=df_vp['sell_elg_amount'] + df['netMfVol']=df_vp['net_mf_vol'] + df['netMfAmount']=df_vp['net_mf_amount'] + del df_vp + return df + + def limit(df,p): + df_vp=AStock.alignStockFactors(df,'astock_price_limit_list','trade_date',filed='*',conv=2,db='tushare') + df['lAmp']=df_vp['amp'] + df['lFcRatio']=df_vp['fc_ratio'] + df['lFlRatio']=df_vp['fl_ratio'] + df['lFdAmount']=df_vp['fd_amount'] + df['lFirstTime']=df_vp['first_time'] + df['lLastTime']=df_vp['last_time'] + df['lOpenTimes']=df_vp['open_times'] + df['lStrth']=df_vp['strth'] + df['lLimit']=df_vp['limit'] + del df_vp + return df + + # def limit_price(df,p): + # df_vp=AStock.alignStockFactors(df,'astock_price_stk_limit','trade_date',filed='*',conv=2,db='tushare') + # df['upLimit']=df_vp['up_limit'] + # df['downLimit']=df_vp['down_limit'] + # del df_vp + # return df diff --git a/finhack/widgets/templates/empty_project/loader/testmodule_loader.py b/finhack/widgets/templates/empty_project/loader/testmodule_loader.py index 5876d94..fe88ade 100644 --- a/finhack/widgets/templates/empty_project/loader/testmodule_loader.py +++ b/finhack/widgets/templates/empty_project/loader/testmodule_loader.py @@ -7,7 +7,7 @@ def testaction(self): Log.logger.debug("loading "+self.module_name) Log.logger.debug("testarg1 is:"+str(self.args.testarg1)) Log.logger.debug("testarg2 is:"+str(self.args.testarg2)) - obj=self.klass() + obj=self.klass obj.args=self.args obj.run() diff --git a/finhack/widgets/templates/empty_project/testmodule/default/default_testmodule.py b/finhack/widgets/templates/empty_project/testmodule/default/default_testmodule.py index d2735b6..f49d0ba 100644 --- a/finhack/widgets/templates/empty_project/testmodule/default/default_testmodule.py +++ b/finhack/widgets/templates/empty_project/testmodule/default/default_testmodule.py @@ -1,8 +1,10 @@ import finhack.library.log as Log 
from runtime.constant import * import runtime.global_var as global_var - +from finhack.market.astock.astock import AStock import time +from finhack.factor.default.factorManager import factorManager +from finhack.factor.default.factorAnalyzer import factorAnalyzer class DefaultTestmodule(): def __init__(self): pass @@ -18,4 +20,10 @@ def run(self): def run2(self): print(self.args) - print('run2') \ No newline at end of file + print('run2') + stock_list=AStock.getStockCodeList(strict=False, db='tushare') + print(stock_list) + + + def run3(self): + factorAnalyzer.alphalens("pe_0") \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index beeed5a..4b7db59 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,4 +23,5 @@ tabulate==0.8.10 mysql-connector-python==8.0.30 tushare==1.2.87 lightgbm==4.2.0 -cryptography==39.0.1 \ No newline at end of file +cryptography==39.0.1 +alphalens-reloaded==0.4.3 \ No newline at end of file diff --git a/setup.py b/setup.py index 27ab0cf..dffca59 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ import os root_dir = 'finhack' -version='0.0.1.dev4' +version='0.0.1.dev5' for subdir, dirs, files in os.walk(root_dir): if not '__init__.py' in files: