data tool (continue): pytdx adds a selectable start date for the initial import
This commit is contained in: parent 8a9f175463, commit 14585da6a5
@@ -36,7 +36,7 @@ class ProgressBar:
 
 
 class ImportPytdxToH5:
-    def __init__(self, queue, sqlitefile, market, ktype, quotations, ip, port, dest_dir):
+    def __init__(self, queue, sqlitefile, market, ktype, quotations, ip, port, dest_dir, start_datetime):
         self.task_name = 'IMPORT_KDATA'
         self.queue = queue
         self.sqlitefile = sqlitefile
@@ -46,7 +46,7 @@ class ImportPytdxToH5:
         self.ip = ip
         self.port = port
         self.dest_dir = dest_dir
-
+        self.startDatetime = start_datetime
 
     def __call__(self):
         count = 0
@@ -57,7 +57,7 @@ class ImportPytdxToH5:
             api = TdxHq_API()
             api.connect(self.ip, self.port)
             count = import_data(connect, self.market, self.ktype, self.quotations,
-                                api, self.dest_dir, progress)
+                                api, self.dest_dir, self.startDatetime, progress)
         except Exception as e:
             print(e)
         finally:
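
Taken together, the three hunks above mean the caller now supplies the import start point when it builds the task; the task stores it as self.startDatetime and forwards it to import_data. A minimal sketch of that flow, assuming ImportPytdxToH5 is importable from the task module and using hypothetical paths and server values:

    from multiprocessing import Queue

    # hypothetical values for illustration only
    start_datetime = 202001010000  # YYYYMMDDHHMM integer for 2020-01-01 00:00
    task = ImportPytdxToH5(
        Queue(), 'stock.db', 'SH', 'DAY', ['stock'],
        '119.147.212.81', 7709, '/tmp/hikyuu',
        start_datetime,
    )
    # task() would connect to the TDX server and call
    # import_data(..., self.dest_dir, self.startDatetime, progress)
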
@@ -44,6 +44,8 @@ class UsePytdxImportToH5Thread(QThread):
         self.msg_name = 'HDF5_IMPORT'
 
         self.process_list = []
+        self.hosts = []
+        self.tasks = []
 
         self.quotations = []
         if self.config['quotation']['stock']:
@@ -53,7 +55,6 @@ class UsePytdxImportToH5Thread(QThread):
 
         self.queue = Queue()
 
-
     def __del__(self):
         for p in self.process_list:
             if p.is_alive():
@@ -101,12 +102,12 @@ class UsePytdxImportToH5Thread(QThread):
         if len(use_hosts) < task_count:
             for i in range(task_count - len(use_hosts)):
                 use_hosts.insert(0, (self.hosts[0][2], self.hosts[0][3]))
-        #for i in range(len(use_hosts)):
-        #    print(i, use_hosts[i])
+        # for i in range(len(use_hosts)):
+        #     print(i, use_hosts[i])
 
         cur_host = 0
 
-        #Assign the TDX servers, fastest to slowest, to tasks ordered by data volume from largest to smallest
+        # Assign the TDX servers, fastest to slowest, to tasks ordered by data volume from largest to smallest
         if self.config.getboolean('ktype', 'trans', fallback=False):
             today = datetime.date.today()
             trans_start_date = datetime.datetime.strptime(config['ktype']['trans_start_date'], '%Y-%m-%d').date()
@@ -129,11 +130,12 @@ class UsePytdxImportToH5Thread(QThread):
             cur_host += 1
 
         if self.config.getboolean('ktype', 'min', fallback=False):
+            start_date = datetime.datetime.strptime(config['ktype']['min_start_date'], '%Y-%m-%d').date()
             self.tasks.append(
                 ImportPytdxToH5(
                     self.queue, sqlite_file_name, 'SH', '1MIN', self.quotations,
                     use_hosts[cur_host][0], use_hosts[cur_host][1],
-                    dest_dir
+                    dest_dir, start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
                 )
             )
             cur_host += 1
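
The expression appended after dest_dir packs the configured start date into the integer YYYYMMDDHHMM form that matches the HDF5 table's datetime values (see the last_datetime comparison further below); the time part is left as 0000. For example:

    import datetime

    start_date = datetime.datetime.strptime('2020-01-06', '%Y-%m-%d').date()
    start_int = start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
    print(start_int)  # 202001060000, i.e. 2020-01-06 00:00

The defaults 199012191500 and 199012190000 used later follow the same layout: 1990-12-19 15:00 and 1990-12-19 00:00.
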
@@ -142,7 +144,7 @@ class UsePytdxImportToH5Thread(QThread):
                     self.queue, sqlite_file_name,
                     'SZ', '1MIN', self.quotations,
                     use_hosts[cur_host][0], use_hosts[cur_host][1],
-                    dest_dir
+                    dest_dir, start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
                 )
             )
             cur_host += 1
@@ -165,31 +167,51 @@ class UsePytdxImportToH5Thread(QThread):
             cur_host += 1
 
         if self.config.getboolean('ktype', 'min5', fallback=False):
-            self.tasks.append(ImportPytdxToH5(self.queue, sqlite_file_name, 'SH', '5MIN',
-                                              self.quotations,
-                                              use_hosts[cur_host][0], use_hosts[cur_host][1],
-                                              dest_dir))
+            start_date = datetime.datetime.strptime(config['ktype']['min5_start_date'], '%Y-%m-%d').date()
+            self.tasks.append(
+                ImportPytdxToH5(
+                    self.queue, sqlite_file_name, 'SH', '5MIN',
+                    self.quotations,
+                    use_hosts[cur_host][0], use_hosts[cur_host][1],
+                    dest_dir,
+                    start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
+                )
+            )
             cur_host += 1
-            self.tasks.append(ImportPytdxToH5(self.queue, sqlite_file_name, 'SZ', '5MIN',
-                                              self.quotations,
-                                              use_hosts[cur_host][0], use_hosts[cur_host][1],
-                                              dest_dir))
+            self.tasks.append(
+                ImportPytdxToH5(
+                    self.queue, sqlite_file_name, 'SZ', '5MIN',
+                    self.quotations,
+                    use_hosts[cur_host][0], use_hosts[cur_host][1],
+                    dest_dir,
+                    start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
+                )
+            )
             cur_host += 1
 
 
         if self.config.getboolean('ktype', 'day', fallback=False):
-            self.tasks.append(ImportPytdxToH5(self.queue, sqlite_file_name, 'SH', 'DAY',
-                                              self.quotations,
-                                              use_hosts[cur_host][0], use_hosts[cur_host][1],
-                                              dest_dir))
+            start_date = datetime.datetime.strptime(config['ktype']['day_start_date'], '%Y-%m-%d').date()
+            self.tasks.append(
+                ImportPytdxToH5(
+                    self.queue, sqlite_file_name, 'SH', 'DAY',
+                    self.quotations,
+                    use_hosts[cur_host][0], use_hosts[cur_host][1],
+                    dest_dir,
+                    start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
+                )
+            )
             cur_host += 1
-            self.tasks.append(ImportPytdxToH5(self.queue, sqlite_file_name, 'SZ', 'DAY',
-                                              self.quotations,
-                                              use_hosts[cur_host][0], use_hosts[cur_host][1],
-                                              dest_dir))
+            self.tasks.append(
+                ImportPytdxToH5(
+                    self.queue, sqlite_file_name, 'SZ', 'DAY',
+                    self.quotations,
+                    use_hosts[cur_host][0], use_hosts[cur_host][1],
+                    dest_dir,
+                    start_date.year * 100000000 + start_date.month * 1000000 + start_date.day * 10000
+                )
+            )
             cur_host += 1
 
 
     def run(self):
         try:
             self.init_task()
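
The same year * 100000000 + month * 1000000 + day * 10000 expression is now repeated for every market/ktype pair. A small helper (hypothetical, not part of this commit) would express the intent once:

    import datetime

    def date_to_ymdhm(d):
        # Pack a datetime.date into the YYYYMMDDHHMM integer used by the importer; time part is 0000.
        return d.year * 100000000 + d.month * 1000000 + d.day * 10000

    # date_to_ymdhm(datetime.date(2020, 1, 6)) == 202001060000
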
@@ -171,7 +171,7 @@ def guess_5min_n_step(last_datetime):
     return (n, step)
 
 
-def import_one_stock_data(connect, api, h5file, market, ktype, stock_record):
+def import_one_stock_data(connect, api, h5file, market, ktype, stock_record, startDate=199012191500):
     market = market.upper()
     pytdx_market = to_pytdx_market(market)
 
@@ -179,7 +179,7 @@ def import_one_stock_data(connect, api, h5file, market, ktype, stock_record):
         stock_record[4]
 
     table = get_h5table(h5file, market, code)
-    last_datetime = table[-1]['datetime'] if table.nrows > 0 else 199012191500
+    last_datetime = table[-1]['datetime'] if table.nrows > 0 else startDate
 
     today = datetime.date.today()
     if ktype == 'DAY':
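
With this change the startDate argument only matters for stocks that have no stored data yet: an existing HDF5 table still resumes from its last stored bar. An illustration of the selection rule (simplified names, not the project code):

    def resolve_resume_point(nrows, last_stored, startDate=199012191500):
        # Mirror of the conditional above: resume after the last stored bar, else start at startDate.
        return last_stored if nrows > 0 else startDate

    resolve_resume_point(0, None)                  # 199012191500 -> full history
    resolve_resume_point(0, None, 202001010000)    # 202001010000 -> user-chosen start
    resolve_resume_point(500, 202312290000)        # 202312290000 -> resume where the table ends
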
@@ -264,7 +264,7 @@ def import_one_stock_data(connect, api, h5file, market, ktype, stock_record):
     return add_record_count
 
 
-def import_data(connect, market, ktype, quotations, api, dest_dir, progress=ProgressBar):
+def import_data(connect, market, ktype, quotations, api, dest_dir, startDate=199012190000, progress=ProgressBar):
     """Import K-line data from the specified TDX after-hours data directory. Note: only stocks already present in the base-information database are imported.
 
     :param connect : sqlite3 connection
@@ -289,7 +289,7 @@ def import_data(connect, market, ktype, quotations, api, dest_dir, progress=Prog
             progress(i, total)
             continue
 
-        this_count = import_one_stock_data(connect, api, h5file, market, ktype, stock)
+        this_count = import_one_stock_data(connect, api, h5file, market, ktype, stock, startDate)
         add_record_count += this_count
         if this_count > 0:
             if ktype == 'DAY':
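
For a standalone run outside the GUI thread, the new parameter slots in just before progress. A hedged sketch, assuming an open sqlite3 base-information database and hypothetical server and path values:

    import sqlite3
    from pytdx.hq import TdxHq_API

    connect = sqlite3.connect('stock.db')        # base-information database (hypothetical path)
    api = TdxHq_API()
    api.connect('119.147.212.81', 7709)          # any reachable TDX quote server
    count = import_data(connect, 'SH', 'DAY', ['stock'], api, '/tmp/hikyuu',
                        startDate=202001010000)  # only applied to stocks with no existing records
    print('records added:', count)
    api.disconnect()
    connect.close()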