bug fix: exports
parent a6b52719f9
commit 4a2c039c38
@@ -143,6 +143,8 @@ def parse (claim_folder:str,plugin_folder:str = None,config_path:str = None):
     # pass
     # else:
     # pass
+    print ()
+    print (" PARSED ")
     print ("...................... FINISHED .........................")
     #
     #
@@ -188,7 +190,6 @@ def publish (file_type:str,path:str):
     _type = 'remits'
     _x12 = '835'
     if _type :
-        print ([f"Exporting {_type}"])
         _store = {'source':os.sep.join([CONFIG_FOLDER,'config.json']),'target':path}
         for _key in _store :
             f = open(_store[_key])
@@ -200,6 +201,10 @@ def publish (file_type:str,path:str):
         x12.publish.init(plugins=_plugins,x12=_x12,store=_store)
     else:
         print ("Can not determine type, (837 or 835)")

+    print ()
+    print (" EXPORT ")
+    print ("...................... FINISHED .........................")
+

 if __name__ == '__main__' :
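Note on the two hunks above: publish() hands x12.publish.init a two-entry store mapping built from the configuration folder and the caller-supplied path. A minimal sketch of that mapping, assuming a configuration folder under the user's home directory (the actual value of CONFIG_FOLDER is defined elsewhere and is not shown in this diff, and the target path is an example):

import os

CONFIG_FOLDER = os.path.expanduser('~/.healthcareio')   # assumed location, not shown in this diff
path = '/tmp/export'                                     # example target supplied by the caller

_store = {
    'source': os.sep.join([CONFIG_FOLDER, 'config.json']),  # configuration the export reads from
    'target': path                                          # destination the exported tables are written to
}
print(_store)

The exports module below consumes the two halves separately: init() reads via _store['source'] and the worker processes write through _store['target'].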
@@ -6,7 +6,7 @@ import time
 import pandas as pd
 from multiprocessing import Process
 import json
-
+from healthcareio.logger import X12Logger
 def build (**_args):
     """
     This function will build SQL statements to create a table (perhaps not needed)
@@ -111,25 +111,40 @@ def init(**_args):
     _plugins = _args['plugins']
     _store = _args['store']
     _default = build(plugins=_plugins,x12=_file_type)
+    _logger = X12Logger(store = _store['source'])
+
     _df = read(store = _store['source'],x12=_file_type)
+    #
+    # @LOG :
+    if _logger :
+        _logger.log(module='init',action='export-init',data={'rows':_df.shape[0],'attributes':list(_df.columns)})
+
     _pkey = util.getPrimaryKey(plugins = _plugins, x12=_file_type)
     SEGMENTS = 4 # arbitrary choice
     _indexes = np.array_split(np.arange(_df.shape[0]),SEGMENTS)
     jobs = []
+    _tables = {}
     for _ii in _indexes :
         try:
             _data = format(rows= _df.iloc[_ii].to_dict(orient='records'),x12=_file_type,primary_key=_pkey)
             _thread = Process(target=post,args=({'store':_store['target'],'data':_data,'default':_default,'x12':_file_type},))
+            _thread.start()
             jobs.append(_thread)
+            _tables = list(_data.keys())
         except Exception as e:
             #
             # Log: sigment,
             print (e)
             pass

+    #
+    # @LOG :
+    if _logger :
+        _logger.log(module='init',action='export-wait',data={'jobs':len(jobs),'tables':_tables})
+
     if jobs :
-        jobs[0].start()
-        jobs[0].join()
+        # jobs[0].start()
+        # jobs[0].join()
         while jobs :
            jobs = [thread for thread in jobs if thread.is_alive()]
            time.sleep(1)
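The behavioral fix in this hunk: each worker Process is now started as soon as it is created (_thread.start() inside the loop), the old jobs[0].start()/jobs[0].join() lines, which only ever ran the first segment, are commented out, and completion is detected by polling is_alive(). A minimal, self-contained sketch of that pattern follows; post() here is a placeholder standing in for the module's real exporter, and the delays are made up:

import time
from multiprocessing import Process

def post(_args):
    time.sleep(_args['delay'])           # placeholder for the real write work

if __name__ == '__main__':
    jobs = []
    for _delay in [1, 2, 3, 4]:          # four segments, mirroring SEGMENTS = 4
        _thread = Process(target=post, args=({'delay': _delay},))
        _thread.start()                  # started immediately, as in the fixed loop
        jobs.append(_thread)

    while jobs:                          # drop handles as workers finish
        jobs = [thread for thread in jobs if thread.is_alive()]
        time.sleep(1)
    print('all export workers finished')

Polling with a one-second sleep keeps the parent watching all workers at once, at the cost of up to a second of extra latency after the last one exits; joining each handle in turn would be the stricter alternative.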
@@ -160,6 +175,7 @@ def post(_args):
         _tablename = _prefix+_name
         _store['table'] = _tablename if _name not in ['remits','claims'] else _name
         _store['context']='write'
+        _store['lock'] = True
         writer = transport.factory.instance(**_store)
         if len(_data[_name]) == 0 and _name in _default and not writer.has(table=_tablename):
             _rows = [_default[_name]]
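In post(), the added _store['lock'] = True is passed straight into transport.factory.instance(**_store) together with the table name and write context; presumably the flag asks the writer to serialize writes coming from the concurrent worker processes (the transport package's exact semantics are not shown here). The surrounding condition seeds a table that received no rows in this segment with its default row, apparently so the table still gets created. A small stand-alone sketch of that seeding logic, using a hypothetical FakeWriter in place of the transport writer and a made-up default row; the actual write call sits outside this hunk:

class FakeWriter:
    """Stand-in for the writer returned by transport.factory.instance(**_store)."""
    def __init__(self):
        self.tables = {}
    def has(self, table):
        return table in self.tables
    def write(self, rows, table):
        self.tables.setdefault(table, []).extend(rows)

_default = {'claims': {'claim_id': None, 'status': None}}   # hypothetical default row from build()
_data    = {'claims': []}                                   # this segment produced no claim rows
_prefix, _name = '', 'claims'
_tablename = _prefix + _name

writer = FakeWriter()
if len(_data[_name]) == 0 and _name in _default and not writer.has(table=_tablename):
    _rows = [_default[_name]]            # fall back to the default row
    writer.write(_rows, table=_tablename)
print(writer.tables)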