import copy
import json
import time
from multiprocessing import Process

import numpy as np
import pandas as pd
import transport

from . import util
from healthcareio.logger import X12Logger

def build (**_args):
    """
    This function builds a default row template for each table derived from the x12 plugin template.
    The defaults are used downstream (see post) to seed tables that would otherwise receive no data.
    :plugins    loaded plugins
    :x12        file type i.e 837|835
    """
    _plugins = _args['plugins']
    _x12 = _args['x12']
    _template = util.template(plugins=_plugins)[_x12]
    _primaryKey = util.getPrimaryKey(plugins=_plugins,x12=_x12)
    _tables = {}
    _main = {}
    for _name in _template :
        _item = _template[_name] #copy.deepcopy(_template[_name])
        if _name not in _tables and type(_item) in [list,dict] :
            _tables[_name] = _item
            if _primaryKey not in _item and type(_item) == dict :
                _item[_primaryKey] = ''
                _tables[_name] = _item
        else:
            #
            # Scalar attributes belong to the main claims/remits table
            _main[_name] = ''

    _name = getContext(_x12)
    _tables[_name] = _main
    _template[_name] = _main

    return _tables

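#
# Illustration (the template below is hypothetical, not from the original source):
# given a plugin template such as {'claim_id':'', 'procedures':[], 'amount':''}
# for an 837, build(plugins=...,x12='837') would return something like
#   {'procedures': [], 'claims': {'claim_id': '', 'amount': ''}}
# i.e one entry per child table plus a default row for the main claims table.
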
def getContext(_x12) :
    return 'claims' if _x12 == '837' else 'remits'

def format(**_args) :
    """
    This function formats claim/remit rows into a relational model i.e a dictionary of table names and their rows
    :rows           rows to be formatted (claims or remits)
    :primary_key    primary key field name
    :x12            file type i.e 837|835
    """
    # _name = _args['table']
    _rows = _args['rows']
    _primary_key = _args['primary_key']
    _x12 = _args['x12']
    _mainTableName = getContext(_x12)
    _tables = {_mainTableName:[]}

    for _claim in _rows :
        #
        # Turn the claim into a relational model ...
        #
        _main = {}
        _pkvalue = None
        if _primary_key in _claim :
            _pkvalue = _claim[_primary_key]

        for _attrName in _claim :
            _item = _claim[_attrName]
            #
            # We have a casting problem between relational data-stores and JSON objects:
            # nested structures may arrive as JSON-encoded strings
            #
            if type(_item) == str and (_item.startswith("[") or _item.startswith("{")) :
                try:
                    _item = json.loads(_item)
                    _item = update(_item,_primary_key,_pkvalue)
                except Exception as ee :
                    # print (ee)
                    pass

            if _attrName not in _tables and type(_item) in [dict,list]:
                _tables[_attrName] = []
            if type(_item) in [dict,list] :
                _tables[_attrName] += _item if type(_item) == list else [_item]
            else:
                #
                # This section suggests we found a main table attribute
                _main[_attrName] = _item

        _tables[_mainTableName].append(_main)

    return _tables

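#
# Minimal sketch (illustration only, not part of the original module): the claim
# below is hypothetical and shows how format() explodes JSON-encoded attributes
# into child tables stamped with the claim's primary key.
def _demo_format():
    _claim = {'claim_id':'CLM001','amount':'125.00','procedures':'[{"code":"99213"},{"code":"87070"}]'}
    return format(rows=[_claim],x12='837',primary_key='claim_id')
    # returns approximately:
    # {'claims': [{'claim_id': 'CLM001', 'amount': '125.00'}],
    #  'procedures': [{'code': '99213', '_index': 0, 'claim_id': 'CLM001'},
    #                 {'code': '87070', '_index': 1, 'claim_id': 'CLM001'}]}
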
def update (_item,key,value):
    if type(_item) not in [dict,list] :
        return _item
    if type(_item) == dict :
        _item[key] = value
    else:
        #
        # List: go through every item and update accordingly,
        # keeping track of each row's position within the list
        _index = 0
        for _row in _item :
            if type(_row) == dict :
                _row['_index'] = _index
                _row[key] = value
            _index += 1
    return _item

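#
# Sketch (hypothetical values, for illustration): update() leaves scalars
# untouched and stamps dictionaries directly with the key/value pair, so
# child rows can always be joined back to the main claims/remits table.
def _demo_update():
    assert update('125.00','claim_id','CLM001') == '125.00'  # scalars pass through unchanged
    assert update({'first_name':'JOHN'},'claim_id','CLM001') == {'first_name':'JOHN','claim_id':'CLM001'}
    return True
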
def init(**_args):
    """
    This function kicks off the export process given claims/remits and the loaded plugins
    It requires the data it is pulling to be consistently formatted (otherwise nothing can be done)
    :plugins    loaded plugins
    :store      data store information i.e {source,target} specifications for data-transport
    :x12        file type i.e 837|835
    """
    _file_type = _args['x12']
    _plugins = _args['plugins']
    _store = _args['store']
    _default = build(plugins=_plugins,x12=_file_type)
    _logger = X12Logger(store = _store['source'])

    _df = read(store = _store['source'],x12=_file_type)
    #
    # @LOG :
    if _logger :
        _logger.log(module='init',action='export-init',data={'rows':_df.shape[0],'attributes':list(_df.columns)})

    _pkey = util.getPrimaryKey(plugins = _plugins, x12=_file_type)
    SEGMENTS = 4 # arbitrary choice
    _indexes = np.array_split(np.arange(_df.shape[0]),SEGMENTS)
    jobs = []
    _tables = {}
    for _ii in _indexes :
        try:
            _data = format(rows= _df.iloc[_ii].to_dict(orient='records'),x12=_file_type,primary_key=_pkey)
            _thread = Process(target=post,args=({'store':_store['target'],'data':_data,'default':_default,'x12':_file_type},))
            _thread.start()
            jobs.append(_thread)
            _tables = list(_data.keys())
        except Exception as e:
            #
            # @TODO: log the failed segment
            print (e)

    #
    # @LOG :
    if _logger :
        _logger.log(module='init',action='export-wait',data={'jobs':len(jobs),'tables':_tables})

    if jobs :
        while jobs :
            jobs = [thread for thread in jobs if thread.is_alive()]
            time.sleep(1)

def read (**_args):
    _store = copy.copy(_args['store'])
    _x12 = _args['x12']
    _store['table'] = getContext(_x12) #'claims' if _x12 == '837' else 'remits'
    _store['context'] = 'read'
    reader = transport.factory.instance(**_store)
    #
    # @TODO: reading should support streaming (for scalability)
    _df = reader.read()

    return _df

def post(_args):
    _data = _args['data']
    _store = _args['store']
    _store['context'] = 'write'
    _default = _args['default']
    _prefix = 'clm_' if _args['x12'] == '837' else 'rem_'

    for _name in _data :
        _tablename = _prefix+_name
        _store['table'] = _tablename if _name not in ['remits','claims'] else _name
        _store['lock'] = True
        writer = transport.factory.instance(**_store)
        if len(_data[_name]) == 0 and _name in _default and not writer.has(table=_tablename):
            #
            # Seed the table with the default row template built earlier
            _rows = [_default[_name]]
        else:
            _rows = _data[_name]
        writer.write(pd.DataFrame(_rows).fillna(''))
        if hasattr(writer,'close') :
            writer.close()
    #
    # @TODO: Have a logger here to log what's going on ...
    # _xwriter = transport.factory.instance(**_store)
    # _xwriter.write(_df)
    # _info = format()
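#
# Usage sketch (hypothetical store specifications; the provider keys below are
# illustrative only, actual values depend on the data-transport configuration):
#
#   _store = {
#       'source': {'provider':'mongodb','db':'healthcareio'},
#       'target': {'provider':'postgresql','database':'healthcareio'}
#   }
#   init(plugins=_plugins, store=_store, x12='837')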