bug fixes and interface

This commit is contained in:
Steve Nyemba 2023-12-07 13:00:16 -06:00
parent a863f5e2b9
commit 39a484ee4a
9 changed files with 475 additions and 330 deletions

View File

@ -10,8 +10,104 @@ This code is intended to process and parse healthcare x12 837 (claims) and x12 8
The claims/output can be forwarded to a NoSQL Data store like couchdb and mongodb
Usage :
Commandline :
python xreader.py --parse claims|remits --config <path>
python x12parser <action>
action:
- parser
- create.plugin
- register.plugin
-
Embedded :
"""
# import healthcareio
import typer
from typing import Optional
from typing_extensions import Annotated
import uuid
import os
import version
import json
import time
from healthcareio import x12
from healthcareio.x12.parser import X12Parser
# import healthcareio
# import healthcareio.x12.util
# from healthcareio.x12.parser import X12Parser
app = typer.Typer()
CONFIG_FOLDER = os.sep.join([os.environ['HOME'],'.healthcareio'])
@app.command(name='init')
def config(email:str,provider:str='sqlite') :
    """
    Generate configuration file needed with default data store. For supported data-store providers visit https://hiplab.mc.vanderbilt.edu/git/hiplab/data-transport.git

    :email your email
    \r:provider data store provider (visit https://hiplab.mc.vanderbilt.edu/git/hiplab/data-transport.git)
    """
    _db = "healthcareio"
    # Make sure the configuration folder exists; exist_ok avoids a race with
    # a concurrent run (os.mkdir would raise if the folder appeared in between)
    os.makedirs(CONFIG_FOLDER, exist_ok=True)
    if provider in ['sqlite','sqlite3'] :
        # sqlite keeps the database as a plain file inside the config folder
        _db = os.sep.join([CONFIG_FOLDER,_db+'.db3'])
    _config = {
        "store":{
            "provider":provider,"database":_db,"context":"write"
        },
        "plugins":None,
        "system":{
            "uid":str(uuid.uuid4()),
            "email":email,
            "version":version.__version__,
            "copyright":version.__author__
        }
    }
    #
    # Persist the configuration to disk; the context manager guarantees the
    # handle is closed even if serialization fails mid-write
    with open(os.sep.join([CONFIG_FOLDER,'config.json']),'w') as f:
        json.dump(_config, f)
@app.command(name='about')
def copyright():
    """Print the application's name, author and license notices."""
    _notices = (version.__name__, version.__author__, version.__license__)
    for _notice in _notices:
        print(_notice)
@app.command()
def parse (claim_folder:str,plugin_folder:str = None,config_path:str = None):
    """
    This function will parse 837 and or 835 claims given a location of parsing given claim folder and/or plugin folder

    :claim_folder folder containing x12 claim/remittance files
    \r:plugin_folder optional folder with user-defined plugins
    \r:config_path optional configuration path (defaults to ~/.healthcareio/config.json)
    """
    _plugins,_parents = x12.plugins.instance(path=plugin_folder)
    _files = x12.util.file.Location.get(path=claim_folder,chunks=10)
    _path = config_path if config_path else os.sep.join([CONFIG_FOLDER,'config.json'])
    if not os.path.exists(_path) :
        # Previously this case was silently ignored; tell the user what to do instead
        print (f"No configuration found at {_path}; run `init` to create one")
        return
    # Context manager closes the handle even if the JSON is malformed
    with open(_path) as f:
        _config = json.load(f)
    _store = _config['store']
    jobs = []
    for _chunk in _files:
        # One parser process per chunk of files
        pthread = X12Parser(plugins=_plugins,parents=_parents,files=_chunk, store=_store)
        pthread.start()
        jobs.append(pthread)
    #
    # Wait until every parser process has finished
    while jobs :
        jobs = [pthread for pthread in jobs if pthread.is_alive()]
        time.sleep(1)
#
#
#
# Entry point: hand control to the typer CLI application
if __name__ == '__main__' :
    app()

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python3
"""
(c) 2019 Claims Toolkit,
Health Information Privacy Lab, Vanderbilt University Medical Center

View File

@ -1,5 +1,5 @@
__author__ = 'The Phi Technology LLC'
__version__ = '1.0'
__version__ = '2.0-RC'
__license__ = """
(c) 2019 EDI Parser Toolkit,
Health Information Privacy Lab, Vanderbilt University Medical Center & The Phi Technology
@ -8,10 +8,18 @@ Steve L. Nyemba <steve.l.nyemba@vumc.org>
Khanhly Nguyen <khanhly.t.nguyen@gmail.com>
This code is intended to process and parse healthcare x12 837 (claims) and x12 835 (remittances) into human readable JSON format.
The claims/output can be forwarded to a NoSQL Data store like couchdb and mongodb
This framework is intended to parse and structure healthcare x12 837 (claims) and x12 835 (remittances) into human readable formats
- ( parse {x12} ) --> ( store as JSON ) --> ( export to database)
The supported databases are mysql, postgresql, sqlite3, mongodb, couchdb ...
More information on supported databases is available at https://hiplab.mc.vanderbilt.edu/git/hiplab/data-transport.git
Sample 835 and 837 claims (zipped) are available for download at https://x12.org/examples/
Usage :
Commandline :
python xreader.py --parse claims|remits --config <path>
Embedded :
"""
"""
__name__ = "Healthcare/IO::Parser "+__version__

View File

@ -21,6 +21,11 @@ import hashlib
import json
import os
import sys
# version 2.0
# import util
# from parser import X12Parser
#-- end
from itertools import islice
from multiprocessing import Process
import transport

View File

@ -11,8 +11,8 @@ import numpy as np
import transport
import copy
# from healthcareio.x12.util import file as File, document as Document
from datetime import datetime
from healthcareio.logger import X12Logger
import time
class BasicParser (Process) :
def __init__(self,**_args):
@ -21,8 +21,26 @@ class BasicParser (Process) :
self._parents = _args['parents']
self._files = _args['files']
self._store = _args['store']
self._template = x12.util.template(plugins=self._plugins)
# self._logger = _args['logger'] if 'logger' in _args else None
self._logger = X12Logger(store = self._store)
if self._logger :
_info = { key:len(self._plugins[key].keys())for key in self._plugins}
_data = {'plugins':_info,'files': len(self._files),'model':self._template}
self._logger.log(module='BasicParser',action='init',data=_data)
def log (self,**_args):
    """
    This function logs data into a specified location in JSON format
    datetime,module,action,data

    NOTE(review): currently a no-op placeholder — actual logging appears to be
    handled by self._logger (X12Logger) elsewhere in this class; confirm before
    relying on this hook.
    """
    pass
def apply(self,**_args):
"""
:content raw claim i.e CLP/CLM Loops and related content
:x12 file type 837|835
:document document template with attributes pre-populated
"""
_content = _args['content']
_filetype = _args['x12']
_doc = _args['document'] #{}
@ -51,20 +69,27 @@ class BasicParser (Process) :
def run(self):
_handleContent = x12.util.file.Content()
_handleDocument = x12.util.document.Builder(plugins = self._plugins,parents=self._parents)
_template = _plugins,_parents = x12.util.template(plugins=self._plugins)
_template = self._template #x12.util.template(plugins=self._plugins)
#
# @TODO: starting initializing parsing jobs :
# - number of files, plugins meta data
_log = {}
for _absolute_path in self._files :
try:
_content = _handleContent.read(filename=_absolute_path)
_content,_filetype = _handleContent.split(_content)
#
# LOG: filename with claims found in it
#
# The first row is the header (it will be common to all claims)
_header = copy.deepcopy(_template[_filetype])
_header = self.apply(content=_content[0],x12=_filetype, document=_header)
_docs = []
for _rawclaim in _content[1:] :
_document = copy.deepcopy(_header) #copy.deepcopy(_template[_filetype])
@ -72,26 +97,30 @@ class BasicParser (Process) :
if type(_absolute_path) == str:
_document['filename'] = _absolute_path
_doc = self.apply(content=_rawclaim,x12=_filetype, document=_document)
if _doc :
#
# @TODO: Make sure the test here is the existence of the primary key
# _doc = _handleDocument.merge(_doc,_header)
if _doc :
_docs.append(_doc)
else:
# print (['wtf ...',_rawclaim])
pass
#
# LOG: information about the file that has just been processed.
_location = _absolute_path if type(_absolute_path) == str else 'In-Memory'
_data = {'filename':_location, 'available':len(_content[1:]),'x12':_filetype}
_args = {'module':'parse','action':'parse','data':_data}
_data['parsed'] = len(_docs)
self._logger.log(**_args)
#
# Let us submit the batch we have thus far
#
self.post(documents=_docs,type=_filetype)
self.post(documents=_docs,x12=_filetype,filename=_location)
except Exception as e:
#
# LOG: We have filename and segment of the claim within filename
#
print (e)
def post(self,**_args):
    # No-op in the base parser; subclasses (e.g. X12Parser) override this to
    # write the parsed documents to the configured data store
    pass
@ -107,14 +136,17 @@ class X12Parser(BasicParser):
_documents = _args['documents']
if _documents :
_store = copy.copy(self._store,**{})
TABLE = 'claims' if _args['type'] in ['837','claims'] else 'remits'
TABLE = 'claims' if _args['x12'] in ['837','claims'] else 'remits'
_store['table'] = TABLE
_writer = transport.factory.instance(**_store)
_writer.write(_documents)
if getattr(_writer,'close') :
_writer.close()
#
# LOG: report what was written
_data = {'x12':_args['x12'], 'documents':len(_documents),'filename':_args['filename']}
self._logger.log(module='write',action='write',data=_data)
# def instance (**_args):
# """

View File

@ -138,7 +138,7 @@ def instance(**_args):
_map['835'] = _handler.merge(_map['*'],_map['835'])
_map['837'] = _handler.merge(_map['*'],_map['837'])
if 'path' in _args:
if 'path' in _args and _args['path']:
#
# We can/will override the default modules given the user has provided a location
# _module = imp.load_source('udf',_args['path'])
@ -249,331 +249,331 @@ def filter (**_args) :
_map[x12] = {key:_item[key] for key in _found }
return _map
def getTableName(**_args) :
_plugins = _args['plugins']
_meta = _args['meta']
_x12 = _meta['x12']
_foreignkeys = _args['tableKeys']
_attributes = list(_meta['map'].values()) if 'map' in _meta else _meta['columns']
if 'field' in _meta or 'container' in _meta:
_tableName = _meta['field'] if 'field' in _meta else _meta['container']
# def getTableName(**_args) :
# _plugins = _args['plugins']
# _meta = _args['meta']
# _x12 = _meta['x12']
# _foreignkeys = _args['tableKeys']
# _attributes = list(_meta['map'].values()) if 'map' in _meta else _meta['columns']
# if 'field' in _meta or 'container' in _meta:
# _tableName = _meta['field'] if 'field' in _meta else _meta['container']
# _table = {_id:_attributes}
# # _table = {_id:_attributes}
elif 'anchor' in _meta :
_tableName = _meta['anchor'].values()
# for _name in _meta['anchor'].values() :
# _table[_name] = _attributes
elif 'parent' in _meta and _meta['parent']:
#
# We can have a parent with no field/container/anchor
# We expect either a map or columns ...
#
_parentElement = _meta['parent']
_parentMeta = _plugins[_x12][_parentElement].meta
_parentTable = getTableName(plugins=_plugins,meta = _parentMeta,tableKeys=_foreignkeys)
_tableName = list(_parentTable.keys())[0]
# _table[_id] = _parentTable[_id] + _attributes
_attributes = _parentTable[_tableName] + _attributes
# print (_meta)
else:
#
# baseline tables have no parent, we need to determine the name
#
_tableName = 'claims' if _x12 == '837' else 'remits'
# print (_id,_attributes)
# elif 'anchor' in _meta :
# _tableName = _meta['anchor'].values()
# # for _name in _meta['anchor'].values() :
# # _table[_name] = _attributes
# elif 'parent' in _meta and _meta['parent']:
# #
# # We can have a parent with no field/container/anchor
# # We expect either a map or columns ...
# #
# _parentElement = _meta['parent']
# _parentMeta = _plugins[_x12][_parentElement].meta
# _parentTable = getTableName(plugins=_plugins,meta = _parentMeta,tableKeys=_foreignkeys)
# _tableName = list(_parentTable.keys())[0]
# # _table[_id] = _parentTable[_id] + _attributes
# _attributes = _parentTable[_tableName] + _attributes
# # print (_meta)
# else:
# #
# # baseline tables have no parent, we need to determine the name
# #
# _tableName = 'claims' if _x12 == '837' else 'remits'
# # print (_id,_attributes)
pass
#
# Are there any anchors
if _x12 == '837':
_keys = [_foreignkeys['claims']]
elif _x12 == '835' :
_keys = [_foreignkeys['remits']]
else:
_keys = list(set(_foreignkeys.values()))
_attr = []
for _item in _attributes :
if type(_item) == list :
_attr += _item
else:
_attr.append(_item)
_keys = list(set(_keys) - set(_attr))
_attr = _keys + _attr
# if 'container' in _meta and _meta['container'] == 'procedures' :
# print (_attributes)
_tableName = [_tableName] if type(_tableName) == str else _tableName
return dict.fromkeys(_tableName,_attr)
def _getTableName (**_args):
"""
This function provides a list of attributes associated with an entity
The function infers a relational structure from the JSON representation of a claim and plugin specifications
"""
_meta = _args['meta']
_xattr = list(_meta['map'].values()) if 'map' in _meta else _meta['columns']
_plugins = _args['plugins']
_foreignkeys = _args['tableKeys']
#
# Fix attributes, in case we have an index associated with multiple fields
#
_attr = []
if 'anchor' not in _meta and not _meta['parent']:
for _item in _xattr :
_attr += _item if type(_item) == list else [_item]
_name = None
_info = {}
_infoparent = {}
if 'field' in _meta :
_name = _meta['field']
elif 'container' in _meta :
_name = _meta['container']
elif 'anchor' in _meta :
# pass
# #
# # Are there any anchors
# if _x12 == '837':
# _keys = [_foreignkeys['claims']]
# elif _x12 == '835' :
# _keys = [_foreignkeys['remits']]
# else:
# _keys = list(set(_foreignkeys.values()))
# _attr = []
# for _item in _attributes :
# if type(_item) == list :
# _attr += _item
# else:
# _attr.append(_item)
# _keys = list(set(_keys) - set(_attr))
# _attr = _keys + _attr
# # if 'container' in _meta and _meta['container'] == 'procedures' :
# # print (_attributes)
# _tableName = [_tableName] if type(_tableName) == str else _tableName
# return dict.fromkeys(_tableName,_attr)
# def _getTableName (**_args):
# """
# This function provides a list of attributes associated with an entity
# The function infers a relational structure from the JSON representation of a claim and plugin specifications
# """
# _meta = _args['meta']
# _xattr = list(_meta['map'].values()) if 'map' in _meta else _meta['columns']
# _plugins = _args['plugins']
# _foreignkeys = _args['tableKeys']
# #
# # Fix attributes, in case we have an index associated with multiple fields
# #
# _attr = []
# if 'anchor' not in _meta and not _meta['parent']:
# for _item in _xattr :
# _attr += _item if type(_item) == list else [_item]
# _name = None
# _info = {}
# _infoparent = {}
# if 'field' in _meta :
# _name = _meta['field']
# elif 'container' in _meta :
# _name = _meta['container']
# elif 'anchor' in _meta :
_name = list(_meta['anchor'].values())
# if _name :
# _name = _name if type(_name) == list else [_name]
# _info = dict.fromkeys(_name,_attr)
if _meta['parent'] :
_parentElement = filter(elements=[_meta['parent']],plugins=_plugins)
_x12 = list(_parentElement.keys())[0]
_id = list(_parentElement[_x12].keys())[0]
_infoparent = getTableName(meta = _parentElement[_x12][_id].meta,plugins=_plugins,tableKeys=_foreignkeys)
# _name = list(_meta['anchor'].values())
# # if _name :
# # _name = _name if type(_name) == list else [_name]
# # _info = dict.fromkeys(_name,_attr)
# if _meta['parent'] :
# _parentElement = filter(elements=[_meta['parent']],plugins=_plugins)
# _x12 = list(_parentElement.keys())[0]
# _id = list(_parentElement[_x12].keys())[0]
# _infoparent = getTableName(meta = _parentElement[_x12][_id].meta,plugins=_plugins,tableKeys=_foreignkeys)
if _meta['x12'] == '*' :
_name = ['claims','remits'] if not _name else _name
_attr = list(set(_foreignkeys.values())) + _attr
else:
_name = 'claims' if _meta['x12'] == '837' and not _name else ('remits' if not _name and _meta['x12'] == '835' else _name)
_id = 'claims' if _meta['x12'] == '837' else 'remits'
if _id in _foreignkeys:
_attr = [_foreignkeys[_id]] + _attr
# if _meta['x12'] == '*' :
# _name = ['claims','remits'] if not _name else _name
# _attr = list(set(_foreignkeys.values())) + _attr
# else:
# _name = 'claims' if _meta['x12'] == '837' and not _name else ('remits' if not _name and _meta['x12'] == '835' else _name)
# _id = 'claims' if _meta['x12'] == '837' else 'remits'
# if _id in _foreignkeys:
# _attr = [_foreignkeys[_id]] + _attr
# if not _name :
# if _meta['x12'] == '*' :
# _name = ['claims','remits']
# else:
# _name = 'claims' if _meta['x12'] == '837' else 'remits'
#
# Let us make sure we can get the keys associated here ...
#
# filter (elements = [])
_name = _name if type(_name) == list else [_name]
_info = dict.fromkeys(_name,_attr)
if _infoparent:
_info = dict(_info,**_infoparent)
# # if not _name :
# # if _meta['x12'] == '*' :
# # _name = ['claims','remits']
# # else:
# # _name = 'claims' if _meta['x12'] == '837' else 'remits'
# #
# # Let us make sure we can get the keys associated here ...
# #
# # filter (elements = [])
# _name = _name if type(_name) == list else [_name]
# _info = dict.fromkeys(_name,_attr)
# if _infoparent:
# _info = dict(_info,**_infoparent)
return _info
# return _info
def getTableKeys(**_args):
    """
    Derive the primary-key column for claims (CLM) and remits (CLP) from the
    loaded plugin metadata.

    :plugins plugin map as produced by the plugin loader
    :return dict of the form {'claims': <column>, 'remits': <column>}
            (an entry is omitted when the corresponding element has no plugin)
    """
    _plugins=_args['plugins']
    _pointer = filter(elements=['CLM'],plugins=_plugins)
    _keys = {}
    for _element in ['CLM','CLP'] :
        # `filter` here is the module-level plugin filter, not the builtin
        _pointer = filter(elements=[_element],plugins=_plugins)
        if not _pointer :
            continue
        _pointer = list(_pointer.values())[0]
        _meta = _pointer[_element].meta
        # First mapped field (or first column) is taken as the key attribute
        _name = _meta['map'][1] if 'map' in _meta else _meta['columns'][0]
        _id = 'claims' if _element == 'CLM' else 'remits'
        _keys[_id] = _name
    return _keys
# print (list(_pointer.values())[0]['CLM'].meta)
# print (_pointer.values()[0].meta)
def sql (**_args):
    """
    Infer a relational schema from the plugin metadata.

    :plugins plugin map as produced by the plugin loader
    :parents parent/child relationships between plugin elements
    :return dict mapping table name -> list of attributes, merged across all
            plugin elements
    """
    _plugins = _args['plugins']
    # _info = {'foreign':{},'keys':{'claims':None,'remits':None}}
    _documentHandler = x12.util.document.Builder(plugins=_plugins,parents=_args['parents'])
    _tableKeys = getTableKeys(plugins=_plugins)
    _schemas = {}
    for key in _plugins :
        _mpointers = _plugins[key]
        for _element in _mpointers :
            _pointer = _mpointers[_element]
            _meta = _pointer.meta
            # Each element contributes one table definition (name + attributes)
            _info = getTableName(meta=_meta,plugins=_plugins,tableKeys=_tableKeys)
            # _schemas = dict(_schemas,**_info)
            if _info :
                # Merge rather than overwrite so shared tables accumulate attributes
                _schemas = _documentHandler.merge(_schemas,_info)
            # print (_info)
    return _schemas
# if not _info :
# print (_meta)
# continue
# if _meta['x12'] in ['837','837'] :
# _schema_id = 'claims' if _meta['x12'] == '837' else 'remits'
# _schema_id = [_schema_id]
# else:
# _schema_id = ['claims','remits']
# def getTableKeys(**_args):
# _plugins=_args['plugins']
# _pointer = filter(elements=['CLM'],plugins=_plugins)
# _keys = {}
# for _element in ['CLM','CLP'] :
# _pointer = filter(elements=[_element],plugins=_plugins)
# if not _pointer :
# continue
# _pointer = list(_pointer.values())[0]
# _meta = _pointer[_element].meta
# _name = _meta['map'][1] if 'map' in _meta else _meta['columns'][0]
# _id = 'claims' if _element == 'CLM' else 'remits'
# _keys[_id] = _name
# return _keys
# # print (list(_pointer.values())[0]['CLM'].meta)
# # print (_pointer.values()[0].meta)
# def sql (**_args):
# _plugins = _args['plugins']
# # _info = {'foreign':{},'keys':{'claims':None,'remits':None}}
# _documentHandler = x12.util.document.Builder(plugins=_plugins,parents=_args['parents'])
# _tableKeys = getTableKeys(plugins=_plugins)
# _schemas = {}
# for key in _plugins :
# _mpointers = _plugins[key]
# for _element in _mpointers :
# _pointer = _mpointers[_element]
# _meta = _pointer.meta
# _info = getTableName(meta=_meta,plugins=_plugins,tableKeys=_tableKeys)
# # _schemas = dict(_schemas,**_info)
# if _info :
# _schemas = _documentHandler.merge(_schemas,_info)
# # print (_info)
# return _schemas
# # if not _info :
# # print (_meta)
# # continue
# # if _meta['x12'] in ['837','837'] :
# # _schema_id = 'claims' if _meta['x12'] == '837' else 'remits'
# # _schema_id = [_schema_id]
# # else:
# # _schema_id = ['claims','remits']
# if _info :
# #
# # foreign tables need to be placed here
# # if _info :
# # #
# # # foreign tables need to be placed here
# for _id in _schema_id :
# if type(_info) == list :
# _schemas[_id]['attributes'] += _info
# else:
# _schemas[_id]['foreign'] = dict(_schemas[_id]['foreign'],**_info)
# else:
# #
# # This one goes to the main tables
# for _id in _schema_id :
# print (_info)
# _schemas[_id]['attributes'] += list(_info.values())
# # for _id in _schema_id :
# # if type(_info) == list :
# # _schemas[_id]['attributes'] += _info
# # else:
# # _schemas[_id]['foreign'] = dict(_schemas[_id]['foreign'],**_info)
# # else:
# # #
# # # This one goes to the main tables
# # for _id in _schema_id :
# # print (_info)
# # _schemas[_id]['attributes'] += list(_info.values())
# DEFAULT_PLUGINS='healthcareio.x12.plugins.default'
# class MODE :
# TRUST,CHECK,TEST,TEST_AND_CHECK= [0,1,2,3]
# def instance(**_args) :
# pass
# def has(**_args) :
# """
# This function will inspect if a function is valid as a plugin or not
# name : function name for a given file
# path : python file to examine
# """
# _pyfile = _args['path'] if 'path' in _args else ''
# _name = _args['name']
# # p = os.path.exists(_pyfile)
# _module = {}
# if os.path.exists(_pyfile):
# _info = IL.utils.spec_from_file_location(_name,_pyfile)
# if _info :
# _module = IL.utils.module_from_spec(_info)
# _info.load.exec(_module)
# # DEFAULT_PLUGINS='healthcareio.x12.plugins.default'
# # class MODE :
# # TRUST,CHECK,TEST,TEST_AND_CHECK= [0,1,2,3]
# # def instance(**_args) :
# # pass
# # def has(**_args) :
# # """
# # This function will inspect if a function is valid as a plugin or not
# # name : function name for a given file
# # path : python file to examine
# # """
# # _pyfile = _args['path'] if 'path' in _args else ''
# # _name = _args['name']
# # # p = os.path.exists(_pyfile)
# # _module = {}
# # if os.path.exists(_pyfile):
# # _info = IL.utils.spec_from_file_location(_name,_pyfile)
# # if _info :
# # _module = IL.utils.module_from_spec(_info)
# # _info.load.exec(_module)
# else:
# _module = sys.modules[DEFAULT_PLUGINS]
# return hasattr(_module,_name)
# def get(**_args) :
# """
# This function will inspect if a function is valid as a plugin or not
# name : function name for a given file
# path : python file to examine
# """
# _pyfile = _args['path'] if 'path' in _args else ''
# _name = _args['name']
# # p = os.path.exists(_pyfile)
# _module = {}
# if os.path.exists(_pyfile):
# _info = IL.utils.spec_from_file_location(_name,_pyfile)
# if _info :
# _module = IL.utils.module_from_spec(_info)
# _info.load.exec(_module)
# # else:
# # _module = sys.modules[DEFAULT_PLUGINS]
# # return hasattr(_module,_name)
# # def get(**_args) :
# # """
# # This function will inspect if a function is valid as a plugin or not
# # name : function name for a given file
# # path : python file to examine
# # """
# # _pyfile = _args['path'] if 'path' in _args else ''
# # _name = _args['name']
# # # p = os.path.exists(_pyfile)
# # _module = {}
# # if os.path.exists(_pyfile):
# # _info = IL.utils.spec_from_file_location(_name,_pyfile)
# # if _info :
# # _module = IL.utils.module_from_spec(_info)
# # _info.load.exec(_module)
# else:
# _module = sys.modules[DEFAULT_PLUGINS]
# return getattr(_module,_name) if hasattr(_module,_name) else None
# # else:
# # _module = sys.modules[DEFAULT_PLUGINS]
# # return getattr(_module,_name) if hasattr(_module,_name) else None
# def test (**_args):
# """
# This function will test a plugin to insure the plugin conforms to the norm we are setting here
# :pointer function to call
# """
# _params = {}
# try:
# if 'pointer' in _args :
# _caller = _args['pointer']
# else:
# _name = _args['name']
# _path = _args['path'] if 'path' in _args else None
# _caller = get(name=_name,path=_path)
# _params = _caller()
# #
# # the expected result is a list of field names [field_o,field_i]
# #
# return [_item for _item in _params if _item not in ['',None] and type(_item) == str]
# except Exception as e :
# return []
# pass
# def inspect(**_args):
# _mode = _args['mode']
# _name= _args['name']
# _path= _args['path']
# if _mode == MODE.CHECK :
# _doapply = [has]
# elif _mode == MODE.TEST :
# _doapply = [test]
# elif _mode == MODE.TEST_AND_CHECK :
# _doapply = [has,test]
# _status = True
# _plugin = {"name":_name}
# for _pointer in _doapply :
# _plugin[_pointer.__name__] = _pointer(name=_name,path=_path)
# if not _plugin[_pointer.__name__] :
# _status = False
# break
# _plugin['loaded'] = _status
# return _plugin
# def load(**_args):
# """
# This function will load all the plugins given an set of arguments :
# path file
# name list of functions to export
# mode 1- CHECK ONLY, 2 - TEST ONLY, 3- TEST_AND_CHECK
# """
# _path = _args ['path']
# _names= _args['names']
# _mode= _args ['mode'] if 'mode' in _args else MODE.TEST_AND_CHECK
# _doapply = []
# if _mode == MODE.CHECK :
# _doapply = [has]
# elif _mode == MODE.TEST :
# _doapply = [test]
# elif _mode == MODE.TEST_AND_CHECK :
# _doapply = [has,test]
# # _plugins = []
# _plugins = {}
# for _name in _names :
# _plugin = {"name":_name}
# if 'inspect' in _args and _args['inspect'] :
# _plugin = inspect(name=_name,mode=_mode,path=_path)
# else:
# _plugin["method"] = ""
# _status = True
# _plugin['loaded'] = _status
# if _plugin['loaded'] :
# _plugin['pointer'] = get(name=_name,path=_path)
# else:
# _plugin['pointer'] = None
# # def test (**_args):
# # """
# # This function will test a plugin to insure the plugin conforms to the norm we are setting here
# # :pointer function to call
# # """
# # _params = {}
# # try:
# # if 'pointer' in _args :
# # _caller = _args['pointer']
# # else:
# # _name = _args['name']
# # _path = _args['path'] if 'path' in _args else None
# # _caller = get(name=_name,path=_path)
# # _params = _caller()
# # #
# # # the expected result is a list of field names [field_o,field_i]
# # #
# # return [_item for _item in _params if _item not in ['',None] and type(_item) == str]
# # except Exception as e :
# # return []
# # pass
# # def inspect(**_args):
# # _mode = _args['mode']
# # _name= _args['name']
# # _path= _args['path']
# # if _mode == MODE.CHECK :
# # _doapply = [has]
# # elif _mode == MODE.TEST :
# # _doapply = [test]
# # elif _mode == MODE.TEST_AND_CHECK :
# # _doapply = [has,test]
# # _status = True
# # _plugin = {"name":_name}
# # for _pointer in _doapply :
# # _plugin[_pointer.__name__] = _pointer(name=_name,path=_path)
# # if not _plugin[_pointer.__name__] :
# # _status = False
# # break
# # _plugin['loaded'] = _status
# # return _plugin
# # def load(**_args):
# # """
# # This function will load all the plugins given an set of arguments :
# # path file
# # name list of functions to export
# # mode 1- CHECK ONLY, 2 - TEST ONLY, 3- TEST_AND_CHECK
# # """
# # _path = _args ['path']
# # _names= _args['names']
# # _mode= _args ['mode'] if 'mode' in _args else MODE.TEST_AND_CHECK
# # _doapply = []
# # if _mode == MODE.CHECK :
# # _doapply = [has]
# # elif _mode == MODE.TEST :
# # _doapply = [test]
# # elif _mode == MODE.TEST_AND_CHECK :
# # _doapply = [has,test]
# # # _plugins = []
# # _plugins = {}
# # for _name in _names :
# # _plugin = {"name":_name}
# # if 'inspect' in _args and _args['inspect'] :
# # _plugin = inspect(name=_name,mode=_mode,path=_path)
# # else:
# # _plugin["method"] = ""
# # _status = True
# # _plugin['loaded'] = _status
# # if _plugin['loaded'] :
# # _plugin['pointer'] = get(name=_name,path=_path)
# # else:
# # _plugin['pointer'] = None
# # _plugins.append(_plugin)
# _plugins[_name] = _plugin
# return _plugins
# # # _plugins.append(_plugin)
# # _plugins[_name] = _plugin
# # return _plugins
# def parse(**_args):
# """
# This function will apply a function against a given function, and data
# :row claim/remits pre-processed
# :plugins list of plugins
# :conifg configuration associated with
# """
# _row = _args['row']
# _document = _args['document']
# _config = _args['config']
# """
# "apply":"@path:name"
# """
# # def parse(**_args):
# # """
# # This function will apply a function against a given function, and data
# # :row claim/remits pre-processed
# # :plugins list of plugins
# # :conifg configuration associated with
# # """
# # _row = _args['row']
# # _document = _args['document']
# # _config = _args['config']
# # """
# # "apply":"@path:name"
# # """
# _info = _args['config']['apply']
# _plug_conf = _args['config']['plugin'] if 'plugin' in _args['config'] else {}
# if _info.startswith('@') :
# _path = '' #-- get this from general configuration
# elif _info.startswith('!'):
# _path = _info.split('!')[0][1:]
# _name = _info.split(':')[-1]
# _name = _args['config']['apply'].split(_path)
# # _info = _args['config']['apply']
# # _plug_conf = _args['config']['plugin'] if 'plugin' in _args['config'] else {}
# # if _info.startswith('@') :
# # _path = '' #-- get this from general configuration
# # elif _info.startswith('!'):
# # _path = _info.split('!')[0][1:]
# # _name = _info.split(':')[-1]
# # _name = _args['config']['apply'].split(_path)

View File

@ -106,11 +106,15 @@ def init(**_args):
_indexes = np.array_split(np.arange(_df.shape[0]),SEGMENTS)
jobs = []
for _ii in _indexes :
_data = format(rows= _df.iloc[_ii].to_dict(orient='records'),x12=_file_type,primary_key=_pkey)
_thread = Process(target=post,args=({'store':_store['target'],'data':_data,'default':_default,'x12':_file_type},))
jobs.append(_thread)
try:
_data = format(rows= _df.iloc[_ii].to_dict(orient='records'),x12=_file_type,primary_key=_pkey)
_thread = Process(target=post,args=({'store':_store['target'],'data':_data,'default':_default,'x12':_file_type},))
jobs.append(_thread)
except Exception as e:
#
# Log: segment,
pass
if jobs :
jobs[0].start()
jobs[0].join()

View File

@ -204,7 +204,6 @@ class Builder:
_parent = None
_data = {}
# _document = _args['document']
if not _pointer :
return None,None
#

View File

@ -2,21 +2,22 @@
This is a build file for the
"""
from setuptools import setup, find_packages
from healthcareio import version
import os
import sys
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory."""
    # `with` guarantees the file handle is closed even if .read() raises
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
args = {
"name":"healthcareio","version":"1.6.4.8",
"author":"Vanderbilt University Medical Center",
"name":"healthcareio","version":version.__version__,
"author":version.__author__,
"author_email":"steve.l.nyemba@vumc.org",
"include_package_data":True,
"license":"MIT",
"license":version.__license__,
"packages":find_packages(),
"keywords":["healthcare","edi","x12","analytics","835","837","data","transport","protocol"]
}
args["install_requires"] = ['flask-socketio','seaborn','jinja2','jsonmerge', 'weasyprint','data-transport@git+https://healthcareio.the-phi.com/git/code/transport.git','pymongo','numpy','cloudant','pika','boto','botocore','flask-session','smart_open','smart-top@git+https://healthcareio.the-phi.com/git/code/smart-top.git@data-collector']
args["install_requires"] = ['typer','flask-socketio','seaborn','jinja2','jsonmerge', 'weasyprint','data-transport@git+https://healthcareio.the-phi.com/git/code/transport.git','pymongo','numpy','cloudant','pika','boto','botocore','flask-session','smart_open','smart-top@git+https://healthcareio.the-phi.com/git/code/smart-top.git@data-collector']
args['url'] = 'https://hiplab.mc.vanderbilt.edu'
args['scripts']= ['healthcareio/healthcare-io.py']
# args['entry_points'] = {