parser/healthcareio/healthcare-io.py

406 lines
15 KiB
Python
Raw Normal View History

2020-08-21 04:49:13 +00:00
#!/usr/bin/env python3
"""
(c) 2019 Claims Toolkit,
Health Information Privacy Lab, Vanderbilt University Medical Center
Steve L. Nyemba <steve.l.nyemba@vanderbilt.edu>
Khanhly Nguyen <khanhly.t.nguyen@gmail.com>
This code is intended to process and parse healthcare x12 837 (claims) and x12 835 (remittances) into human readable JSON format.
The claims/output can be forwarded to a NoSQL Data store like couchdb and mongodb
Usage :
Commandline :
python edi-parser --scope --config <path> --folder <path> --store <[mongo|disk|couch]> --<db|path]> <id|path>
with :
--scope <claims|remits>
--config path of the x12 to be parsed i.e it could be 835, or 837
--folder location of the files (they must be decompressed)
--store data store could be disk, mongodb, couchdb
--db|path name of the folder to store the output or the database name
Embedded in Code :
import edi.parser
import json
file = '/data/claim_1.x12'
conf = json.loads(open('config/837.json').read())
edi.parser.get_content(filename,conf)
"""
2020-08-21 14:28:24 +00:00
from healthcareio.params import SYS_ARGS
2020-08-21 04:49:13 +00:00
from transport import factory
import requests
2020-09-26 20:32:06 +00:00
from healthcareio import analytics
from healthcareio import server
2020-08-21 14:28:24 +00:00
from healthcareio.parser import get_content
2020-08-21 04:49:13 +00:00
import os
import json
import sys
2020-09-26 20:32:06 +00:00
import numpy as np
from multiprocessing import Process
import time
2020-10-06 16:47:20 +00:00
from healthcareio import x12
from healthcareio.export import export
2020-12-11 12:45:10 +00:00
import smart
2020-12-11 19:08:54 +00:00
from healthcareio.server import proxy
2020-12-11 12:45:10 +00:00
import pandas as pd
2020-09-26 20:32:06 +00:00
2020-08-21 04:49:13 +00:00
# -- Module-level configuration ----------------------------------------------
# PATH          : per-user configuration directory (~/.healthcareio)
# OUTPUT_FOLDER : default location for parsed output (~/healthcare-io)
# URL           : default parsing/signup server
PATH = os.sep.join([os.environ['HOME'],'.healthcareio'])
OUTPUT_FOLDER = os.sep.join([os.environ['HOME'],'healthcare-io'])
INFO = None
URL = "https://healthcareio.the-phi.com"
# Bootstrap the configuration directory on first run (import-time side effect)
if not os.path.exists(PATH) :
    os.mkdir(PATH)

import platform
import sqlite3 as lite
# PATH = os.sep.join([os.environ['HOME'],'.edi-parser'])
# Usage banner printed when arguments are missing or invalid
HELP_MESSAGE = """
    cli:
    healthcare-io.py --<[signup|init]> <email> --store <sqlite|mongo> [--batch <value>]
    healthcare-io.py --parse --folder <path> [--batch <value>] [--resume]
    healthcare-io.py --check-update
    healthcare-io.py --export <835|837> --config <config-path>
    action :
    --signup|init signup user and get configuration file
    --parse signup|init signup or get a configuration file from a parsing server
    --folder location of the files (the program will recursively traverse it)
    --store data store mongo or sqlite or mongodb
    --resume will attempt to resume if there was an interruption
    """
def signup (**args) :
    """
    Sign a user up against the parsing server and persist the returned
    configuration to ~/.healthcareio/config.json.

    :email  user's email address (required)
    :url    url of the provider to signup against (defaults to URL)
    :store  data-store type, 'sqlite' by default
    :db     database name / output folder name (defaults to 'healthcareio')
    """
    email = args['email']
    url = args['url'] if 'url' in args else URL
    # make sure both the configuration and the output folders exist
    folders = [PATH,OUTPUT_FOLDER]
    for path in folders :
        if not os.path.exists(path) :
            os.mkdir(path)
    store = args['store'] if 'store' in args else 'sqlite'
    # default 'db' so a missing argument does not raise KeyError
    # (keeps this consistent with the __main__ dispatch, which defaults
    # the database name to 'healthcareio')
    headers = {"email":email,"client":platform.node(),"store":store,"db":args.get('db','healthcareio')}
    http = requests.session()
    # the server replies with the parser configuration as JSON
    r = http.post(url,headers=headers)
    filename = (os.sep.join([PATH,'config.json']))
    info = r.json() #{"parser":r.json(),"store":store}
    # bind ownership and local storage paths before persisting
    info = dict({"owner":email},**info)
    info['store']['args']['path'] =os.sep.join([OUTPUT_FOLDER,'healthcare-io.db3']) #-- sql
    info['out-folder'] = OUTPUT_FOLDER
    # use a context manager so the handle is released even on write failure
    with open(filename,'w') as config_file :
        config_file.write( json.dumps(info))
    #
    # Create the sqlite3 database to
def log(**args):
    """
    This function will perform a log of anything provided to it
    """
    # NOTE(review): intentionally a no-op placeholder -- no logging backend
    # is wired in yet; callers can pass anything and nothing is recorded.
    pass
def init():
    """
    Read the configuration from ~/.healthcareio/config.json and make sure
    the local environment (output folder, sqlite database) is ready.

    :return: the configuration dict, or None when no config.json exists yet
             (callers guard on this, e.g. ``elif 'parse' in SYS_ARGS and info``)
    """
    filename = os.sep.join([PATH,'config.json'])
    info = None
    if os.path.exists(filename):
        # context manager instead of a leaked open() handle
        with open(filename) as config_file :
            info = json.loads(config_file.read())
        if not os.path.exists(info['out-folder']) :
            os.mkdir(info['out-folder'])
        # first run with a sqlite backend: create the database using the
        # schema the signup server supplied in the configuration
        if info['store']['type'] == 'disk.SQLiteWriter' and not os.path.exists(info['store']['args']['path']) :
            conn = lite.connect(info['store']['args']['path'],isolation_level=None)
            try:
                for key in info['schema'] :
                    _sql = info['schema'][key]['create']
                    # r = conn.execute("select * from sqlite_master where name in ('claims','remits')")
                    conn.execute(_sql)
                conn.commit()
            finally:
                # always release the connection, even if a CREATE fails
                conn.close()
    return info
#
# Global variables that load the configuration files
# def parse(**args):
# """
# This function will parse the content of a claim or remittance (x12 format) give the following parameters
# :filename absolute path of the file to be parsed
# :type claims|remits in x12 format
# """
# global INFO
# if not INFO :
# INFO = init()
# if args['type'] == 'claims' :
# CONFIG = INFO['parser']['837']
# elif args['type'] == 'remits' :
# CONFIG = INFO['parser']['835']
# else:
# CONFIG = None
# if CONFIG :
# # CONFIG = CONFIG[-1] if 'version' not in args and (args['version'] < len(CONFIG)) else CONFIG[0]
# CONFIG = CONFIG[int(args['version'])-1] if 'version' in SYS_ARGS and int(SYS_ARGS['version']) < len(CONFIG) else CONFIG[-1]
# SECTION = CONFIG['SECTION']
# os.environ['HEALTHCAREIO_SALT'] = INFO['owner']
2020-08-21 04:49:13 +00:00
# return get_content(args['filename'],CONFIG,SECTION)
# def resume (files,id,config):
# _args = config['store'].copy()
# if 'mongo' in config['store']['type'] :
# _args['type'] = 'mongo.MongoReader'
# reader = factory.instance(**_args)
# _files = []
# if 'resume' in config['analytics'] :
# _args = config['analytics']['resume'][id]
# _files = reader.read(**_args)
# _files = [item['name'] for item in _files if item['name'] != None]
# return list(set(files) - set(_files))
2020-10-01 16:43:57 +00:00
# return files
# pass
# def apply(files,store_info,logger_info=None):
# """
# :files list of files to be processed in this given thread/process
# :store_info information about data-store, for now disk isn't thread safe
# :logger_info information about where to store the logs
# """
2020-08-21 04:49:13 +00:00
# if not logger_info :
# logger = factory.instance(type='disk.DiskWriter',args={'path':os.sep.join([info['out-folder'],SYS_ARGS['parse']+'.log'])})
# else:
# logger = factory.instance(**logger_info)
2020-09-26 20:32:06 +00:00
# writer = factory.instance(**store_info)
# for filename in files :
2020-09-26 20:32:06 +00:00
# if filename.strip() == '':
# continue
# # content,logs = get_content(filename,CONFIG,CONFIG['SECTION'])
# #
# try:
# content,logs = parse(filename = filename,type=SYS_ARGS['parse'])
2020-10-01 16:43:57 +00:00
# if content :
# writer.write(content)
# if logs :
# [logger.write(dict(_row,**{"parse":SYS_ARGS['parse']})) for _row in logs]
# else:
# logger.write({"parse":SYS_ARGS['parse'],"name":filename,"completed":True,"rows":len(content)})
# except Exception as e:
2020-10-01 16:43:57 +00:00
# logger.write({"parse":SYS_ARGS['parse'],"filename":filename,"completed":False,"rows":-1,"msg":e.args[0]})
# # print ([filename,len(content)])
# #
# # @TODO: forward this data to the writer and log engine
2020-09-26 20:32:06 +00:00
#
2020-08-21 04:49:13 +00:00
def upgrade(**args):
    """
    :email provide us with who you are
    :key upgrade key provided by the server for a given email
    """
    # NOTE(review): this function assembles the request parameters but never
    # issues the HTTP call -- it is an unfinished stub (see the matching
    # 'upgrade' branch in __main__, which is also a pass-through).
    # Confirm the intended endpoint before wiring it up.
    url = args['url'] if 'url' in args else URL+"/upgrade"
    headers = {"key":args['key'],"email":args["email"],"url":url}
def check(**_args):
    """
    Query the parsing server for the latest available version.

    :param url      base url of the server (trailing slash tolerated)
    :param version  (optional) locally-installed version record
    :return: the server's version document as a dict
    """
    base_url = _args['url']
    if base_url.endswith('/') :
        base_url = base_url[:-1]
    endpoint = base_url + "/version"
    # NOTE(review): the local version record is assembled here but is not
    # sent with the request -- preserved as-is from the original.
    if 'version' in _args :
        version = _args['version']
    else:
        version = {"_id":"version","current":0.0}
    session = requests.session()
    response = session.get(endpoint)
    return response.json()
2020-08-21 04:49:13 +00:00
if __name__ == '__main__' :
    # Load the local configuration; None when the user has not signed up yet
    info = init()
    if 'out-folder' in SYS_ARGS :
        OUTPUT_FOLDER = SYS_ARGS['out-folder']
    SYS_ARGS['url'] = SYS_ARGS['url'] if 'url' in SYS_ARGS else URL

    # ------------------------------------------------------------------
    # Command dispatch over SYS_ARGS: signup/init, upgrade, parse,
    # analytics, check-update, export -- otherwise print usage.
    # ------------------------------------------------------------------
    if set(list(SYS_ARGS.keys())) & set(['signup','init']):
        #
        # This command will essentially get a new copy of the configurations
        # @TODO: Tie the request to a version ?
        #
        email = SYS_ARGS['signup'].strip() if 'signup' in SYS_ARGS else SYS_ARGS['init']
        url = SYS_ARGS['url'] if 'url' in SYS_ARGS else URL
        store = SYS_ARGS['store'] if 'store' in SYS_ARGS else 'sqlite'
        db='healthcareio' if 'db' not in SYS_ARGS else SYS_ARGS['db']
        signup(email=email,url=url,store=store,db=db)
    # else:
    #     m = """
    #         usage:
    #             healthcareio --signup --email myemail@provider.com [--url <host>]
    #     """
    #     print (m)
    elif 'upgrade' in SYS_ARGS :
        #
        # perform an upgrade i.e some code or new parsers information will be provided
        #
        pass
    elif 'parse' in SYS_ARGS and info:
        """
        In this section of the code we are expecting the user to provide :
        :folder location of the files to process or file to process
        :
        """
        # Collect the list of files to parse: either a single --file, or a
        # recursive walk of --folder
        files = []
        if 'file' in SYS_ARGS :
            files = [SYS_ARGS['file']] if not os.path.isdir(SYS_ARGS['file']) else []
        if 'folder' in SYS_ARGS and os.path.exists(SYS_ARGS['folder']):
            for root,_dir,f in os.walk(SYS_ARGS['folder']) :
                if f :
                    files += [os.sep.join([root,name]) for name in f]
            # names = os.listdir(SYS_ARGS['folder'])
            # files += [os.sep.join([SYS_ARGS['folder'],name]) for name in names if not os.path.isdir(os.sep.join([SYS_ARGS['folder'],name]))]
        else:
            #
            # raise an error
            pass
        #
        # if the user has specified to resume, we should look into the logs and pull the files processed and those that haven't
        #
        if 'resume' in SYS_ARGS :
            # NOTE(review): this open() handle is never closed -- harmless for
            # a short-lived CLI, but worth tightening with a context manager
            store_config = json.loads( (open(os.sep.join([PATH,'config.json']))).read() )
            files = proxy.get.resume(files,store_config )
            # print (["Found ",len(files)," files unprocessed"])
        #
        # @TODO: Log this here so we know what is being processed or not
        # NOTE(review): SCOPE is assigned but never used in this branch
        SCOPE = None

        if files : #and ('claims' in SYS_ARGS['parse'] or 'remits' in SYS_ARGS['parse']):
            # Split the file list into --batch chunks and run one x12.Parser
            # process per chunk, then wait for all of them to finish
            BATCH_COUNT = 1 if 'batch' not in SYS_ARGS else int (SYS_ARGS['batch'])
            files = np.array_split(files,BATCH_COUNT)
            procs = []
            index = 0
            for row in files :
                row = row.tolist()
                # logger.write({"process":index,"parse":SYS_ARGS['parse'],"file_count":len(row)})
                # proc = Process(target=apply,args=(row,info['store'],_info,))
                parser = x12.Parser(os.sep.join([PATH,'config.json']))
                parser.set.files(row)
                parser.start()
                procs.append(parser)
                # index = index + 1
            # Poll until every parser process has exited
            while len(procs) > 0 :
                procs = [proc for proc in procs if proc.is_alive()]
                time.sleep(2)
        pass
    elif 'analytics' in SYS_ARGS :
        # Launch the analytics dashboard web server (Flask app in a child process)
        PORT = int(SYS_ARGS['port']) if 'port' in SYS_ARGS else 5500
        DEBUG= int(SYS_ARGS['debug']) if 'debug' in SYS_ARGS else 0
        SYS_ARGS['context'] = SYS_ARGS['context'] if 'context' in SYS_ARGS else ''
        #
        #
        # PATH= SYS_ARGS['config'] if 'config' in SYS_ARGS else os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
        if os.path.exists(os.sep.join([PATH,'config.json'])) :
            e = analytics.engine(os.sep.join([PATH,'config.json'])) #--@TODO: make the configuration file globally accessible
            e.apply(type='claims',serialize=True)
            SYS_ARGS['engine'] = e
            SYS_ARGS['config'] = json.loads(open(os.sep.join([PATH,'config.json'])).read())
        else:
            SYS_ARGS['config'] = {"owner":None,"store":None}
        if 'args' not in SYS_ARGS['config'] :
            SYS_ARGS['config']["args"] = {"batch":1,"resume":True,"folder":"/data"}

        # Identify the current process via the 'smart' process reader
        me = pd.DataFrame(smart.top.read(name='healthcare-io.py')).args.unique().tolist()
        SYS_ARGS['me'] = me[0] #-- This key will identify the current process

        pointer = lambda : server.app.run(host='0.0.0.0',port=PORT,debug=DEBUG,threaded=False)
        pthread = Process(target=pointer,args=())
        pthread.start()
    elif 'check-update' in SYS_ARGS :
        # Compare the locally-recorded version against the server's
        _args = {"url":SYS_ARGS['url']}
        try:
            if os.path.exists(os.sep.join([PATH,'config.json'])) :
                SYS_ARGS['config'] = json.loads((open(os.sep.join([PATH,'config.json']))).read())
            else:
                SYS_ARGS['config'] = {}
            if 'version' in SYS_ARGS['config'] :
                _args['version'] = SYS_ARGS['config']['version']
            version = check(**_args)
            _version = {"current":0.0}if 'version' not in SYS_ARGS['config'] else SYS_ARGS['config']['version']
            if _version['current'] != version['current'] :
                print ()
                print ("You need to upgrade your system to version to ",version['current'])
                print ("\t- signup (for new configuration)")
                print ("\t- use pip to upgrade the codebase")
            else:
                print ()
                # NOTE(review): "configuraiton" is a typo in this user-facing
                # message; left byte-identical here, fix as a code change
                print ("You are running the current configuraiton version ",_version['current'])
        except Exception as e:
            print (e)
            pass

    elif 'export' in SYS_ARGS:
        #
        # this function is designed to export the data to csv
        #
        path = SYS_ARGS['config']
        TYPE = SYS_ARGS['export'] if 'export' in SYS_ARGS else '835'
        if not os.path.exists(path) or TYPE not in ['835','837']:
            print (HELP_MESSAGE)
        else:
            #
            # Let's run the export function ..., This will push files into a data-store of choice Redshift, PostgreSQL, MySQL ...
            #
            _store = {"type":"sql.SQLWriter","args":json.loads( (open(path) ).read())}

            # One export pipeline thread per segment type; stagger the starts,
            # then poll until all pipelines have drained
            pipes = export.Factory.instance(type=TYPE,write_store=_store) #"inspect":0,"cast":0}})
            # pipes[0].run()
            for thread in pipes:
                thread.start()
                time.sleep(1)
            while pipes :
                pipes = [thread for thread in pipes if thread.is_alive()]
                time.sleep(1)
    else:
        print(HELP_MESSAGE)