2020-10-06 16:05:35 +00:00
|
|
|
"""
|
|
|
|
(c) 2019 Healthcare/IO 1.0
|
|
|
|
Vanderbilt University Medical Center, Health Information Privacy Laboratory
|
|
|
|
https://hiplab.mc.vanderbilt.edu/healthcareio
|
|
|
|
|
|
|
|
|
|
|
|
Authors:
|
|
|
|
Khanhly Nguyen,
|
|
|
|
Steve L. Nyemba<steve.l.nyemba@vanderbilt.edu>
|
|
|
|
|
|
|
|
License:
|
|
|
|
MIT, terms are available at https://opensource.org/licenses/MIT
|
|
|
|
|
|
|
|
This parser was originally written by Khanhly Nguyen for her internship and is intended to parse x12 835,837 and others provided the appropriate configuration
|
|
|
|
USAGE :
|
|
|
|
- COMMAND LINE
|
|
|
|
|
|
|
|
- EMBEDDED
|
|
|
|
"""
|
|
|
|
import hashlib
|
|
|
|
import json
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
from itertools import islice
|
|
|
|
from multiprocessing import Process
|
|
|
|
import transport
|
2020-12-22 05:28:22 +00:00
|
|
|
import jsonmerge
|
2021-02-16 22:06:13 +00:00
|
|
|
|
|
|
|
import copy
|
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
class void :
    """
    Minimal attribute container used as an ad-hoc namespace: instances get
    arbitrary attributes attached at runtime (e.g. parser.get.value,
    parser.emit.pre) to group related callables.
    """
    pass
|
|
|
|
class Formatters :
    """
    Field-level formatting/parsing helpers for x12 segments: dates,
    procedure and diagnosis codes, SV2/SV3 service lines, place of service.

    The constructor exposes the same helpers under several aliases
    (self.parse.*, self.format_*, *_parser) so configuration files can
    reference them by different names.
    """
    def __init__(self):
        # self.config = config
        #
        # lookup helpers namespace
        self.get = void()
        self.get.config = self.get_config

        # parsing helpers namespace
        self.parse = void()
        self.parse.sv3 = self.sv3
        self.parse.sv2 = self.sv2
        # legacy aliases kept so older configurations keep working
        self.sv2_parser = self.sv2
        self.sv3_parser = self.sv3
        self.sv3_parse = self.sv3
        self.format_proc = self.procedure
        self.format_diag = self.diagnosis
        self.parse.procedure = self.procedure
        self.parse.diagnosis = self.diagnosis
        self.parse.date = self.date
        self.format_date = self.date
        self.format_pos = self.pos
        self.format_time = self.time
|
|
|
|
    def split(self,row,sep='*',prefix='HI') :
        """
        Split a raw x12 row into its elements.

        :row    raw segment text, e.g. 'CLP*...*...~'
        :sep    element separator (x12 default '*')
        :prefix segment id that triggers composite handling (default 'HI',
                whose elements are themselves '>'-separated composites)

        Returns a flat list of element strings for ordinary segments; for
        prefix (HI-style) segments each element is re-split on '>' and
        tagged with the prefix.
        """
        value = []
        if row.startswith(prefix) is False:

            for row_value in row.replace('~','').split(sep) :

                # '>' marks a composite element; HC composites are kept whole
                if '>' in row_value and not row_value.startswith('HC'):
                    # if row_value.startswith('HC') or row_value.startswith('AD'):
                    if row_value.startswith('AD'):
                        # AD composites: keep only qualifier and code
                        value += row_value.split('>')[:2]
                        pass

                    else:

                        value += [row_value]
                else :

                    value.append(row_value.replace('\n',''))
            # strip stray carriage returns from every element
            value = [xchar.replace('\r','') for xchar in value]
        else:
            # HI-style segment: split each element again on '>' (recursively)
            # and tag it with the segment prefix
            value = [ [prefix]+ self.split(item,'>') for item in row.replace('~','').split(sep)[1:] ]

        # NOTE(review): when `value` ends up as a list of lists only the
        # first composite is returned here - looks intended for
        # single-composite rows; confirm behavior for multi-composite HI rows
        return value if type(value) == list and type(value[0]) != list else value[0]
|
2020-10-06 16:05:35 +00:00
|
|
|
def get_config(self,config,row):
|
|
|
|
"""
|
|
|
|
This function will return the meaningfull parts of the configuration for a given item
|
|
|
|
"""
|
|
|
|
|
|
|
|
_row = list(row) if type(row[0]) == str else list(row[0])
|
|
|
|
|
|
|
|
_info = config[_row[0]] if _row[0] in config else {}
|
2021-02-16 22:06:13 +00:00
|
|
|
_rinfo = {}
|
2020-10-06 16:05:35 +00:00
|
|
|
key = None
|
|
|
|
if '@ref' in _info:
|
2021-02-16 22:06:13 +00:00
|
|
|
keys = list(set(_row) & set(_info['@ref'].keys()))
|
|
|
|
if keys :
|
|
|
|
_rinfo = {}
|
|
|
|
for key in keys :
|
|
|
|
_rinfo = jsonmerge.merge(_rinfo,_info['@ref'][key])
|
|
|
|
return _rinfo
|
|
|
|
# key = key[0]
|
|
|
|
# return _info['@ref'][key]
|
2020-10-06 16:05:35 +00:00
|
|
|
else:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
if not _info and 'SIMILAR' in config:
|
|
|
|
#
|
|
|
|
# Let's look for the nearest key using the edit distance
|
|
|
|
if _row[0] in config['SIMILAR'] :
|
|
|
|
key = config['SIMILAR'][_row[0]]
|
|
|
|
_info = config[key]
|
|
|
|
return _info
|
|
|
|
|
|
|
|
def hash(self,value):
|
|
|
|
salt = os.environ['HEALTHCAREIO_SALT'] if 'HEALTHCAREIO_SALT' in os.environ else ''
|
|
|
|
_value = str(value)+ salt
|
|
|
|
if sys.version_info[0] > 2 :
|
|
|
|
return hashlib.md5(_value.encode('utf-8')).hexdigest()
|
|
|
|
else:
|
|
|
|
return hashlib.md5(_value).hexdigest()
|
|
|
|
|
|
|
|
    def suppress (self,value):
        """Mask *value* for de-identification; always returns 'N/A'."""
        return 'N/A'
|
|
|
|
def date(self,value):
|
2021-05-07 14:56:40 +00:00
|
|
|
value = value if type(value) != list else "-".join(value)
|
2020-10-06 16:05:35 +00:00
|
|
|
if len(value) > 8 or '-' in value:
|
2021-05-07 14:56:40 +00:00
|
|
|
#
|
|
|
|
# This is the case of a thru date i.e the first part should be provided in a 435 entry
|
|
|
|
#
|
|
|
|
fdate = "-".join([value[:8][:4],value[:8][4:6],value[:8][6:8]])
|
|
|
|
tdate = "-".join([value[9:][:4],value[9:][4:6],value[9:][6:8]])
|
|
|
|
|
|
|
|
return {"from":fdate,"to":tdate}
|
2020-10-06 16:05:35 +00:00
|
|
|
|
|
|
|
if len(value) == 8 :
|
|
|
|
year = value[:4]
|
|
|
|
month = value[4:6]
|
|
|
|
day = value[6:]
|
|
|
|
return "-".join([year,month,day])[:10] #{"year":year,"month":month,"day":day}
|
|
|
|
elif len(value) == 6 :
|
|
|
|
year = '20' + value[:2]
|
|
|
|
month = value[2:4]
|
|
|
|
day = value[4:]
|
2021-05-03 04:50:41 +00:00
|
|
|
elif value.isnumeric() and len(value) >= 10:
|
|
|
|
#
|
|
|
|
# Here I a will assume we have a numeric vale
|
|
|
|
year = value[:4]
|
|
|
|
month= value[4:6]
|
|
|
|
day = value[6:8]
|
|
|
|
else:
|
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
#
|
|
|
|
# We have a date formatting issue
|
2021-05-03 18:42:38 +00:00
|
|
|
return value
|
|
|
|
return "-".join([year,month,day])
|
2020-10-06 16:05:35 +00:00
|
|
|
    def time(self,value):
        """Placeholder for time-of-service formatting; not implemented (returns None)."""
        pass
|
|
|
|
def sv3(self,value):
|
|
|
|
if '>' in value [1]:
|
|
|
|
terms = value[1].split('>')
|
|
|
|
return {'type':terms[0],'code':terms[1],"amount":float(value[2])}
|
|
|
|
else:
|
2020-12-11 12:45:10 +00:00
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
return {"code":value[2],"type":value[1],"amount":float(value[3])}
|
|
|
|
def sv2(self,value):
|
|
|
|
#
|
|
|
|
# @TODO: Sometimes there's a suffix (need to inventory all the variations)
|
|
|
|
#
|
|
|
|
if '>' in value or ':' in value:
|
|
|
|
xchar = '>' if '>' in value else ':'
|
|
|
|
_values = value.split(xchar)
|
|
|
|
modifier = {}
|
|
|
|
|
|
|
|
if len(_values) > 2 :
|
|
|
|
|
|
|
|
modifier= {"code":_values[2]}
|
|
|
|
if len(_values) > 3 :
|
|
|
|
modifier['type'] = _values[3]
|
|
|
|
_value = {"code":_values[1],"type":_values[0]}
|
|
|
|
if modifier :
|
|
|
|
_value['modifier'] = modifier
|
|
|
|
|
|
|
|
return _value
|
|
|
|
else:
|
|
|
|
return value
|
2020-10-06 19:12:43 +00:00
|
|
|
|
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
def procedure(self,value):
|
2021-01-18 20:49:56 +00:00
|
|
|
|
|
|
|
for xchar in [':','<','|','>'] :
|
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
if xchar in value and len(value.split(xchar)) > 1 :
|
|
|
|
#_value = {"type":value.split(':')[0].strip(),"code":value.split(':')[1].strip()}
|
|
|
|
_value = {"type":value.split(xchar)[0].strip(),"code":value.split(xchar)[1].strip()}
|
2021-01-18 20:49:56 +00:00
|
|
|
|
|
|
|
if len(value.split(xchar)) >2 :
|
2021-01-19 06:21:33 +00:00
|
|
|
index = 1;
|
|
|
|
for modifier in value.split(xchar)[2:] :
|
|
|
|
_value['modifier_'+str(index)] = modifier
|
|
|
|
index += 1
|
2020-10-06 16:05:35 +00:00
|
|
|
break
|
2021-01-19 06:21:33 +00:00
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
else:
|
|
|
|
_value = str(value)
|
|
|
|
return _value
|
2021-01-18 20:49:56 +00:00
|
|
|
def diagnosis(self,value):
|
2020-10-06 16:05:35 +00:00
|
|
|
|
|
|
|
return [ {"code":item[2], "type":item[1]} for item in value if len(item) > 1]
|
|
|
|
def pos(self,value):
|
|
|
|
"""
|
|
|
|
formatting place of service information within a segment (REF)
|
2021-01-12 17:40:40 +00:00
|
|
|
@TODO: In order to accomodate the other elements they need to be specified in the configuration
|
|
|
|
Otherwise it causes problems on export
|
2020-10-06 16:05:35 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
xchar = '>' if '>' in value else ':'
|
|
|
|
x = value.split(xchar)
|
2021-05-03 16:15:23 +00:00
|
|
|
x = {"place_of_service":x[0],"indicator":x[1],"frequency":x[2]} if len(x) == 3 else {"place_of_service":x[0],"indicator":None,"frequency":None}
|
|
|
|
return x
|
2020-10-06 16:05:35 +00:00
|
|
|
class Parser (Process):
    """
    Multiprocessing worker that parses x12 835/837 files according to a
    JSON configuration and persists the results through the transport layer.
    """
    def __init__(self,path):
        """
        :path path of the configuration file (it can be absolute)
        """
        Process.__init__(self)
        self.utils = Formatters()
        # namespaces of callables, mirroring the Formatters() convention
        self.get = void()
        self.get.value = self.get_map
        self.get.default_value = self.get_default_value
        # context manager closes the handle (it was previously leaked via
        # open(path).read())
        with open(path) as f :
            _config = json.loads(f.read())
        self._custom_config = self.get_custom(path)
        self.config = _config['parser']
        self.store = _config['store']
        # per-file loop cache (see set_cache/get_cache)
        self.cache = {}
        self.files = []
        self.set = void()
        self.set.files = self.set_files
        # optional user hooks fired before/after a batch (see run/finish)
        self.emit = void()
        self.emit.pre = None
        self.emit.post = None
|
2020-12-22 05:28:22 +00:00
|
|
|
def get_custom(self,path) :
|
|
|
|
"""
|
|
|
|
:path path of the configuration file (it can be absolute)
|
|
|
|
"""
|
|
|
|
#
|
|
|
|
#
|
|
|
|
_path = path.replace('config.json','')
|
|
|
|
if _path.endswith(os.sep) :
|
|
|
|
_path = _path[:-1]
|
|
|
|
|
|
|
|
_config = {}
|
|
|
|
_path = os.sep.join([_path,'custom'])
|
|
|
|
if os.path.exists(_path) :
|
|
|
|
|
|
|
|
files = os.listdir(_path)
|
|
|
|
if files :
|
2021-01-18 20:49:56 +00:00
|
|
|
fullname = os.sep.join([_path,files[0]])
|
2020-12-22 05:28:22 +00:00
|
|
|
_config = json.loads ( (open(fullname)).read() )
|
|
|
|
return _config
|
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
    def set_files(self,files):
        """Register the list of x12 file paths that run() will process."""
        self.files = files
|
|
|
|
    def get_map(self,row,config,version=None):
        """
        Build a dict (or list of dicts) from a split x12 *row* using the
        segment *config*.

        :row     list of tokens (simple segment) or list of token lists
                 (composite segment, handled recursively)
        :config  segment configuration; may carry 'apply', 'map', 'anchors',
                 'rewrite', 'cast', 'syn' and/or a version-specific map
        :version x12 version string; selects config[version] over
                 config['map'] when such a key exists
        """
        # label = config['label'] if 'label' in config else None
        handler = Formatters()

        # 'apply' (with no 'map') delegates the whole row to a named
        # Formatters helper such as procedure/diagnosis/pos
        if 'map' not in config and hasattr(handler,config['apply']):

            pointer = getattr(handler,config['apply'])

            object_value = pointer(row)
            return object_value
        #
        # Pull the mapping (field name -> token index) plus helpers
        #
        omap = config['map'] if not version or version not in config else config[version]
        anchors = config['anchors'] if 'anchors' in config else []
        rewrite = config['rewrite'] if 'rewrite' in config else {}
        if type(row[0]) == str:
            object_value = {}
            for key in omap :

                index = omap[key]
                # anchors shift the mapped index relative to a marker token
                # found in the row
                if anchors and set(anchors) & set(row):
                    _key = list(set(anchors) & set(row))[0]

                    aindex = row.index(_key)
                    index = aindex + index

                if index < len(row) :
                    value = row[index]

                    if 'cast' in config and key in config['cast'] and value.strip() != '' :
                        if config['cast'][key] in ['float','int']:
                            try:
                                value = eval(config['cast'][key])(value)
                            except Exception as e:
                                # keep the raw string when the cast fails
                                pass
                        #
                        # Sometimes shit hits the fan when the anchor is missing
                        # This is typical but using the hardened function helps circumvent this (SV2,SV3)
                        #
                        elif hasattr(handler,config['cast'][key]):

                            pointer = getattr(handler,config['cast'][key])
                            value = pointer(value)

                        else:
                            print ("Missing Pointer ",key,config['cast'])

                    if type(value) == dict :
                        for objkey in value :

                            if type(value[objkey]) == dict :
                                continue
                            if 'syn' in config and value[objkey] in config['syn'] :
                                # value[objkey] = config['syn'][ value[objkey]]
                                pass

                        # 'rewrite' replaces the dict by one of its own fields
                        if key in rewrite :

                            _key = rewrite[key]
                            if _key in value :
                                value = value[_key]
                            else:
                                value = ""

                        # wrap under the mapped field name unless the dict
                        # already carries that key
                        value = {key:value} if key not in value else value

                    else:

                        if 'syn' in config and value in config['syn'] :
                            # value = config['syn'][value]
                            pass

                    if type(value) == dict :

                        # object_value = dict(object_value, **value)
                        object_value = jsonmerge.merge(object_value, value)

                    else:

                        object_value[key] = value

        else:
            #
            # we are dealing with a complex object: recurse on each
            # composite and collect the results in order
            object_value = []

            for row_item in row :

                value = self.get.value(row_item,config,version)
                object_value.append(value)

            #
            # We need to add the index of the object it matters in determining the claim types
            #

        return object_value
|
2021-03-09 15:57:01 +00:00
|
|
|
def set_cache(self,tmp,_info) :
|
|
|
|
"""
|
|
|
|
insert into cache a value that the, these are in reference to a loop
|
|
|
|
"""
|
|
|
|
if 'cache' in _info :
|
|
|
|
key = _info['cache']['key']
|
|
|
|
value=_info['cache']['value']
|
|
|
|
field = _info['cache']['field']
|
|
|
|
if value in tmp :
|
|
|
|
self.cache [key] = {field:tmp[value]}
|
|
|
|
pass
|
|
|
|
def get_cache(self,row) :
|
|
|
|
"""
|
|
|
|
retrieve cache element for a current
|
|
|
|
"""
|
|
|
|
key = row[0]
|
|
|
|
return self.cache[key] if key in self.cache else {}
|
2021-03-09 16:36:11 +00:00
|
|
|
|
2020-10-06 19:12:43 +00:00
|
|
|
    def apply(self,content,_code) :
        """
        Fold one claim segment (list of raw x12 rows) into a single dict.

        :content content of a file i.e a segment with the envelope
        :_code   '837' or '835' (helps get the appropriate configuration)
        """
        util = Formatters()
        value = {}
        for row in content[:] :

            row = util.split(row.replace('\n','').replace('~',''))

            # base configuration for this segment id plus any custom override
            _info = util.get.config(self.config[_code][0],row)
            if self._custom_config and _code in self._custom_config:
                _cinfo = util.get.config(self._custom_config[_code],row)
            else:
                _cinfo = {}

            if _info or _cinfo:

                try:
                    # custom configuration wins on conflicting keys
                    _info = jsonmerge.merge(_info,_cinfo)
                    tmp = self.get.value(row,_info)

                    if not tmp :
                        continue
                    #
                    # At this point we have the configuration and the row parsed into values
                    # We should check to see if we don't have anything in the cache to be added to it
                    #
                    if row[0] in self.cache :
                        tmp = jsonmerge.merge(tmp,self.get_cache(row))

                    if 'label' in _info :
                        # labeled segments accumulate under value[label]
                        label = _info['label']

                        if type(tmp) == list :
                            value[label] = tmp if label not in value else value[label] + tmp

                        else:
                            if label not in value :
                                value[label] = []
                            value[label].append(tmp)

                            if '_index' not in tmp :
                                #
                                # In case we asked it to be overriden, then this will not apply
                                # X12 occasionally requires references to other elements in a loop (alas)
                                #
                                tmp['_index'] = len(value[label]) -1

                    elif 'field' in _info :
                        # 'field' segments contribute a single named attribute
                        name = _info['field']
                        value = dict(value,**{name:tmp})

                    else:
                        # anonymous segments are merged at the top level
                        value = dict(value,**tmp)

                    pass
                except Exception as e :
                    # best-effort: a malformed row is reported and skipped
                    print (e.args[0])
                    pass
                #
                # At this point the object is completely built; if there are
                # any attributes to be cached it is done here.
                # NOTE(review): uses `tmp` from the try block - if the
                # exception fired before tmp was bound this would raise;
                # confirm whether that case can occur
                #
                if 'cache' in _info :

                    self.set_cache(tmp,_info)

        return value if value else {}
|
2020-10-06 16:05:35 +00:00
|
|
|
|
|
|
|
    def get_default_value(self,content,_code):
        """
        Extract envelope-level defaults shared by every claim in the file.

        :content header rows of the x12 file (the rows preceding the first
                 claim loop)
        :_code   '835' or '837'
        :return  dict carrying category/submitted/sender_id plus whatever
                 the configured header segments yield

        NOTE(review): assumes content[1] is the '*'-separated interchange
        header row (sender id at index 2, date at index 4, version in the
        last token) - confirm against the files being parsed.
        """
        util = Formatters()
        TOP_ROW = content[1].split('*')

        SUBMITTED_DATE = util.parse.date(TOP_ROW[4])

        # functional group / transaction category (currently informational)
        CATEGORY= content[2].split('*')[1].strip()

        VERSION = content[1].split('*')[-1].replace('~','').replace('\n','')

        SENDER_ID = TOP_ROW[2]
        row = util.split(content[3])

        _info = util.get_config(self.config[_code][0],row)

        value = self.get.value(row,_info,VERSION) if _info else {}
        # version strings look like '005010X222A1': id before the X,
        # version from the X onward
        value['category'] = {"setid": _code,"version":'X'+VERSION.split('X')[1],"id":VERSION.split('X')[0].strip()}
        value["submitted"] = SUBMITTED_DATE
        value['sender_id'] = SENDER_ID

        # fold in anything the remaining header segments produce
        value = dict(value,**self.apply(content,_code))
        return value #jsonmerge.merge(value,self.apply(content,_code))
|
2020-10-06 16:05:35 +00:00
|
|
|
|
|
|
|
    def read(self,filename) :
        """
        :formerly get_content
        Parse an entire EDI file using the loaded configuration; the file
        type (835 remittance vs 837 claim) is detected from the content.

        :filename location of the file
        :return (claims, logs, _code) - parsed objects, log entries and the
                detected code; ([], logs, None) on failure
        """
        logs = []
        claims = []
        _code = 'UNKNOWN'
        try:
            # the loop cache is per-file
            self.cache = {}
            file = open(filename.strip())
            # a file that splits on CLP is a remittance (835); otherwise try
            # the claim loop marker CLM (837)
            file = file.read().split('CLP')
            _code = '835'
            section = 'CLP'

            if len(file) == 1 :

                file = file[0].split('CLM')
                _code = '837'
                section = 'CLM'

            # header rows live before the first claim loop; some files use
            # '~' instead of newlines as the row terminator
            INITIAL_ROWS = file[0].split(section)[0].split('\n')

            if len(INITIAL_ROWS) == 1 :

                INITIAL_ROWS = INITIAL_ROWS[0].split('~')

            DEFAULT_VALUE = self.get.default_value(INITIAL_ROWS,_code)
            DEFAULT_VALUE['name'] = filename.strip()

            # re-assemble the content and split it into individual rows
            file = section.join(file).split('\n')
            if len(file) == 1:

                file = file[0].split('~')
            #
            # In the initial rows, there's redundant information (so much for x12 standard)
            # index 1 identifies file type i.e CLM for claim and CLP for remittance
            segment = []
            index = 0;
            _toprows = []
            _default = None
            for row in file :

                row = row.replace('\r','')

                if row.startswith(section) and not segment:
                    # first loop marker opens the first segment
                    segment = [row]

                    continue

                elif segment and not row.startswith(section):

                    segment.append(row)

                if len(segment) > 1 and row.startswith(section):
                    #
                    # a new loop marker closes the previous segment:
                    # fold it into a claim and merge over the file defaults
                    #
                    _claim = self.apply(segment,_code)

                    if _claim :
                        _claim['index'] = index #len(claims)
                        #
                        # list attributes are appended, scalars overwritten
                        # @TODO: Fix merger related to schema (drops certain fields ... NOT cool)
                        schema = {"properties":{}}
                        for attr in _claim.keys() :
                            schema['properties'][attr] = {"mergeStrategy": "append" if type(_claim[attr]) == list else "overwrite" }

                        merger = jsonmerge.Merger(schema)
                        _baseclaim = None
                        _baseclaim = merger.merge(_baseclaim,copy.deepcopy(DEFAULT_VALUE))
                        _claim = merger.merge(_baseclaim,_claim)

                        claims.append( _claim)
                    segment = [row]

                    index += 1

                    pass
            #
            # Handling the last claim found
            if segment and segment[0].startswith(section) :

                claim = self.apply(segment,_code)
                if claim :
                    claim['index'] = len(claims)
                    #
                    # same default-merge dance as in the main loop
                    # @TODO: Fix merger related to schema (drops certain fields ... NOT cool)
                    schema = {"properties":{}}
                    for attr in claim.keys() :
                        schema['properties'][attr] = {"mergeStrategy": "append" if type(claim[attr]) == list else "overwrite" }
                    merger = jsonmerge.Merger(schema)
                    _baseclaim = None
                    _baseclaim = merger.merge(_baseclaim,copy.deepcopy(DEFAULT_VALUE))
                    claim = merger.merge(_baseclaim,claim)
                    claims.append(claim)

            # NOTE(review): `file` was rebound to a list of rows above, so
            # this close() never runs and the original handle is only
            # reclaimed by GC - confirm whether an explicit close is wanted
            if type(file) != list :
                file.close()

        except Exception as e:

            logs.append ({"parse":_code,"completed":False,"name":filename,"msg":e.args[0]})
            return [],logs,None

        rate = 0 if len(claims) == 0 else (1 + index)/len(claims)
        logs.append ({"parse":"claims" if _code == '837' else 'remits',"completed":True,"name":filename,"rate":rate})
        return claims,logs,_code
|
|
|
|
    def run(self):
        """
        Process entry point: parse every registered file and persist the
        results, firing the optional pre/post hooks around the batch
        (emit.pre here, emit.post inside finish()).
        """
        if self.emit.pre :
            self.emit.pre()

        for filename in self.files :
            content,logs,_code = self.read(filename)
            self.finish(content,logs,_code)
|
|
|
|
def finish(self,content,logs,_code) :
|
|
|
|
args = self.store
|
|
|
|
_args = json.loads(json.dumps(self.store))
|
|
|
|
if args['type'] == 'mongo.MongoWriter' :
|
|
|
|
args['args']['doc'] = 'claims' if _code == '837' else 'remits'
|
|
|
|
_args['args']['doc'] = 'logs'
|
2020-12-11 12:45:10 +00:00
|
|
|
else:
|
|
|
|
args['args']['table'] = 'claims' if _code == '837' else 'remits'
|
|
|
|
_args['args']['table'] = 'logs'
|
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
if content :
|
|
|
|
writer = transport.factory.instance(**args)
|
|
|
|
writer.write(content)
|
|
|
|
writer.close()
|
|
|
|
if logs :
|
2020-12-11 12:45:10 +00:00
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
logger = transport.factory.instance(**_args)
|
|
|
|
logger.write(logs)
|
2020-12-11 12:45:10 +00:00
|
|
|
|
2020-10-06 16:05:35 +00:00
|
|
|
logger.close()
|
2020-12-11 12:45:10 +00:00
|
|
|
if self.emit.post :
|
|
|
|
self.emit.post(content,logs)
|
2020-10-06 16:05:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
|