# parser/healthcareio/x12/parser.py
"""
This class refactors the default parsing class (better & streamlined implementation)
2023-11-21 18:58:06 +00:00
The class will rely on the new plug/play architectural style perform parsing
2023-05-18 14:22:48 +00:00
"""
from multiprocessing import Process, RLock
import os
import json

from healthcareio import x12
from healthcareio.x12.util import file, document
import numpy as np
import transport
import copy
from datetime import datetime
from healthcareio.logger import X12Logger
import time
import pandas as pd
from transport import providers


class BasicParser(Process):
    def __init__(self, **_args):
        super().__init__()
        self._plugins = _args['plugins']
        self._parents = _args['parents']
        self._files = _args['files']
        self._store = dict(_args['store'], **{'lock': True})
        self._template = x12.util.template(plugins=self._plugins)
        self._logger = X12Logger(store=self._store)
        if self._logger:
            _info = {key: len(self._plugins[key].keys()) for key in self._plugins}
            _data = {'plugins': _info, 'files': len(self._files), 'model': self._template}
            self._logger.log(module='BasicParser', action='init', data=_data)

    def log(self, **_args):
        """
        This function logs data to the configured location in JSON format:
        datetime, module, action, data
        """
        if self._logger:
            self._logger.log(**_args)
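
    # Example (hypothetical values): a call such as
    #   self.log(module='parse', action='file-count', data={'file_name': 'claims_001.835', 'claim_count': 10})
    # is expected to produce a JSON record carrying datetime, module, action and data,
    # per the docstring above; the exact field layout depends on X12Logger.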

    def apply(self, **_args):
        """
        :content    raw claim, i.e. CLP/CLM loops and related content
        :x12        file type, 837|835
        :document   document template with attributes pre-populated
        """
        _content = _args['content']
        _filetype = _args['x12']
        _doc = _args['document']
        _documentHandler = x12.util.document.Builder(plugins=self._plugins, parents=self._parents, logger=self._logger)
        try:
            for _row in _content:
                _data, _meta = _documentHandler.bind(row=_row, x12=_filetype)
                if _data and _meta:
                    _doc = _documentHandler.build(data=_data, document=_doc, meta=_meta, row=_row)
        except Exception as e:
            #
            # @TODO: log the failing segment here instead of printing
            print(e)
        return _doc
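
    # Sketch of the bind/build flow used above (hypothetical 835 segment; the
    # bind/build contract is taken from this module's own usage of Builder):
    #   _data, _meta = _documentHandler.bind(row=['CLP', '12345', '1', '150'], x12='835')
    #   # bind() maps the segment to a (data, meta) pair via the configured plugins
    #   _doc = _documentHandler.build(data=_data, document=_doc, meta=_meta, row=_row)
    #   # build() merges that pair into the accumulating claim document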

    def run(self):
        _handleContent = file.Content()
        _template = self._template
        #
        # @TODO: log the start of the parsing jobs:
        #   - number of files, plugin metadata
        for _absolute_path in self._files:
            try:
                _content = _handleContent.read(filename=_absolute_path)
                _content, _filetype = _handleContent.split(_content)
                #
                # LOG: filename with the claims found in it
                #
                # The first block is the header; it is common to all claims in the file
                _header = copy.deepcopy(_template[_filetype])
                _header = self.apply(content=_content[0], x12=_filetype, document=_header)
                _docs = []
                _ids = []
                for _rawclaim in _content[1:]:
                    _document = copy.deepcopy(_header)
                    if 'claim_id' in _document:
                        #
                        # @TODO: have a way to get the attribute for CLP or CLM
                        _ids.append(_document['claim_id'])
                    if isinstance(_absolute_path, str):
                        _document['filename'] = _absolute_path
                    _doc = self.apply(content=_rawclaim, x12=_filetype, document=_document)
                    if _doc:
                        _docs.append(_doc)
                #
                # LOG: information about the file that has just been processed
                _location = _absolute_path if isinstance(_absolute_path, str) else 'In-Memory'
                _data = {'filename': _location, 'available': len(_content[1:]), 'x12': _filetype, 'parsed': len(_docs)}
                self.log(module='parse', action='parse', data=_data)
                self.log(module='parse', action='file-count', data={'file_name': _absolute_path, 'file_type': _filetype, 'claims': _ids, 'claim_count': len(_ids)})
                #
                # Submit the batch we have so far
                #
                self.post(documents=_docs, x12=_filetype, filename=_location)
            except Exception as e:
                #
                # @TODO: log the filename and the segment of the claim within the file
                #
                print(e)
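
    # Illustration of the split consumed by run() (assumed layout, inferred from
    # the header/claim handling above):
    #   _content[0]  -> header segments shared by every claim in the file
    #   _content[1:] -> one list of segments per claim (CLP loops for 835, CLM loops for 837)
    # Each claim starts from a deep copy of the parsed header before its own
    # segments are applied.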

    def post(self, **_args):
        """
        Placeholder; subclasses override this to persist the parsed documents.
        """
        pass


class X12Parser(BasicParser):
    def __init__(self, **_args):
        super().__init__(**_args)
        self._store = _args['store']

    def post(self, **_args):
        """
        Write the parsed documents to persistent storage, serializing nested
        attributes to JSON.
        """
        _documents = _args['documents']
        if _documents:
            _store = copy.deepcopy(self._store)
            TABLE = 'claims' if _args['x12'] in ['837', 'claims'] else 'remits'
            _store['table'] = TABLE
            _store['context'] = 'write'
            _writer = transport.factory.instance(**_store)
            # if _store['provider'] not in [providers.MONGODB, providers.COUCHDB]:
            for _document in _documents:
                for field in _document:
                    if isinstance(_document[field], (dict, list)):
                        _document[field] = json.dumps(_document[field], default=str)
            _writer.write(_documents, table=TABLE)
            if hasattr(_writer, 'close'):
                _writer.close()
            #
            # LOG: report what was written
            _data = {'x12': _args['x12'], 'documents': len(_documents), 'filename': _args['filename']}
            self.log(module='parse', action='write', data=_data)
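

# A minimal usage sketch. The values below are hypothetical placeholders: in
# practice the plugin registry and parent map come from healthcareio's
# configuration, and _store must be a valid data-transport configuration.
if __name__ == '__main__':
    _plugins = {}                                                  # plugin registry (placeholder)
    _parents = {}                                                  # parent/child loop map (placeholder)
    _store = {'provider': 'sqlite', 'database': 'healthcare.db'}   # hypothetical store config
    _files = ['data/claims/claim_001.835']                         # hypothetical input file(s)
    parser = X12Parser(plugins=_plugins, parents=_parents, files=_files, store=_store)
    parser.start()   # BasicParser extends Process, so start() executes run() in a child process
    parser.join()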