bug fix: export import bug
commit ddddb897d0 (parent 995ea21320)
@@ -192,18 +192,23 @@ class Apex :
     @staticmethod
     def scalar(item):
         _df = item['data']
-        name = _df.columns.tolist()[0]
-        value = _df[name].values.round(2)[0]
+        value = '0'
+        unit = ''
         html = '<div class="scalar"><div class="value">:value</div><div class="label">:label</div></div>'
-        if value > 999 and value < 1000000 :
-            value = " ".join([str(np.divide(value,1000).round(2)),"K"])
-        elif value > 999999 :
-            #@ Think of considering the case of a billion ...
-            value = " ".join([str(np.divide(value,1000000).round(2)),"M"])
-        else:
-            value = str(value)
-        unit = name.replace('_',' ') if 'unit' not in item else item['unit']
+        if _df.shape[0] > 0 :
+            print (_df)
+            print ('_____________________________________')
+            name = _df.columns.tolist()[0]
+            value = _df[name].values[0]
+            if value > 999 and value < 1000000 :
+                value = " ".join([str(np.divide(value,1000).round(2)),"K"])
+            elif value > 999999 :
+                #@ Think of considering the case of a billion ...
+                value = " ".join([str(np.divide(value,1000000).round(2)),"M"])
+            else:
+                value = str(value)
+            unit = name.replace('_',' ') if 'unit' not in item else item['unit']
         return {'html':html.replace(':value',value).replace(":label",unit)}
     @staticmethod
     def column(item):
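
Reviewer note: the rewritten scalar() guards against an empty result frame before indexing values[0]; the old code raised IndexError on zero rows. As a rough sketch of the humanizing rule being applied (standalone and illustrative, the `humanize` name is made up, not the repo's API):

    import numpy as np

    def humanize(value):
        # Mirrors the K/M thresholds used in scalar(); billions still fall through.
        if 999 < value < 1000000:
            return " ".join([str(np.divide(value, 1000).round(2)), "K"])
        elif value > 999999:
            return " ".join([str(np.divide(value, 1000000).round(2)), "M"])
        return str(value)

    assert humanize(1500) == "1.5 K"
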
@@ -319,8 +324,9 @@ class Apex :
             values = df[x_cols].values.round(2).tolist()
         else:
             labels = [name.upper().replace('_',' ') for name in df.columns.tolist()]
-            df = df.astype(float)
-            values = df.values.round(2).tolist()[0] if df.shape[1] > 1 else df.values.round(2).tolist()
+            # df = df.astype(float)
+            # values = df.values.round(2).tolist()[0] if df.shape[1] > 1 else df.values.round(2).tolist()
+            values = df[[name for name in df.columns if df[name].dtype in [float,int]] ].values.round(2).tolist()
         colors = COLORS[:len(values)]
         options = {"series":values,"colors":colors,"labels":labels,"dataLabels":{"enabled":True,"style":{"colors":["#000000"]},"dropShadow":{"enabled":False}},"chart":{"type":"donut","width":200},"plotOptions":{"pie":{"customScale":.9}},"legend":{"position":"right"}}
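
Reviewer note: the new line keeps only float/int columns before rounding, since a non-numeric column made df.astype(float) throw ValueError. pandas has a built-in for this selection; an equivalent sketch (illustrative only):

    import pandas as pd

    df = pd.DataFrame({"label": ["a", "b"], "count": [1.234, 5.678]})
    values = df.select_dtypes(include="number").round(2).values.tolist()
    # [[1.23], [5.68]]
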
@@ -343,10 +349,10 @@ class engine :
         self.store_config = _config['store']
         self.info = _config['analytics']
         _args = self.store_config
-        if self.store_config['type'] == 'mongo.MongoWriter' :
-            _args['type'] = 'mongo.MongoReader'
-        else:
-            _args['type'] = 'disk.SQLiteReader'
+        if 'type' not in self.store_config :
+            #
+            # This is the newer version of data-transport
+            self.store_config['context'] = 'read'
         self.store_config = _args ;

     def filter (self,**args):
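
Reviewer note: the condition now keys off the absence of 'type' to detect the newer data-transport configuration, where a read/write 'context' replaces explicit reader/writer class names. A minimal sketch of the idea, assuming a config dict shaped like the ones in this commit (illustrative, not the library's API):

    def normalize(store_config):
        # Older configs name a transport class; newer ones carry provider + context.
        if 'type' not in store_config:
            store_config['context'] = 'read'
        return store_config
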
@@ -367,8 +373,8 @@ class engine :
         # conn = lite.connect(self.store_config['args']['path'],isolation_level=None)
         # conn.create_aggregate("stdev",1,stdev)
         DB_TYPE = 'mongo' if (type(self.reader) == transport.mongo.MongoReader) else 'sql'
-        if DB_TYPE == 'mongo' :
-            self.store_config['args']['doc'] = args['type']
+        # if DB_TYPE == 'mongo' :
+        #     self.store_config['args']['doc'] = args['type']

         self.reader = transport.factory.instance(**self.store_config)
         r = []
@@ -414,20 +420,8 @@ class engine :
             _analytics = [_analytics[index]]

         _info = list(_analytics) if 'filter' not in args else [item for item in analytics if args['filter'] == item['id']]
-        # conn = lite.connect(self.store_config['args']['path'],isolation_level=None)
-        # conn.create_aggregate("stdev",1,stdev)
-        #
-        # @TODO: Find a better way to handle database variance
-        #
-        # DB_TYPE = 'mongo' if (type(self.reader) == transport.mongo.MongoReader) else 'sql'
-
-        if 'mongo' in self.store_config['type'] :
-            DB_TYPE='mongo'
-        else:
-            DB_TYPE='sql'
-            self.store_config['args']['table'] = args['type']
-
         self.reader = transport.factory.instance(**self.store_config)
+        DB_TYPE = 'mongo' if self.store_config ['provider'] in ['mongodb','mongo'] else 'sql'
         r = []
         for row in _info :
             pipeline = row['pipeline']
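
Reviewer note: backend dispatch now reads the declared provider instead of inspecting the reader's Python class, and the per-type table assignment moved out of this block. An equivalent one-liner under the new config shape (assuming 'provider' is always present, which this commit seems to rely on):

    DB_TYPE = 'mongo' if store_config['provider'] in ('mongodb', 'mongo') else 'sql'
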
@@ -440,14 +434,22 @@ class engine :
                     continue
                 if DB_TYPE == 'sql' :
                     query = {"sql":query}
+                else:
+                    query = {DB_TYPE:query}

-                item['data'] = self.reader.read(**query) #item)
+                _df = self.reader.read(**query) #item)
+                print (query)
+                print (self.reader)
                 if 'serialize' in args :
                     # item['data'] = json.dumps(item['data'].to_dict(orient='record')) if type(item['data']) == pd.DataFrame else item['data']
-                    item['data'] = json.dumps(item['data'].to_dict('record')) if type(item['data']) == pd.DataFrame else item['data']
+                    item['data'] = json.dumps(_df.to_dict(orient='record'))
                 else:
-                    item['data'] = (pd.DataFrame(item['data']))
+                    # item['data'] = (pd.DataFrame(item['data']))
+                    item['data'] = _df
+                    pass
+                print (_df.head())
+                break
                 pipeline[index] = item
                 index += 1
             #
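
Reviewer note: read(**query) now receives the query under a key matching the backend, {"sql": ...} for relational stores and {"mongo": ...} for MongoDB. The added `break` exits the pipeline loop after the first item, before pipeline[index] is updated, which looks like leftover debugging along with the print calls. A sketch of the wrapping (illustrative):

    query = "select count(*) from logs"            # or a mongo aggregation dict
    payload = {"sql": query} if DB_TYPE == 'sql' else {DB_TYPE: query}
    # _df = reader.read(**payload)
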
@@ -225,6 +225,7 @@ if __name__ == '__main__' :
     PATH= SYS_ARGS['config'] if 'config' in SYS_ARGS else os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
     #
     # Adjusting configuration with parameters (batch,folder,resume)
+    SYS_ARGS['config'] = json.loads(open(PATH).read())
     if 'args' not in SYS_ARGS['config'] :
         SYS_ARGS['config']["args"] = {"batch":1,"resume":True,"folder":"/data"}

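
Reviewer note: the added line parses the JSON config before the 'args' default is applied; previously SYS_ARGS['config'] still held a file path string at this point, so the 'args' membership test was a substring check on the path. The same load-then-default pattern with dict.setdefault (illustrative):

    import json
    config = json.loads(open(PATH).read())
    config.setdefault('args', {"batch": 1, "resume": True, "folder": "/data"})
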
@@ -6,15 +6,16 @@ import json
 app = Flask(__name__)
 @app.route("/favicon.ico")
 def _icon():
-    return send_from_directory(os.path.join([app.root_path, 'static','img','logo.svg']),
+    return send_from_directory(os.path.join([app.root_path, 'static/img/logo.svg']),
         'favicon.ico', mimetype='image/vnd.microsoft.icon')
 @app.route("/")
 def init():
     e = SYS_ARGS['engine']
     sections = {"remits":e.info['835'],"claims":e.info['837']}
-    _args = {"sections":sections,"store":SYS_ARGS['config']['store']}
-    return render_template("setup.html",**_args)
+    print (SYS_ARGS['config']['store'])
+    _args = {"sections":sections,"store":SYS_ARGS['config']['store'],'args':{'batch':5}}
+    return render_template("index.html",**_args)
 @app.route("/format/<id>/<index>",methods=['POST'])
 def _format(id,index):
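
Reviewer note: both the old and new favicon lines pass a list to os.path.join, which raises TypeError because the function takes separate path components. If the intent is to serve the SVG logo as the favicon, a working variant might look like this (an assumption about intent, not the committed fix):

    import os
    from flask import send_from_directory

    @app.route("/favicon.ico")
    def _icon():
        # Serve static/img/logo.svg; send_from_directory takes (directory, filename).
        return send_from_directory(os.path.join(app.root_path, 'static', 'img'),
            'logo.svg', mimetype='image/svg+xml')
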
@@ -73,13 +74,16 @@ def reload():

 if __name__ == '__main__' :
     PORT = int(SYS_ARGS['port']) if 'port' in SYS_ARGS else 5500
-    DEBUG= int(SYS_ARGS['debug']) if 'debug' in SYS_ARGS else 0
+    DEBUG= int(SYS_ARGS['debug']) if 'debug' in SYS_ARGS else 1
     SYS_ARGS['context'] = SYS_ARGS['context'] if 'context' in SYS_ARGS else ''
     #
     #
     PATH= SYS_ARGS['config'] if 'config' in SYS_ARGS else os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
+    #
+    if os.path.exists(PATH) :
+        SYS_ARGS['config'] = json.loads(open(PATH).read())
     e = healthcareio.analytics.engine(PATH)
     # e.apply(type='claims',serialize=True)
     SYS_ARGS['engine'] = e

     app.run(host='0.0.0.0',port=PORT,debug=DEBUG,threaded=True)
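
Reviewer note: the config is now only parsed when the file exists, but healthcareio.analytics.engine(PATH) is still constructed unconditionally, so a missing file would surface there instead. A stricter guard might look like this (hypothetical handling, not the committed behavior):

    if os.path.exists(PATH):
        SYS_ARGS['config'] = json.loads(open(PATH).read())
    else:
        raise SystemExit('missing configuration: ' + PATH)
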
@@ -37,14 +37,17 @@ class get :

     @staticmethod
     def processes(_args):
-        _info = pd.DataFrame(smart.top.read(name='healthcare-io.py'))[['name','cpu','mem']]
+        APP_NAME ='healthcare-io'
+        _info = smart.top.read(name=APP_NAME) #pd.DataFrame(smart.top.read(name='healthcare-io'))[['name','cpu','mem']]

         if _info.shape[0] == 0 :
-            _info = pd.DataFrame({"name":["healthcare-io.py"],"cpu":[0],"mem":[0]})
+            _info = pd.DataFrame({"name":[APP_NAME],"cpu":[0],"mem":[0]})
         # _info = pd.DataFrame(_info.groupby(['name']).sum())
         # _info['name'] = ['healthcare-io.py']
         m = {'cpu':'CPU','mem':'RAM','name':'name'}
-        _info.columns = [m[name] for name in _info.columns.tolist()]
+        _info = _info.rename(columns=m)
+        # _info.columns = [m[name] for name in _info.columns.tolist() if name in m]
         _info.index = np.arange(_info.shape[0])

         charts = []
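
Reviewer note: DataFrame.rename(columns=m) leaves unmapped columns untouched, whereas the old list comprehension raised KeyError whenever smart.top.read() returned a column outside m. Quick illustration:

    import pandas as pd

    df = pd.DataFrame({"cpu": [1.0], "mem": [2.0], "pid": [42]})
    df = df.rename(columns={"cpu": "CPU", "mem": "RAM"})
    # columns are now ['CPU', 'RAM', 'pid'], no KeyError on 'pid'
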
@@ -56,23 +59,20 @@ class get :
                     {"data":df, "chart":{"type":"radial","axis":{"x":label,"y":"name"}}}
                 )['apex']
             )
-        #
-        # This will update the counts for the processes, upon subsequent requests so as to show the change
-        #
-        N = 0
-        lprocs = []
-        for proc in get.PROCS :
-            if proc.is_alive() :
-                lprocs.append(proc)
-        N = len(lprocs)
-        get.PROCS = lprocs
-        return {"process":{"chart":charts,"counts":N}}
+        return {"process":{"chart":charts,"counts":_info.shape[0]}}
     @staticmethod
     def files (_args):
-        _info = smart.folder.read(path='/data')
+        folder = _args['args']['folder']
+        _info = smart.folder.read(path=folder)

         N = _info.files.tolist()[0]
-        if 'mongo' in _args['store']['type'] :
-            store_args = dict(_args['store'].copy(),**{"type":"mongo.MongoReader"})
+        store_args = _args['store'].copy()
+        store_args['context'] = 'read'

+        # if 'mongo' in _args['store']['type'] :
+        if _args['store']['provider'] in ['mongo', 'mongodb']:
+            # store_args = dict(_args['store'].copy(),**{"type":"mongo.MongoReader"})
             # reader = transport.factory.instance(**_args)

             pipeline = [{"$group":{"_id":"$name","count":{"$sum":{"$cond":[{"$eq":["$completed",True]},1,0]}} }},{"$group":{"_id":None,"count":{"$sum":"$count"}}},{"$project":{"_id":0,"status":"completed","count":1}}]
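
Reviewer note: files() now takes the scanned folder from the request arguments instead of a hard-coded /data, and the store copy gets context='read' up front so one config serves both the mongo and sql branches; processes() likewise now reports the row count from smart.top rather than tracking get.PROCS liveness. The call shape this assumes (values illustrative):

    _args = {"args": {"folder": "/data"},
             "store": {"provider": "sqlite", "table": "logs"}}
    # get.files(_args) would then read completion counts from the configured store
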
@@ -83,12 +83,15 @@ class get :


         else:
-            store_args = dict(_args['store'].copy(),**{"type":"disk.SQLiteReader"})
-            store_args['args']['table'] = 'logs'
+            # store_args = dict(_args['store'].copy(),**{"type":"disk.SQLiteReader"})
+            # store_args['args']['table'] = 'logs'
+            store_args['table'] = 'logs'
             query= {"sql":"select count(distinct json_extract(data,'$.name')) as count, 'completed' status from logs where json_extract(data,'$.completed') = true"}
             _query={"sql":"select json_extract(data,'$.parse') as type,count(distinct json_extract(data,'$.name')) as count from logs group by type"} #-- distribution claim/remits
         reader = transport.factory.instance(**store_args)
         _info = pd.DataFrame(reader.read(**query))

         if not _info.shape[0] :
             _info = pd.DataFrame({"status":["completed"],"count":[0]})
         _info['count'] = np.round( (_info['count'] * 100 )/N,2)
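
Reviewer note: for SQLite the table is now set at the top level of the store config ('table') rather than nested under 'args', matching the newer data-transport layout this commit migrates toward. Assuming that layout, the factory call would look roughly like this (keys illustrative, 'database' is a guess):

    store_args = {"provider": "sqlite", "database": "healthcareio.db3",
                  "table": "logs", "context": "read"}
    reader = transport.factory.instance(**store_args)
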
@@ -97,11 +100,6 @@ class get :
         #
         # Let us classify the files now i.e claims / remits
         #
-
-
-        # pipeline = [{"$group":{"_id":"$parse","claims":{"$addToSet":"$name"}}},{"$project":{"_id":0,"type":"$_id","count":{"$size":"$claims"}}}]
-        # _args = {"aggregate":"logs","cursor":{},"allowDiskUse":True,"pipeline":pipeline}
-        # r = pd.DataFrame(reader.read(mongo=_args))
         r = pd.DataFrame(reader.read(**_query)) #-- distribution claims/remits
         r = Apex.apply({"chart":{"type":"donut","axis":{"x":"count","y":"type"}},"data":r})['apex']
         r['chart']['height'] = '100%'
@@ -63,10 +63,10 @@
     //
     // We should insure the listeners are enabled
     if(monitor.listen.handler == null){
-        monitor.listen.handler = setInterval(
+        /*monitor.listen.handler = setInterval(
             function(){
                 console.log('running ...')
-                monitor.data()},5000)
+                monitor.data()},5000)*/

     }
     }else{
@@ -24,6 +24,7 @@ import sys
 from itertools import islice
 from multiprocessing import Process
 import transport
+from transport import providers
 import jsonmerge

 import copy
@@ -236,6 +237,53 @@ class Parser (Process):
             _config[_id] = [_config[_id]]
         config['parser'] = _config
         return config
+    @staticmethod
+    def init(**_args):
+        """
+        This function allows to initialize the database that will store the claims if need be
+        :path   configuration file
+        """
+        PATH = os.sep.join([os.environ['HOME'],'.healthcareio'])
+        filename = os.sep.join([PATH,'config.json'])
+
+        filename = _args['path'] if 'path' in _args else filename
+        info = None
+        if os.path.exists(filename):
+            #
+            # Loading the configuration file (JSON format)
+            file = open(filename)
+            info = json.loads(file.read())
+
+            OUTPUT_FOLDER = info['out-folder']
+            if 'output-folder' not in info and not os.path.exists(OUTPUT_FOLDER) :
+                os.mkdir(OUTPUT_FOLDER)
+            elif 'output-folder' in info and not os.path.exists(info['out-folder']) :
+                os.mkdir(info['out-folder'])
+            # if 'type' in info['store'] :
+            lwriter = None
+            IS_SQL = False
+            if 'type' in info['store'] and info['store']['type'] == 'disk.SQLiteWriter' :
+                lwriter = transport.factory.instance(**info['store'])
+                IS_SQL = True
+            elif 'provider' in info['store'] and info['store']['provider'] == 'sqlite' :
+                lwriter = transport.instance(**info['store']) ;
+                IS_SQL = [providers.SQLITE,providers.POSTGRESQL,providers.NETEZZA,providers.MYSQL,providers.MARIADB]
+
+            if lwriter and IS_SQL:
+                for key in info['schema'] :
+                    if key != 'logs' :
+                        _id = 'claims' if key == '837' else 'remits'
+                    else:
+                        _id = key
+
+                    if not lwriter.has(table=_id) :
+                        lwriter.apply(info['schema'][key]['create'])
+                # [lwriter.apply( info['schema'][key]['create']) for key in info['schema'] if not lwriter.has(table=key)]
+                lwriter.close()
+
+        return info

     def __init__(self,path):
         """
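
Reviewer note: the new Parser.init() bootstraps the SQL tables declared under the config's 'schema' section and is idempotent thanks to the has(table=...) check. One oddity: the sqlite branch assigns a list of provider constants to IS_SQL, which is truthy but not a boolean, so the later `if lwriter and IS_SQL` passes either way. A usage sketch (the keyword comes from the docstring; the path is an example):

    info = Parser.init(path='/home/user/.healthcareio/config.json')
    # returns the parsed configuration, or None when the file does not exist
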