bug fix ...

Steve Nyemba 2024-02-16 11:54:05 -06:00
parent ea30cd1c00
commit 278d639fbf
2 changed files with 14 additions and 5 deletions

View File

@@ -320,7 +320,7 @@ class Generator (Learner):
 self.network_args['candidates'] = int(_args['candidates']) if 'candidates' in _args else 1
 # filename = os.sep.join([self.network_args['logs'],'output',self.network_args['context'],'map.json'])
 _suffix = self.network_args['context']
-filename = os.sep.join([self.network_args['logs'],'output',self.network_args['context'],'meta-',_suffix,'.json'])
+filename = os.sep.join([self.network_args['logs'],'output',self.network_args['context'],'meta-',_suffix+'.json'])
 self.log(**{'action':'init-map','input':{'filename':filename,'exists':os.path.exists(filename)}})
 if os.path.exists(filename):
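A note on the hunk above: os.sep.join puts a path separator between every element of the list, so in the old line the trailing '.json' became its own path component. The fix glues the suffix and the extension into one element before joining. A minimal sketch with made-up values (the 'logs' directory and 'demo' context are illustrative, not taken from the project configuration):

    import os

    _logs, _context = 'logs', 'demo'     # hypothetical values, for illustration only
    _suffix = _context

    # old: the suffix and '.json' are separate list items, so a separator lands between them
    old = os.sep.join([_logs, 'output', _context, 'meta-', _suffix, '.json'])
    # e.g. on POSIX: logs/output/demo/meta-/demo/.json

    # fixed: the suffix and extension form one file-name element at the end
    new = os.sep.join([_logs, 'output', _context, 'meta-', _suffix + '.json'])
    # e.g. on POSIX: logs/output/demo/meta-/demo.json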
@@ -439,7 +439,7 @@ class Generator (Learner):
 """
-_columns = [_item['name'] for _item in _schema]
+_columns = [_item['name'] for _item in _schema if _item['name'] in _df.columns]
 _map = {'INT64':np.int64,'FLOAT64':np.float64,'DATE':np.datetime64,'TIMESTAMP':(lambda field: pd.to_datetime(field).dt.tz_localize(None))}
 # pd.to_datetime(_df.measurement_datetime).dt.tz_localize(None)
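The change above keeps only schema fields that actually exist in the frame, which matters when _df comes from a query that selects a subset of the table. A self-contained sketch of the same filter, with hypothetical column names:

    import pandas as pd

    # hypothetical schema and frame, only to show the filter's effect
    _schema = [{'name': 'id', 'type': 'INT64'}, {'name': 'dob', 'type': 'DATE'}]
    _df = pd.DataFrame({'id': [1, 2]})                     # 'dob' is not in the frame

    _all  = [_item['name'] for _item in _schema]                                   # ['id', 'dob']
    _kept = [_item['name'] for _item in _schema if _item['name'] in _df.columns]   # ['id']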
@@ -476,7 +476,7 @@ class Generator (Learner):
 # bqw = transport.factory.instance(**_store['target'])
 # bqw.write(_df,schema=_schema)
-return _df[_columns]
+return _df #[_columns]
 def post(self,_candidates):
 if 'target' in self.store :
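Returning the full frame rather than _df[_columns] sidesteps a KeyError when _columns still names a field the frame lacks, and leaves any trimming to the caller. A quick illustration with throwaway names:

    import pandas as pd

    _df = pd.DataFrame({'id': [1]})
    _columns = ['id', 'dob']                 # 'dob' is hypothetical and absent from _df

    # _df[_columns] would raise KeyError because 'dob' is not a column
    present = [c for c in _columns if c in _df.columns]
    trimmed = _df[present]                   # defensive selection, if trimming is wanted
    full = _df                               # what the patched return now hands back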
@@ -492,7 +492,16 @@ class Generator (Learner):
 _haslist = np.sum([type(_item)==list for _item in self.columns]) > 0
 _schema = self.get_schema()
+#
+# If the schema doesn't match the data we need to skip it
+# This happens when the data comes from a query, the post processing needs to handle this
+#
+# _names = [_field['name'] for _field in _schema]
+# _columns = _candidates[0].columns.tolist()
+# _common = list( set(_columns) & set(_names) )
+# if not (len(_common) == len(_columns) and len(_names) == len(_common)) :
+# _schema = None
 for _iodf in _candidates :
 _df = self._df.copy()
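The commented-out block above sketches how post() could detect a schema/data mismatch: compare the schema field names with the first candidate's columns and drop the schema when the two sets differ. Roughly, under the same assumptions (the names below are made up):

    # hypothetical inputs mirroring the commented-out check
    _schema = [{'name': 'id'}, {'name': 'dob'}]
    _columns = ['id', 'gender']              # columns of a candidate frame

    _names = [_field['name'] for _field in _schema]
    _common = list(set(_columns) & set(_names))

    # keep the schema only if it covers exactly the frame's columns
    if not (len(_common) == len(_columns) and len(_names) == len(_common)):
        _schema = None                       # skip the cast; post-processing copes without it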

View File

@@ -20,7 +20,7 @@ import os
 class State :
 @staticmethod
-def apply(_data,lpointers):
+def apply(_data,lpointers,_config={}):
 """
 This function applies a pipeline against a given data-frame, the calling code must decide whether it is a pre/post
 :_data data-frame
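One caveat on the new signature in the second file: a mutable default such as _config={} is created once and shared across calls, so any in-place modification inside apply would leak into later calls. If that ever matters, the usual idiom (shown only as a suggestion under that assumption, not what this commit does) is a None default:

    class State:
        @staticmethod
        def apply(_data, lpointers, _config=None):
            # build a fresh dict per call instead of reusing one shared default instance
            _config = {} if _config is None else _config
            return _data, _config            # placeholder body for the sketch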