bug fixes: mongodb, common, nextcloud
This commit is contained in:
parent 56bdda17b7
commit d74372f645
@@ -27,6 +27,7 @@ import json
import importlib
import sys
import sqlalchemy
from datetime import datetime
if sys.version_info[0] > 2 :
    # from transport.common import Reader, Writer,Console #, factory
    from transport import disk

@@ -83,16 +84,19 @@ import os
# PGSQL = POSTGRESQL
# import providers

class IEncoder (json.JSONEncoder):
    def default (self,object):
# class IEncoder (json.JSONEncoder):
    def IEncoder (self,object):
        if type(object) == np.integer :
            return int(object)
        elif type(object) == np.floating:
            return float(object)
        elif type(object) == np.ndarray :
            return object.tolist()
        elif type(object) == datetime :
            return o.isoformat()
        else:
            return super(IEncoder,self).default(object)

class factory :
    TYPE = {"sql":{"providers":["postgresql","mysql","neteeza","bigquery","mariadb","redshift"]}}
    PROVIDERS = {

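The hunk above keeps the JSON encoder in the package root next to the old, commented-out variant whose method was named IEncoder instead of default. Note that its datetime branch returns o.isoformat() although no o is defined in the method. A minimal, self-contained sketch of the same encoder, assuming object.isoformat() was intended:

    import json
    from datetime import datetime

    import numpy as np

    class IEncoder(json.JSONEncoder):
        """JSON encoder that knows how to render numpy scalars/arrays and datetime values."""
        def default(self, obj):
            if isinstance(obj, np.integer):
                return int(obj)            # numpy integer -> plain int
            elif isinstance(obj, np.floating):
                return float(obj)          # numpy float -> plain float
            elif isinstance(obj, np.ndarray):
                return obj.tolist()        # array -> list
            elif isinstance(obj, datetime):
                return obj.isoformat()     # datetime -> ISO-8601 string
            return super(IEncoder, self).default(obj)

The copy added to the common module below does use object.isoformat(), and that is the version the disk, mongo and nextcloud writers import.
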
@@ -25,7 +25,7 @@ from multiprocessing import RLock
import queue
# import couch
# import mongo

from datetime import datetime

class IO:
    def init(self,**args):

@@ -39,6 +39,19 @@ class IO:
                continue
            value = args[field]
            setattr(self,field,value)
class IEncoder (json.JSONEncoder):
    def default (self,object):
        if type(object) == np.integer :
            return int(object)
        elif type(object) == np.floating:
            return float(object)
        elif type(object) == np.ndarray :
            return object.tolist()
        elif type(object) == datetime :
            return object.isoformat()
        else:
            return super(IEncoder,self).default(object)

class Reader (IO):
    """
    This class is an abstraction of a read functionalities of a data store

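With the encoder now defined alongside the IO/Reader/Writer base classes, downstream writers can serialize payloads that mix numpy scalars, arrays and timestamps by passing cls=IEncoder to json.dumps. A small usage sketch, assuming the transport package from this repository is installed:

    import json
    from datetime import datetime

    import numpy as np
    from transport.common import IEncoder   # as added in the hunk above

    record = {
        "count": np.int64(42),           # numpy integer
        "scores": np.array([0.1, 0.9]),  # numpy array
        "created": datetime.now(),       # datetime
    }

    # Without cls=IEncoder, json.dumps raises a TypeError for the numpy and datetime values.
    print(json.dumps(record, cls=IEncoder))
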
@@ -12,6 +12,8 @@ import json
import sqlite3
import pandas as pd
from multiprocessing import Lock
from transport.common import Reader, Writer, IEncoder

class DiskReader(Reader) :
    """
    This class is designed to read data from disk (location on hard drive)

@@ -221,6 +223,8 @@ class SQLiteWriter(SQLite,DiskWriter) :
        info = info.to_dict(orient='records')

        if not self.fields :

            _rec = info[0]
            self.init(list(_rec.keys()))

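The change above initializes the column list lazily: when no fields were configured, the keys of the first record returned by DataFrame.to_dict(orient='records') are used. A minimal sketch of that step with a hypothetical frame:

    import pandas as pd

    _data = pd.DataFrame({"name": ["alice", "bob"], "age": [34, 29]})   # hypothetical input
    info = _data.to_dict(orient='records')   # [{'name': 'alice', 'age': 34}, {'name': 'bob', 'age': 29}]

    fields = []
    if not fields:
        _rec = info[0]                       # the first record carries the column names
        fields = list(_rec.keys())

    print(fields)                            # ['name', 'age']
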
@@ -231,7 +235,8 @@ class SQLiteWriter(SQLite,DiskWriter) :
        sql = " " .join(["INSERT INTO ",self.table,"(", ",".join(self.fields) ,")", "values(:values)"])
        for row in info :
            stream =["".join(["",value,""]) if type(value) == str else value for value in row.values()]
            stream = json.dumps(stream).replace("[","").replace("]","")
            stream = json.dumps(stream,cls=IEncoder)
            stream = stream.replace("[","").replace("]","")

            self.conn.execute(sql.replace(":values",stream) )

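The rewritten insert path splits serialization from bracket-stripping: the row values are dumped with cls=IEncoder first, so numpy and datetime values no longer break json.dumps, and only then are the surrounding brackets removed before being spliced into the statement. A standalone sketch of what those two lines produce, with a hypothetical table name and row:

    import json
    from datetime import datetime

    import numpy as np
    from transport.common import IEncoder   # same import the hunk adds to the disk module

    row = {"name": "alice", "age": np.int64(34), "joined": datetime(2023, 5, 1)}   # hypothetical record

    stream = ["".join(["", value, ""]) if type(value) == str else value for value in row.values()]
    stream = json.dumps(stream, cls=IEncoder)          # '["alice", 34, "2023-05-01T00:00:00"]'
    stream = stream.replace("[", "").replace("]", "")  # '"alice", 34, "2023-05-01T00:00:00"'

    sql = " ".join(["INSERT INTO ", "users", "(", ",".join(row.keys()), ")", "values(:values)"])
    print(sql.replace(":values", stream))
    # INSERT INTO  users ( name,age,joined ) values("alice", 34, "2023-05-01T00:00:00")

A parameterized execute with ? placeholders would sidestep the quoting concerns entirely; the commit keeps the string-splicing approach and only fixes the serialization step.
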
@@ -15,7 +15,7 @@ import gridfs
# from transport import Reader,Writer
import sys
if sys.version_info[0] > 2 :
    from transport.common import Reader, Writer
    from transport.common import Reader, Writer, IEncoder
else:
    from common import Reader, Writer
import json

@@ -102,7 +102,7 @@ class MongoReader(Mongo,Reader):
        if 'pipeline' in args :
            cmd['pipeline']= args['pipeline']
        if 'aggregate' not in cmd :
            cmd['aggregate'] = self.collection
            cmd['aggregate'] = self.uid
        if 'pipeline' not in args or 'aggregate' not in cmd :
            cmd = args['mongo'] if 'mongo' in args else args['cmd']
            if "aggregate" in cmd :

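The reader assembles a raw aggregate database command instead of calling Collection.aggregate(), and the fix sets the aggregate field to self.uid, presumably the collection name, since the aggregate command takes the target collection's name as its value. A hedged sketch of such a command document run through pymongo; the connection string, database and collection names are placeholders:

    from pymongo import MongoClient

    client = MongoClient("mongodb://localhost:27017")   # placeholder connection string
    db = client["analytics"]                            # placeholder database

    # The value of "aggregate" must be the collection *name*; "cursor" is required
    # when issuing the command directly.
    cmd = {
        "aggregate": "logs",                            # placeholder collection name
        "pipeline": [{"$match": {}}, {"$count": "rows"}],
        "cursor": {},
    }
    result = db.command(cmd)
    print(result["cursor"]["firstBatch"])
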
@@ -182,7 +182,7 @@ class MongoWriter(Mongo,Writer):
            for row in rows :
                if type(row['_id']) == ObjectId :
                    row['_id'] = str(row['_id'])
            stream = Binary(json.dumps(collection).encode())
            stream = Binary(json.dumps(collection,cls=IEncoder).encode())
            collection.delete_many({})
            now = "-".join([str(datetime.now().year()),str(datetime.now().month), str(datetime.now().day)])
            name = ".".join([self.uid,'archive',now])+".json"

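The archive step now dumps the data with cls=IEncoder before wrapping it in a BSON Binary. One caveat in the surrounding context: datetime.now().year() treats year as a method, but year, month and day are attributes of datetime. The sketch below builds the archive name with that assumption corrected, dumps a hypothetical rows list rather than the collection handle, and uses a placeholder in place of self.uid:

    import json
    from datetime import datetime

    from bson.binary import Binary
    from transport.common import IEncoder   # as imported in the mongo hunk above

    rows = [{"_id": "651f0c...", "value": 3.14, "at": datetime.now()}]   # hypothetical, _id already stringified

    stream = Binary(json.dumps(rows, cls=IEncoder).encode())   # bytes payload for the archive

    _now = datetime.now()
    now = "-".join([str(_now.year), str(_now.month), str(_now.day)])   # attributes, not calls
    uid = "logs"                                                       # placeholder for self.uid
    name = ".".join([uid, "archive", now]) + ".json"
    print(name)                                                        # e.g. logs.archive.2024-1-5.json
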
@@ -3,7 +3,7 @@ We are implementing transport to and from nextcloud (just like s3)
"""
import os
import sys
from transport.common import Reader,Writer
from transport.common import Reader,Writer, IEncoder
import pandas as pd
from io import StringIO
import json

@@ -73,7 +73,7 @@ class NextcloudWriter (Nextcloud,Writer):
            _data.to_csv(f,index=False)
            _content = f.getvalue()
        elif type(_data) == dict :
            _content = json.dumps(_data)
            _content = json.dumps(_data,cls=IEncoder)
        else:
            _content = str(_data)
        self._handler.put_file_contents(_uri,_content)

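The Nextcloud writer branches on the payload type: DataFrames are rendered to CSV through an in-memory buffer, dicts go through json.dumps (now with cls=IEncoder), and anything else falls back to str(), before a single put_file_contents upload. A condensed sketch of that branching; the serialize helper is hypothetical, and the handler is only assumed to expose put_file_contents(uri, content) as the hunk shows:

    import json
    from io import StringIO

    import pandas as pd
    from transport.common import IEncoder   # as imported in the nextcloud hunk above

    def serialize(_data):
        """Mirror of the writer's branching: DataFrame -> CSV, dict -> JSON, anything else -> str."""
        if type(_data) == pd.DataFrame:
            f = StringIO()
            _data.to_csv(f, index=False)
            _content = f.getvalue()
        elif type(_data) == dict:
            _content = json.dumps(_data, cls=IEncoder)   # IEncoder handles numpy/datetime values
        else:
            _content = str(_data)
        return _content

    # handler.put_file_contents("/backups/data.csv", serialize(pd.DataFrame({"x": [1, 2]})))
    print(serialize({"x": [1, 2]}))
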