Bug fix: added an `info` package exposing `__version__` and `__author__`, replacing the imports from `transport/version.py`.

This commit is contained in:
Steve Nyemba 2024-01-15 15:08:18 -06:00
parent c27beb16cc
commit c6ff08ea6b
5 changed files with 62 additions and 45 deletions

View File

@ -4,7 +4,10 @@ This is a build file for the
from setuptools import setup, find_packages from setuptools import setup, find_packages
import os import os
import sys import sys
from version import __version__,__author__ # from version import __version__,__author__
from info import __version__, __author__
# __author__ = 'The Phi Technology' # __author__ = 'The Phi Technology'
# __version__= '1.8.0' # __version__= '1.8.0'
@ -15,7 +18,7 @@ args = {
"version":__version__, "version":__version__,
"author":__author__,"author_email":"info@the-phi.com", "author":__author__,"author_email":"info@the-phi.com",
"license":"MIT", "license":"MIT",
"packages":["transport"]} "packages":["transport","info"]}
args["keywords"]=['mongodb','couchdb','rabbitmq','file','read','write','s3','sqlite'] args["keywords"]=['mongodb','couchdb','rabbitmq','file','read','write','s3','sqlite']
args["install_requires"] = ['pyncclient','pymongo','sqlalchemy<2.0.0','pandas','typer','pandas-gbq','numpy','cloudant','pika','nzpy','boto3','boto','pyarrow','google-cloud-bigquery','google-cloud-bigquery-storage','flask-session','smart_open','botocore','psycopg2-binary','mysql-connector-python'] args["install_requires"] = ['pyncclient','pymongo','sqlalchemy<2.0.0','pandas','typer','pandas-gbq','numpy','cloudant','pika','nzpy','boto3','boto','pyarrow','google-cloud-bigquery','google-cloud-bigquery-storage','flask-session','smart_open','botocore','psycopg2-binary','mysql-connector-python']
args["url"] = "https://healthcareio.the-phi.com/git/code/transport.git" args["url"] = "https://healthcareio.the-phi.com/git/code/transport.git"

View File

@ -21,8 +21,8 @@ The configuration for the data-store is as follows :
provider:'mongodb',[port:27017],[host:localhost],db:<name>,doc:<_name>,context:<read|write> provider:'mongodb',[port:27017],[host:localhost],db:<name>,doc:<_name>,context:<read|write>
""" """
import pandas as pd # import pandas as pd
import numpy as np # import numpy as np
import json import json
import importlib import importlib
import sys import sys
@ -38,7 +38,8 @@ if sys.version_info[0] > 2 :
from transport import mongo as mongo from transport import mongo as mongo
from transport import sql as sql from transport import sql as sql
from transport import etl as etl from transport import etl as etl
from transport.version import __version__ # from transport.version import __version__
from info import __version__,__author__
from transport import providers from transport import providers
else: else:
from common import Reader, Writer,Console #, factory from common import Reader, Writer,Console #, factory
@ -51,10 +52,10 @@ else:
import etl import etl
from version import __version__ from version import __version__
import providers import providers
import psycopg2 as pg # import psycopg2 as pg
import mysql.connector as my # import mysql.connector as my
from google.cloud import bigquery as bq # from google.cloud import bigquery as bq
import nzpy as nz #--- netezza drivers # import nzpy as nz #--- netezza drivers
import os import os
# class providers : # class providers :
@ -85,42 +86,42 @@ import os
# import providers # import providers
# class IEncoder (json.JSONEncoder): # class IEncoder (json.JSONEncoder):
def IEncoder (self,object): # def IEncoder (self,object):
if type(object) == np.integer : # if type(object) == np.integer :
return int(object) # return int(object)
elif type(object) == np.floating: # elif type(object) == np.floating:
return float(object) # return float(object)
elif type(object) == np.ndarray : # elif type(object) == np.ndarray :
return object.tolist() # return object.tolist()
elif type(object) == datetime : # elif type(object) == datetime :
return o.isoformat() # return o.isoformat()
else: # else:
return super(IEncoder,self).default(object) # return super(IEncoder,self).default(object)
class factory : class factory :
TYPE = {"sql":{"providers":["postgresql","mysql","neteeza","bigquery","mariadb","redshift"]}} # TYPE = {"sql":{"providers":["postgresql","mysql","neteeza","bigquery","mariadb","redshift"]}}
PROVIDERS = { # PROVIDERS = {
"etl":{"class":{"read":etl.instance,"write":etl.instance}}, # "etl":{"class":{"read":etl.instance,"write":etl.instance}},
# "console":{"class":{"write":Console,"read":Console}}, # # "console":{"class":{"write":Console,"read":Console}},
"file":{"class":{"read":disk.DiskReader,"write":disk.DiskWriter}}, # "file":{"class":{"read":disk.DiskReader,"write":disk.DiskWriter}},
"sqlite":{"class":{"read":disk.SQLiteReader,"write":disk.SQLiteWriter}}, # "sqlite":{"class":{"read":disk.SQLiteReader,"write":disk.SQLiteWriter}},
"postgresql":{"port":5432,"host":"localhost","database":None,"driver":pg,"default":{"type":"VARCHAR"},"class":{"read":sql.SQLReader,"write":sql.SQLWriter}}, # "postgresql":{"port":5432,"host":"localhost","database":None,"driver":pg,"default":{"type":"VARCHAR"},"class":{"read":sql.SQLReader,"write":sql.SQLWriter}},
"redshift":{"port":5432,"host":"localhost","database":None,"driver":pg,"default":{"type":"VARCHAR"},"class":{"read":sql.SQLReader,"write":sql.SQLWriter}}, # "redshift":{"port":5432,"host":"localhost","database":None,"driver":pg,"default":{"type":"VARCHAR"},"class":{"read":sql.SQLReader,"write":sql.SQLWriter}},
"bigquery":{"class":{"read":sql.BQReader,"write":sql.BQWriter}}, # "bigquery":{"class":{"read":sql.BQReader,"write":sql.BQWriter}},
"mysql":{"port":3306,"host":"localhost","default":{"type":"VARCHAR(256)"},"driver":my,"class":{"read":sql.SQLReader,"write":sql.SQLWriter}}, # "mysql":{"port":3306,"host":"localhost","default":{"type":"VARCHAR(256)"},"driver":my,"class":{"read":sql.SQLReader,"write":sql.SQLWriter}},
"mariadb":{"port":3306,"host":"localhost","default":{"type":"VARCHAR(256)"},"driver":my,"class":{"read":sql.SQLReader,"write":sql.SQLWriter}}, # "mariadb":{"port":3306,"host":"localhost","default":{"type":"VARCHAR(256)"},"driver":my,"class":{"read":sql.SQLReader,"write":sql.SQLWriter}},
"mongo":{"port":27017,"host":"localhost","class":{"read":mongo.MongoReader,"write":mongo.MongoWriter}}, # "mongo":{"port":27017,"host":"localhost","class":{"read":mongo.MongoReader,"write":mongo.MongoWriter}},
"couch":{"port":5984,"host":"localhost","class":{"read":couch.CouchReader,"write":couch.CouchWriter}}, # "couch":{"port":5984,"host":"localhost","class":{"read":couch.CouchReader,"write":couch.CouchWriter}},
"netezza":{"port":5480,"driver":nz,"default":{"type":"VARCHAR(256)"},"class":{"read":sql.SQLReader,"write":sql.SQLWriter}}, # "netezza":{"port":5480,"driver":nz,"default":{"type":"VARCHAR(256)"},"class":{"read":sql.SQLReader,"write":sql.SQLWriter}},
"rabbitmq":{"port":5672,"host":"localhost","class":{"read":queue.QueueReader,"write":queue.QueueWriter,"listen":queue.QueueListener,"listener":queue.QueueListener},"default":{"type":"application/json"}}} # "rabbitmq":{"port":5672,"host":"localhost","class":{"read":queue.QueueReader,"write":queue.QueueWriter,"listen":queue.QueueListener,"listener":queue.QueueListener},"default":{"type":"application/json"}}}
# # #
# creating synonyms # # creating synonyms
PROVIDERS['mongodb'] = PROVIDERS['mongo'] # PROVIDERS['mongodb'] = PROVIDERS['mongo']
PROVIDERS['couchdb'] = PROVIDERS['couch'] # PROVIDERS['couchdb'] = PROVIDERS['couch']
PROVIDERS['bq'] = PROVIDERS['bigquery'] # PROVIDERS['bq'] = PROVIDERS['bigquery']
PROVIDERS['sqlite3'] = PROVIDERS['sqlite'] # PROVIDERS['sqlite3'] = PROVIDERS['sqlite']
PROVIDERS['rabbit'] = PROVIDERS['rabbitmq'] # PROVIDERS['rabbit'] = PROVIDERS['rabbitmq']
PROVIDERS['rabbitmq-server'] = PROVIDERS['rabbitmq'] # PROVIDERS['rabbitmq-server'] = PROVIDERS['rabbitmq']
@staticmethod @staticmethod
def instance(**_args): def instance(**_args):

View File

@ -15,12 +15,26 @@ import gridfs
# from transport import Reader,Writer # from transport import Reader,Writer
import sys import sys
if sys.version_info[0] > 2 : if sys.version_info[0] > 2 :
from transport.common import Reader, Writer, IEncoder from transport.common import Reader, Writer
else: else:
from common import Reader, Writer from common import Reader, Writer
import json import json
import re import re
from multiprocessing import Lock, RLock from multiprocessing import Lock, RLock
def IEncoder (self,object):
if type(object) == np.integer :
return int(object)
elif type(object) == np.floating:
return float(object)
elif type(object) == np.ndarray :
return object.tolist()
elif type(object) == datetime :
return o.isoformat()
else:
return super(IEncoder,self).default(object)
class Mongo : class Mongo :
lock = RLock() lock = RLock()
""" """

View File

@ -1 +0,0 @@
transport/version.py