Merge pull request 'dev' (#2) from dev into master
Reviewed-on: https://dev.the-phi.com/git/utilities-tools/data-transport/pulls/2
Commit: 563f3375d7

Changed files: setup.py, plus the module files whose hunks appear below.
setup.py
@@ -4,11 +4,12 @@ This is a build file for the
 from setuptools import setup, find_packages
 import os
 import sys
+from transport import __version__
 def read(fname):
     return open(os.path.join(os.path.dirname(__file__), fname)).read()
 args = {
     "name":"data-transport",
-    "version":"1.7.6",
+    "version":__version__,
     "author":"The Phi Technology LLC","author_email":"info@the-phi.com",
     "license":"MIT",
     "packages":["transport"]}
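Net effect of this hunk: setup.py stops hard-coding the version string and single-sources it from the package instead (the `__version__` it imports is presumably the one added to the transport package's own module in the next hunk). A minimal sketch of the resulting build file, under that assumption; the final setup(**args) call is not part of the hunk and is assumed:

from setuptools import setup, find_packages
import os
import sys
from transport import __version__   # single source of truth for the version

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

args = {
    "name": "data-transport",
    "version": __version__,          # was the hard-coded string "1.7.6"
    "author": "The Phi Technology LLC", "author_email": "info@the-phi.com",
    "license": "MIT",
    "packages": ["transport"]}

setup(**args)                        # assumed: the call itself is outside this hunk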
@@ -21,6 +21,7 @@ The configuration for the data-store is as follows :
 provider:'mongodb',[port:27017],[host:localhost],db:<name>,doc:<_name>,context:<read|write>
 """
 __author__ = 'The Phi Technology'
+__version__= '1.7.8'
 import pandas as pd
 import numpy as np
 import json
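The hunk header above quotes the module docstring that describes how a data store is configured. For reference, a configuration dictionary following that format would look like the sketch below; the database and collection names are made up, and how the dict is handed to the library is not shown in this diff:

# provider:'mongodb',[port:27017],[host:localhost],db:<name>,doc:<_name>,context:<read|write>
# Bracketed keys are optional and default to the values shown.
config = {
    "provider": "mongodb",
    "host": "localhost",   # optional
    "port": 27017,         # optional
    "db": "analytics",     # <name>
    "doc": "logs",         # <_name>
    "context": "read",     # read | write
}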
@@ -298,7 +298,17 @@ class SQLWriter(SQLRW,Writer):
 
         try:
             table = _args['table'] if 'table' in _args else self.table
-            self.schema = _args['schema'] if 'schema' in _args else self.schema
+            #
+            # In SQL, schema can stand for namespace or the structure of a table
+            # In case we have a list, we are likely dealing with table structure
+            #
+            if 'schema' in _args :
+                if type(_args['schema']) == str :
+                    self.schema = _args['schema'] if 'schema' in _args else self.schema
+                elif type(_args['schema']) == list:
+                    self.make(schema=_args['schema'])
+                pass
+            # self.schema = _args['schema'] if 'schema' in _args else self.schema
             table = self._tablename(table)
 
             _sql = "INSERT INTO :table (:fields) VALUES (:values)".replace(":table",table) #.replace(":table",self.table).replace(":fields",_fields)
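The added branch resolves the ambiguity the new comment describes: a string schema is treated as a namespace and stored on the writer, while a list is treated as a table definition and forwarded to self.make(). A standalone reduction of that dispatch follows; the exact shape expected by make() is not spelled out in this hunk, so the name/type keys in the illustrative field list are borrowed from the BigQuery hunk below:

def apply_schema(writer, **_args):
    # Illustrative reduction of the new SQLWriter logic above.
    if 'schema' in _args:
        if isinstance(_args['schema'], str):
            # e.g. schema='analytics' -> a namespace / SQL schema name
            writer.schema = _args['schema']
        elif isinstance(_args['schema'], list):
            # e.g. schema=[{"name": "id", "type": "INTEGER"},
            #              {"name": "label", "type": "VARCHAR(64)"}]
            # -> a table structure, handed to make() to create the table
            writer.make(schema=_args['schema'])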
@@ -385,7 +395,7 @@ class BigQuery:
         try:
             if table :
                 _dataset = self.dataset if 'dataset' not in _args else _args['dataset']
-                sql = f"""SELECT column_name as field_name, data_type as field_type FROM {_dataset}.INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{table}' """
+                sql = f"""SELECT column_name as name, data_type as type FROM {_dataset}.INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{table}' """
                 return self.read(sql=sql).to_dict(orient='records')
                 # ref = self.client.dataset(self.dataset).table(table)
 
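The only change here renames the projected columns from field_name/field_type to name/type, so the records returned by read(...).to_dict(orient='records') line up with the list form of schema accepted by SQLWriter above. A standalone sketch of the same query issued directly against BigQuery; the dataset and table names are placeholders, and the bigquery.Client call stands in for the class's self.read wrapper:

from google.cloud import bigquery

client   = bigquery.Client()
_dataset = "my_dataset"   # placeholder
table    = "users"        # placeholder
sql = (f"SELECT column_name as name, data_type as type "
       f"FROM {_dataset}.INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{table}' ")
fields = client.query(sql).to_dataframe().to_dict(orient='records')
# e.g. [{'name': 'id', 'type': 'INT64'}, {'name': 'created_at', 'type': 'TIMESTAMP'}]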