Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add pyexasol datasource, ensure that integers don't overflow in JavaScript #4378

Merged
merged 2 commits into from
Nov 27, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added client/app/assets/images/db-logos/exasol.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
142 changes: 142 additions & 0 deletions redash/query_runner/exasol.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
import datetime

from redash.query_runner import *
from redash.utils import json_dumps

def _exasol_type_mapper(val, data_type):
if val is None:
return None
elif data_type['type'] == 'DECIMAL':
if data_type['scale'] == 0 and data_type['precision'] < 16:
return int(val)
elif data_type['scale'] == 0 and data_type['precision'] >= 16:
return val
else:
return float(val)
elif data_type['type'] == 'DATE':
return datetime.date(int(val[0:4]), int(val[5:7]), int(val[8:10]))
elif data_type['type'] == 'TIMESTAMP':
return datetime.datetime(int(val[0:4]), int(val[5:7]), int(val[8:10]), # year, month, day
int(val[11:13]), int(val[14:16]), int(val[17:19]), # hour, minute, second
int(val[20:26].ljust(6, '0')) if len(val) > 20 else 0) # microseconds (if available)
else:
return val

def _type_mapper(data_type):
    """Map an Exasol column type descriptor to a redash column type.

    Mirrors ``_exasol_type_mapper``: integral DECIMALs wide enough to
    overflow JavaScript's Number type (precision >= 16) are reported as
    TYPE_STRING so the frontend never parses them as numbers.
    """
    type_name = data_type['type']
    if type_name == 'DECIMAL':
        if data_type['scale'] != 0:
            return TYPE_FLOAT
        return TYPE_INTEGER if data_type['precision'] < 16 else TYPE_STRING
    if type_name == 'DATE':
        return TYPE_DATE
    if type_name == 'TIMESTAMP':
        return TYPE_DATETIME
    return TYPE_STRING

try:
import pyexasol
enabled = True
except ImportError:
enabled = False


class Exasol(BaseQueryRunner):
    """Query runner for the Exasol analytics database, backed by pyexasol."""

    noop_query = 'SELECT 1 FROM DUAL'

    @classmethod
    def configuration_schema(cls):
        """Return the JSON schema describing this data source's settings."""
        properties = {
            'user': {'type': 'string'},
            'password': {'type': 'string'},
            'host': {'type': 'string'},
            'port': {'type': 'number', 'default': 8563},
        }
        return {
            'type': 'object',
            'properties': properties,
            'required': ['host', 'port', 'user', 'password'],
            'order': ['host', 'port', 'user', 'password'],
            'secret': ['password'],
        }

    def _get_connection(self):
        """Open a pyexasol connection from the stored configuration."""
        host = self.configuration.get('host', None)
        port = self.configuration.get('port', 8563)
        return pyexasol.connect(
            dsn="%s:%s" % (host, port),
            user=self.configuration.get('user', None),
            password=self.configuration.get('password', None),
            compression=True,
            # rapidjson is the faster JSON parser recommended by the
            # pyexasol best-practices guide.
            json_lib='rapidjson',
            # Converts values (and keeps wide integers as strings so
            # they don't overflow in JavaScript).
            fetch_mapper=_exasol_type_mapper,
        )

    def run_query(self, query, user):
        """Execute ``query`` and return (json_data, error).

        The statement and connection are always closed, even when
        execution raises; the exception itself propagates to the caller.
        """
        conn = self._get_connection()
        stmt = None
        error = None
        try:
            stmt = conn.execute(query)
            columns = []
            for col_name, col_type in stmt.columns().items():
                columns.append({
                    'name': col_name,
                    'friendly_name': col_name,
                    'type': _type_mapper(col_type),
                })
            names = stmt.column_names()
            rows = [dict(zip(names, row)) for row in stmt]
            json_data = json_dumps({'columns': columns, 'rows': rows})
        finally:
            if stmt is not None:
                stmt.close()
            conn.close()

        return json_data, error

    def get_schema(self, get_stats=False):
        """Fetch schema.table -> columns metadata from EXA_ALL_COLUMNS."""
        query = """
        SELECT
            COLUMN_SCHEMA,
            COLUMN_TABLE,
            COLUMN_NAME
        FROM EXA_ALL_COLUMNS
        """

        conn = self._get_connection()
        stmt = None
        try:
            stmt = conn.execute(query)
            tables = {}

            for schema, table_name, column in stmt:
                key = '%s.%s' % (schema, table_name)
                # Create the table entry on first sight, then accumulate
                # its column names.
                entry = tables.setdefault(key, {'name': key, 'columns': []})
                entry['columns'].append(column)
        finally:
            if stmt is not None:
                stmt.close()
            conn.close()

        return tables.values()

    @classmethod
    def enabled(cls):
        # True only when the optional pyexasol dependency imported cleanly.
        return enabled


register(Exasol)
1 change: 1 addition & 0 deletions redash/settings/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,6 +303,7 @@ def email_server_is_configured():
'redash.query_runner.cass',
'redash.query_runner.dgraph',
'redash.query_runner.azure_kusto',
'redash.query_runner.exasol',
]

enabled_query_runners = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join(default_query_runners)))
Expand Down
2 changes: 2 additions & 0 deletions requirements_all_ds.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,5 @@ phoenixdb==0.7
certifi>=2019.9.11
pydgraph==2.0.2
azure-kusto-data==0.0.35
pyexasol==0.9.1
python-rapidjson==0.8.0
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Did you notice any noticeable performance difference when using this?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's recommended in the pyexasol best-practices guide; I have not done any benchmarking on this myself.

https://github.com/exasol/pyexasol/blob/master/docs/BEST_PRACTICES.md#consider-faster-json-parsing-libraries