From 1d930035537e61898789b375d798a5442a04d72c Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 17:14:56 +0800 Subject: [PATCH 01/13] added vertica hook --- airflow/hooks/__init__.py | 1 + airflow/hooks/vertica_hoook.py | 38 +++++++++ airflow/models.py | 2 + airflow/operators/vertica_to_hive.py | 112 +++++++++++++++++++++++++++ airflow/utils.py | 8 ++ airflow/www/app.py | 1 + docs/installation.rst | 3 + setup.py | 4 +- 8 files changed, 168 insertions(+), 1 deletion(-) create mode 100644 airflow/hooks/vertica_hoook.py create mode 100644 airflow/operators/vertica_to_hive.py diff --git a/airflow/hooks/__init__.py b/airflow/hooks/__init__.py index 9bf5410affdae..af2a7a5747724 100644 --- a/airflow/hooks/__init__.py +++ b/airflow/hooks/__init__.py @@ -24,6 +24,7 @@ 'dbapi_hook': ['DbApiHook'], 'mssql_hook': ['MsSqlHook'], 'oracle_hook': ['OracleHook'], + 'vertica_hook': ['VerticaHook'], } _import_module_attrs(globals(), _hooks) diff --git a/airflow/hooks/vertica_hoook.py b/airflow/hooks/vertica_hoook.py new file mode 100644 index 0000000000000..d5982c494c7ff --- /dev/null +++ b/airflow/hooks/vertica_hoook.py @@ -0,0 +1,38 @@ +import MySQLdb +import MySQLdb.cursors +from vertica_python import connect + +from airflow.hooks.dbapi_hook import DbApiHook + +class VerticaHook(DbApiHook): + ''' + Interact with Vertica. + + You can specify charset in the extra field of your connection + as ``{"charset": "utf8"}``. Also you can choose cursor as + ``{"cursor": "SSCursor"}``. Refer to the MySQLdb.cursors for more details. 
+ ''' + + conn_name_attr = 'vertica_conn_id' + default_conn_name = 'vertica_default' + supports_autocommit = True + + def get_conn(self): + """ + Returns verticaql connection object + """ + conn = self.get_connection(self.vertica_conn_id) + conn_config = { + "user": conn.login, + "password": conn.password, + "database": conn.schema, + } + + conn_config["host"] = conn.host or 'localhost' + if not conn.port: + conn_config["port"] = 5433 + else: + conn_config["port"] = int(conn.port) + + conn = connect(**conn_config) + return conn diff --git a/airflow/models.py b/airflow/models.py index c2b86293459e6..eb2825d33ae18 100644 --- a/airflow/models.py +++ b/airflow/models.py @@ -390,6 +390,8 @@ def get_hook(self): return hooks.MsSqlHook(mssql_conn_id=self.conn_id) elif self.conn_type == 'oracle': return hooks.OracleHook(oracle_conn_id=self.conn_id) + elif self.conn_type == 'vertica': + return hooks.VerticaHook(vertica_conn_id=self.conn_id) except: return None diff --git a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py new file mode 100644 index 0000000000000..f880d4c67a3e2 --- /dev/null +++ b/airflow/operators/vertica_to_hive.py @@ -0,0 +1,112 @@ +from builtins import chr +from collections import OrderedDict +import unicodecsv as csv +import logging +from tempfile import NamedTemporaryFile + +from vertical_python import datatypes + + +from airflow.hooks import HiveCliHook, VerticaHook +from airflow.models import BaseOperator +from airflow.utils import apply_defaults + + +class VerticaToHiveTransfer(BaseOperator): + """ + Moves data from Microsoft SQL Server to Hive. The operator runs + your query against Microsoft SQL Server, stores the file locally + before loading it into a Hive table. If the ``create`` or + ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. 
+ Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + :param sql: SQL query to execute against the Microsoft SQL Server database + :type sql: str + :param hive_table: target Hive table, use dot notation to target a + specific database + :type hive_table: str + :param create: whether to create the table if it doesn't exist + :type create: bool + :param recreate: whether to drop and recreate the table at every execution + :type recreate: bool + :param partition: target partition as a dict of partition columns and values + :type partition: dict + :param delimiter: field delimiter in the file + :type delimiter: str + :param vertica_conn_id: source Vertica connection + :type vertica_conn_id: str + :param hive_conn_id: destination hive connection + :type hive_conn_id: str + """ + + template_fields = ('sql', 'partition', 'hive_table') + template_ext = ('.sql',) + ui_color = '#a0e08c' + + @apply_defaults + def __init__( + self, + sql, + hive_table, + create=True, + recreate=False, + partition=None, + delimiter=chr(1), + vertica_conn_id='vertica_default', + hive_cli_conn_id='hive_cli_default', + *args, **kwargs): + super(VerticaToHiveTransfer, self).__init__(*args, **kwargs) + self.sql = sql + self.hive_table = hive_table + self.partition = partition + self.create = create + self.recreate = recreate + self.delimiter = delimiter + self.vertica_conn_id = vertica_conn_id + self.hive_cli_conn_id = hive_cli_conn_id + self.partition = partition or {} + + @classmethod + def type_map(cls, vertica_type): + t = datatypes + d = { + t.BINARY.value: 'INT', + t.NUMBER.value: 'INT', + } + return d[vertica_type] if vertica_type in d else 'STRING' + + def execute(self, context): + 
hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id) + vertica = VerticaHook(vertica_conn_id=self.vertica_conn_id) + + logging.info("Dumping Microsoft SQL Server query results to local file") + conn = vertica.get_conn() + cursor = conn.cursor() + cursor.execute(self.sql) + with NamedTemporaryFile("w") as f: + csv_writer = csv.writer(f, delimiter=self.delimiter, encoding='utf-8') + field_dict = OrderedDict() + col_count = 0 + for field in cursor.description: + col_count += 1 + col_position = "Column{position}".format(position=col_count) + field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1]) + csv_writer.writerows(cursor) + f.flush() + cursor.close() + conn.close() + logging.info("Loading file into Hive") + hive.load_file( + f.name, + self.hive_table, + field_dict=field_dict, + create=self.create, + partition=self.partition, + delimiter=self.delimiter, + recreate=self.recreate) diff --git a/airflow/utils.py b/airflow/utils.py index 391039cc62c69..4bfb5ed09af63 100644 --- a/airflow/utils.py +++ b/airflow/utils.py @@ -186,6 +186,14 @@ def initdb(): host='localhost', port=1433)) session.commit() + conn = session.query(C).filter(C.conn_id == 'vertica_default').first() + if not conn: + session.add( + models.Connection( + conn_id='vertica_default', conn_type='vertica', + host='localhost', port=5433)) + session.commit() + # Known event types KET = models.KnownEventType if not session.query(KET).filter(KET.know_event_type == 'Holiday').first(): diff --git a/airflow/www/app.py b/airflow/www/app.py index 0aa9ab480ff06..a27080da49296 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -1728,6 +1728,7 @@ class ConnectionModelView(wwwutils.SuperUserMixin, AirflowModelView): ('mysql', 'MySQL',), ('postgres', 'Postgres',), ('oracle', 'Oracle',), + ('vertica', 'Vertica',), ('presto', 'Presto',), ('s3', 'S3',), ('samba', 'Samba',), diff --git a/docs/installation.rst b/docs/installation.rst index c8c1025f20ed9..9fbcf42e81af9 100644 --- 
a/docs/installation.rst +++ b/docs/installation.rst @@ -42,6 +42,9 @@ Here's the list of the subpackages and what they enable: | mssql | ``pip install airflow[mssql]`` | Microsoft SQL operators and hook, | | | | support as an Airflow backend | +-------------+------------------------------------+------------------------------------------------+ +| vertica | ``pip install airflow[vertica]`` | Vertica hook | +| | | support as an Airflow backend | ++-------------+------------------------------------+------------------------------------------------+ | slack | ``pip install airflow[slack]`` | ``SlackAPIPostOperator`` | +-------------+------------------------------------+------------------------------------------------+ | all | ``pip install airflow[all]`` | All Airflow features known to man | diff --git a/setup.py b/setup.py index 0a5cc4e38d00a..87c29c395db98 100644 --- a/setup.py +++ b/setup.py @@ -27,8 +27,9 @@ slack = ['slackclient>=0.15'] crypto = ['cryptography>=0.9.3'] oracle = ['cx_Oracle>=5.1.2'] +vertica = ['vertica-python>=0.5.1'] -all_dbs = postgres + mysql + hive + mssql + hdfs +all_dbs = postgres + mysql + hive + mssql + hdfs + vertica devel = all_dbs + doc + samba + s3 + ['nose'] + slack + crypto + oracle setup( @@ -80,6 +81,7 @@ 'slack': slack, 'crypto': crypto, 'oracle': oracle, + 'vertica': vertica, }, author='Maxime Beauchemin', author_email='maximebeauchemin@gmail.com', From 70f54988a4c5e485fb5a53e46c3c513f5318ae44 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 17:59:18 +0800 Subject: [PATCH 02/13] issue fix for vertica --- airflow/hooks/{vertica_hoook.py => vertica_hook.py} | 0 airflow/operators/vertica_to_hive.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename airflow/hooks/{vertica_hoook.py => vertica_hook.py} (100%) diff --git a/airflow/hooks/vertica_hoook.py b/airflow/hooks/vertica_hook.py similarity index 100% rename from airflow/hooks/vertica_hoook.py rename to airflow/hooks/vertica_hook.py diff --git 
a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py index f880d4c67a3e2..d9852fb0faa5a 100644 --- a/airflow/operators/vertica_to_hive.py +++ b/airflow/operators/vertica_to_hive.py @@ -4,7 +4,7 @@ import logging from tempfile import NamedTemporaryFile -from vertical_python import datatypes +from vertica_python import datatypes from airflow.hooks import HiveCliHook, VerticaHook From 0d6418fa10fb9e374c63877d2655e3952586fb0f Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 18:29:34 +0800 Subject: [PATCH 03/13] added VerticaOperator --- airflow/operators/vertica_operator.py | 33 +++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 airflow/operators/vertica_operator.py diff --git a/airflow/operators/vertica_operator.py b/airflow/operators/vertica_operator.py new file mode 100644 index 0000000000000..a9a9322cd3a34 --- /dev/null +++ b/airflow/operators/vertica_operator.py @@ -0,0 +1,33 @@ +import logging + +from airflow.hooks import VerticaHook +from airflow.models import BaseOperator +from airflow.utils import apply_defaults + + +class VerticaOperator(BaseOperator): + """ + Executes sql code in a specific Vertica database + + :param vertica_conn_id: reference to a specific Vertica database + :type vertica_conn_id: string + :param sql: the sql code to be executed + :type sql: Can receive a str representing a sql statement, + a list of str (sql statements), or reference to a template file. 
+ Template reference are recognized by str ending in '.sql' + """ + + template_fields = ('sql',) + template_ext = ('.sql',) + ui_color = '#004372' + + @apply_defaults + def __init__(self, sql, vertica_conn_id='vertica_default', *args, **kwargs): + super(VerticaOperator, self).__init__(*args, **kwargs) + self.vertica_conn_id = vertica_conn_id + self.sql = sql + + def execute(self, context): + logging.info('Executing: ' + str(self.sql)) + hook = VerticaOperator(vertica_conn_id=self.vertica_conn_id) + hook.run(self.sql) From 32641f5d3ed574b26855e25290063909913ba472 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 18:32:33 +0800 Subject: [PATCH 04/13] added vertica_operator include path --- airflow/operators/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/airflow/operators/__init__.py b/airflow/operators/__init__.py index a6c810abf8deb..cfa7df70ca6a9 100644 --- a/airflow/operators/__init__.py +++ b/airflow/operators/__init__.py @@ -26,6 +26,7 @@ 'email_operator': ['EmailOperator'], 'hive_to_samba_operator': ['Hive2SambaOperator'], 'mysql_operator': ['MySqlOperator'], + 'vertica_operator': ['VerticaOperator'], 'sqlite_operator': ['SqliteOperator'], 'mysql_to_hive': ['MySqlToHiveTransfer'], 'postgres_operator': ['PostgresOperator'], From 1948f5c45917462fee68c617aca42031a9a1b324 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 18:39:14 +0800 Subject: [PATCH 05/13] changed vertica_operator color --- airflow/operators/vertica_operator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/operators/vertica_operator.py b/airflow/operators/vertica_operator.py index a9a9322cd3a34..b8740cc3d4e1d 100644 --- a/airflow/operators/vertica_operator.py +++ b/airflow/operators/vertica_operator.py @@ -19,7 +19,7 @@ class VerticaOperator(BaseOperator): template_fields = ('sql',) template_ext = ('.sql',) - ui_color = '#004372' + ui_color = '#b4e0ff' @apply_defaults def __init__(self, sql, 
vertica_conn_id='vertica_default', *args, **kwargs): From 5551eba128488369a5fa6324b7eb67b58d34e5cd Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 18:41:20 +0800 Subject: [PATCH 06/13] bug fix for verticaoperator --- airflow/operators/vertica_operator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/operators/vertica_operator.py b/airflow/operators/vertica_operator.py index b8740cc3d4e1d..fdce789116cfb 100644 --- a/airflow/operators/vertica_operator.py +++ b/airflow/operators/vertica_operator.py @@ -29,5 +29,5 @@ def __init__(self, sql, vertica_conn_id='vertica_default', *args, **kwargs): def execute(self, context): logging.info('Executing: ' + str(self.sql)) - hook = VerticaOperator(vertica_conn_id=self.vertica_conn_id) + hook = VerticaHook(vertica_conn_id=self.vertica_conn_id) hook.run(self.sql) From 52c2ff437663facf5030b2e055d5fa3d886dd711 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 18:45:18 +0800 Subject: [PATCH 07/13] changed vertica to hive color --- airflow/operators/vertica_to_hive.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py index d9852fb0faa5a..a28e1959c8ac9 100644 --- a/airflow/operators/vertica_to_hive.py +++ b/airflow/operators/vertica_to_hive.py @@ -6,16 +6,14 @@ from vertica_python import datatypes - from airflow.hooks import HiveCliHook, VerticaHook from airflow.models import BaseOperator from airflow.utils import apply_defaults - class VerticaToHiveTransfer(BaseOperator): """ - Moves data from Microsoft SQL Server to Hive. The operator runs - your query against Microsoft SQL Server, stores the file locally + Moves data from Vertica to Hive. The operator runs + your query against Vertica, stores the file locally before loading it into a Hive table.
If the ``create`` or ``recreate`` arguments are set to ``True``, a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. @@ -26,7 +24,7 @@ class VerticaToHiveTransfer(BaseOperator): queried considerably, you may want to use this operator only to stage the data into a temporary table before loading it into its final destination using a ``HiveOperator``. - :param sql: SQL query to execute against the Microsoft SQL Server database + :param sql: SQL query to execute against the Vertica database :type sql: str :param hive_table: target Hive table, use dot notation to target a specific database @@ -47,7 +45,7 @@ class VerticaToHiveTransfer(BaseOperator): template_fields = ('sql', 'partition', 'hive_table') template_ext = ('.sql',) - ui_color = '#a0e08c' + ui_color = '#b4e0ff' @apply_defaults def __init__( @@ -85,7 +83,7 @@ def execute(self, context): hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id) vertica = VerticaHook(vertica_conn_id=self.vertica_conn_id) - logging.info("Dumping Microsoft SQL Server query results to local file") + logging.info("Dumping Vertica query results to local file") conn = vertica.get_conn() cursor = conn.cursor() cursor.execute(self.sql) From b334a4df1d592bbeab226b5d0d2242eba0899611 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 18:47:18 +0800 Subject: [PATCH 08/13] added vertica to hive path --- airflow/operators/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/airflow/operators/__init__.py b/airflow/operators/__init__.py index cfa7df70ca6a9..0f346c4c058a7 100644 --- a/airflow/operators/__init__.py +++ b/airflow/operators/__init__.py @@ -26,9 +26,10 @@ 'email_operator': ['EmailOperator'], 'hive_to_samba_operator': ['Hive2SambaOperator'], 'mysql_operator': ['MySqlOperator'], - 'vertica_operator': ['VerticaOperator'], 'sqlite_operator': ['SqliteOperator'], 'mysql_to_hive': ['MySqlToHiveTransfer'], + 'vertica_operator': ['VerticaOperator'], + 'vertica_to_hive': 
['VerticaToHiveTransfer'], 'postgres_operator': ['PostgresOperator'], 'sensors': [ 'SqlSensor', From 5ebca93ea328c3aa80071b1f9762a3a6d104c45a Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 19:13:59 +0800 Subject: [PATCH 09/13] bug fix for verticaoperator --- airflow/operators/vertica_to_hive.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py index a28e1959c8ac9..fe168447b4031 100644 --- a/airflow/operators/vertica_to_hive.py +++ b/airflow/operators/vertica_to_hive.py @@ -65,7 +65,7 @@ def __init__( self.partition = partition self.create = create self.recreate = recreate - self.delimiter = delimiter + self.delimiter = str(delimiter) self.vertica_conn_id = vertica_conn_id self.hive_cli_conn_id = hive_cli_conn_id self.partition = partition or {} @@ -74,9 +74,10 @@ def __init__( def type_map(cls, vertica_type): t = datatypes d = { - t.BINARY.value: 'INT', - t.NUMBER.value: 'INT', + t.BINARY: 'INT', + t.NUMBER: 'INT', } + logging.info(vertica_type) return d[vertica_type] if vertica_type in d else 'STRING' def execute(self, context): From 1a27ea7cf43d7ced151361da3106240c25575e33 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Thu, 17 Sep 2015 19:17:31 +0800 Subject: [PATCH 10/13] bug fix for vertica_to_hive --- airflow/operators/vertica_to_hive.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py index fe168447b4031..d0b0674455316 100644 --- a/airflow/operators/vertica_to_hive.py +++ b/airflow/operators/vertica_to_hive.py @@ -96,7 +96,7 @@ def execute(self, context): col_count += 1 col_position = "Column{position}".format(position=col_count) field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1]) - csv_writer.writerows(cursor) + csv_writer.writerows(cursor.iterate()) f.flush() cursor.close() conn.close() From 
15efd6f9626793412cf2d85a8d34db3118ffeffa Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Fri, 18 Sep 2015 10:05:27 +0800 Subject: [PATCH 11/13] fix for vertica_to_hive type mapping --- airflow/operators/vertica_to_hive.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py index d0b0674455316..e54e1bc51c24c 100644 --- a/airflow/operators/vertica_to_hive.py +++ b/airflow/operators/vertica_to_hive.py @@ -72,12 +72,16 @@ def __init__( @classmethod def type_map(cls, vertica_type): - t = datatypes + # vertica-python datatype.py does not provide the full type mapping access. + # Manual hack. Reference: https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py d = { - t.BINARY: 'INT', - t.NUMBER: 'INT', + 5: 'BOOLEAN', + 6: 'INT', + 7: 'FLOAT' + 8: 'STRING', + 9: 'STRING', + 16: 'FLOAT', } - logging.info(vertica_type) return d[vertica_type] if vertica_type in d else 'STRING' def execute(self, context): @@ -93,6 +97,7 @@ def execute(self, context): field_dict = OrderedDict() col_count = 0 for field in cursor.description: + logging.info(field) col_count += 1 col_position = "Column{position}".format(position=col_count) field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1]) From af6eb0332e0e3b336d36b8cf1e81e7ce96d3b92e Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Fri, 18 Sep 2015 10:10:37 +0800 Subject: [PATCH 12/13] remove debug log --- airflow/operators/vertica_to_hive.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/airflow/operators/vertica_to_hive.py b/airflow/operators/vertica_to_hive.py index e54e1bc51c24c..afc9d18b0c1f2 100644 --- a/airflow/operators/vertica_to_hive.py +++ b/airflow/operators/vertica_to_hive.py @@ -4,8 +4,6 @@ import logging from tempfile import NamedTemporaryFile -from vertica_python import datatypes - from airflow.hooks import HiveCliHook, VerticaHook from 
airflow.models import BaseOperator from airflow.utils import apply_defaults @@ -77,7 +75,7 @@ def type_map(cls, vertica_type): d = { 5: 'BOOLEAN', 6: 'INT', - 7: 'FLOAT' + 7: 'FLOAT', 8: 'STRING', 9: 'STRING', 16: 'FLOAT', @@ -97,7 +95,6 @@ def execute(self, context): field_dict = OrderedDict() col_count = 0 for field in cursor.description: - logging.info(field) col_count += 1 col_position = "Column{position}".format(position=col_count) field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1]) From 0fff64b654207f9667a3dd3007173f4fb1c1d175 Mon Sep 17 00:00:00 2001 From: Griffin Qiu Date: Fri, 18 Sep 2015 10:12:38 +0800 Subject: [PATCH 13/13] remove mysql include --- airflow/hooks/vertica_hook.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/airflow/hooks/vertica_hook.py b/airflow/hooks/vertica_hook.py index d5982c494c7ff..45cd0d0e6f434 100644 --- a/airflow/hooks/vertica_hook.py +++ b/airflow/hooks/vertica_hook.py @@ -1,5 +1,3 @@ -import MySQLdb -import MySQLdb.cursors from vertica_python import connect from airflow.hooks.dbapi_hook import DbApiHook @@ -7,10 +5,6 @@ class VerticaHook(DbApiHook): ''' Interact with Vertica. - - You can specify charset in the extra field of your connection - as ``{"charset": "utf8"}``. Also you can choose cursor as - ``{"cursor": "SSCursor"}``. Refer to the MySQLdb.cursors for more details. ''' conn_name_attr = 'vertica_conn_id'