%load_ext autoreload
%autoreload 2
import sys
sys.path.append("..")
from optimus import Optimus
# Create optimus^
op = Optimus("pandas")
# Put your db credentials here
db = op.connect.mysql(
host="localhost",
database= "loving",
user= "root",
password = "")
mysql://root:@localhost:3306/loving
# List the tables available in the connected MySQL database.
db.tables()
['wp_commentmeta', 'wp_comments', 'wp_datalist', 'wp_hplugin_root', 'wp_htheme_root', 'wp_hutility_default_storage_table', 'wp_links', 'wp_options', 'wp_postmeta', 'wp_posts', 'wp_revslider_css', 'wp_revslider_layer_animations', 'wp_revslider_navigations', 'wp_revslider_sliders', 'wp_revslider_slides', 'wp_revslider_static_slides', 'wp_statistics_exclusions', 'wp_statistics_historical', 'wp_statistics_pages', 'wp_statistics_search', 'wp_statistics_useronline', 'wp_statistics_visit', 'wp_statistics_visitor', 'wp_term_relationships', 'wp_term_taxonomy', 'wp_termmeta', 'wp_terms', 'wp_usermeta', 'wp_users', 'wp_woocommerce_api_keys', 'wp_woocommerce_attribute_taxonomies', 'wp_woocommerce_downloadable_product_permissions', 'wp_woocommerce_log', 'wp_woocommerce_order_itemmeta', 'wp_woocommerce_order_items', 'wp_woocommerce_payment_tokenmeta', 'wp_woocommerce_payment_tokens', 'wp_woocommerce_sessions', 'wp_woocommerce_shipping_zone_locations', 'wp_woocommerce_shipping_zone_methods', 'wp_woocommerce_shipping_zones', 'wp_woocommerce_tax_rate_locations', 'wp_woocommerce_tax_rates', 'wp_yoast_seo_links', 'wp_yoast_seo_meta']
# db.execute("SHOW KEYS FROM test_data WHERE key_name = 'PRIMARY'")
db.execute("SELECT * FROM wp_comments").display()
comment_ID
1 (int64)
|
comment_post_ID
2 (int64)
|
comment_author
3 (object)
|
comment_author_email
4 (object)
|
comment_author_url
5 (object)
|
comment_author_IP
6 (object)
|
comment_date
7 (datetime64[ns])
|
comment_date_gmt
8 (datetime64[ns])
|
comment_content
9 (object)
|
comment_karma
10 (int64)
|
comment_approved
11 (object)
|
comment_agent
12 (object)
|
comment_type
13 (object)
|
comment_parent
14 (int64)
|
user_id
15 (int64)
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:07
|
2018-06-03⋅06:32:07
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
31
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
21
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
23
|
420
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-06-14⋅16:15:10
|
2017-06-14⋅16:15:10
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
25
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
39
|
516
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-09-07⋅17:15:53
|
2018-09-07⋅17:15:53
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
28
|
422
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-10-19⋅20:08:12
|
2017-10-19⋅20:08:12
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
36
|
514
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-07-23⋅06:56:48
|
2018-07-23⋅06:56:48
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
34
|
512
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-14⋅15:43:12
|
2018-06-14⋅15:43:12
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
42
|
517
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-10-26⋅03:54:39
|
2018-10-26⋅03:54:39
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
43
|
518
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-12-05⋅21:15:13
|
2018-12-05⋅21:15:13
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
db.execute("SELECT * FROM wp_comments", partition_column ="comment_ID", table_name = "wp_comments").display()
comment_post_ID
1 (int64)
|
comment_author
2 (object)
|
comment_author_email
3 (object)
|
comment_author_url
4 (object)
|
comment_author_IP
5 (object)
|
comment_date
6 (datetime64[ns])
|
comment_date_gmt
7 (datetime64[ns])
|
comment_content
8 (object)
|
comment_karma
9 (int64)
|
comment_approved
10 (object)
|
comment_agent
11 (object)
|
comment_type
12 (object)
|
comment_parent
13 (int64)
|
user_id
14 (int64)
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:08
|
2017-06-14⋅15:48:08
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
WHITE⋅CLASSIC⋅MAN⋅T-SHIRT⋅-⋅S⋅(tsw00001)⋅stock⋅reduced⋅from⋅50...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
420
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-06-14⋅16:15:10
|
2017-06-14⋅16:15:10
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
422
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-10-19⋅20:08:12
|
2017-10-19⋅20:08:12
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
460
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-10-19⋅20:45:13
|
2017-10-19⋅20:45:13
|
Order⋅cancelled⋅by⋅customer.⋅Order⋅status⋅changed⋅from⋅Pending⋅payment&...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:07
|
2018-06-03⋅06:32:07
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
⋅stock⋅reduced⋅from⋅30⋅to⋅29." style="min-height: 14px;">
BLACK⋅CLASSIC⋅MAN⋅T-SHIRT⋅-⋅XL⋅(tsw00002)⋅st...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
db.table_to_df("wp_comments", partition_column ="id").display()
comment_ID
1 (int64)
|
comment_post_ID
2 (int64)
|
comment_author
3 (object)
|
comment_author_email
4 (object)
|
comment_author_url
5 (object)
|
comment_author_IP
6 (object)
|
comment_date
7 (datetime64[ns])
|
comment_date_gmt
8 (datetime64[ns])
|
comment_content
9 (object)
|
comment_karma
10 (int64)
|
comment_approved
11 (object)
|
comment_agent
12 (object)
|
comment_type
13 (object)
|
comment_parent
14 (int64)
|
user_id
15 (int64)
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:07
|
2018-06-03⋅06:32:07
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
31
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
20
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:08
|
2017-06-14⋅15:48:08
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
21
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
22
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
WHITE⋅CLASSIC⋅MAN⋅T-SHIRT⋅-⋅S⋅(tsw00001)⋅stock⋅reduced⋅from⋅50...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
23
|
420
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-06-14⋅16:15:10
|
2017-06-14⋅16:15:10
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
24
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
25
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
32
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
⋅stock⋅reduced⋅from⋅30⋅to⋅29." style="min-height: 14px;">
BLACK⋅CLASSIC⋅MAN⋅T-SHIRT⋅-⋅XL⋅(tsw00002)⋅st...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
39
|
516
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-09-07⋅17:15:53
|
2018-09-07⋅17:15:53
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
28
|
422
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-10-19⋅20:08:12
|
2017-10-19⋅20:08:12
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
# Load wp_comments into a dataframe; limit=None presumably disables the
# default row limit — TODO confirm against table_to_df's signature.
df = db.table_to_df("wp_comments", limit=None)
df
comment_ID
1 (int64)
|
comment_post_ID
2 (int64)
|
comment_author
3 (object)
|
comment_author_email
4 (object)
|
comment_author_url
5 (object)
|
comment_author_IP
6 (object)
|
comment_date
7 (datetime64[ns])
|
comment_date_gmt
8 (datetime64[ns])
|
comment_content
9 (object)
|
comment_karma
10 (int64)
|
comment_approved
11 (object)
|
comment_agent
12 (object)
|
comment_type
13 (object)
|
comment_parent
14 (int64)
|
user_id
15 (int64)
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:07
|
2018-06-03⋅06:32:07
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
31
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
20
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:08
|
2017-06-14⋅15:48:08
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
21
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
22
|
420
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-14⋅15:48:09
|
2017-06-14⋅15:48:09
|
WHITE⋅CLASSIC⋅MAN⋅T-SHIRT⋅-⋅S⋅(tsw00001)⋅stock⋅reduced⋅from⋅50...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
23
|
420
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-06-14⋅16:15:10
|
2017-06-14⋅16:15:10
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
24
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
IPN⋅payment⋅completed
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
25
|
422
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2017-06-19⋅22:57:21
|
2017-06-19⋅22:57:21
|
Order⋅status⋅changed⋅from⋅Pending⋅payment⋅to⋅Processing.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
32
|
508
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-06-03⋅06:32:08
|
2018-06-03⋅06:32:08
|
⋅stock⋅reduced⋅from⋅30⋅to⋅29." style="min-height: 14px;">
BLACK⋅CLASSIC⋅MAN⋅T-SHIRT⋅-⋅XL⋅(tsw00002)⋅st...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
39
|
516
|
WooCommerce
|
woocommerce@lovingbrickell.com
|
|
|
2018-09-07⋅17:15:53
|
2018-09-07⋅17:15:53
|
Unpaid⋅order⋅cancelled⋅-⋅time⋅limit⋅reached.⋅Order⋅status⋅changed⋅...
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
28
|
422
|
Jorge⋅Troya
|
creativo@asmediaconnections.com
|
|
|
2017-10-19⋅20:08:12
|
2017-10-19⋅20:08:12
|
Order⋅status⋅changed⋅from⋅Processing⋅to⋅Completed.
|
0
|
1
|
WooCommerce
|
order_note
|
0
|
0
|
# Fetch the table names as a plain Python list (see output below).
db.tables_names_to_json()
INFO:optimus:(SELECT TABLE_NAME AS table_name FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'optimus' GROUP BY TABLE_NAME ) AS t INFO:optimus:jdbc:mysql://165.227.196.70:3306/optimus?currentSchema=public
['test_data']
# Put your db credentials here.
# NOTE(review): this uses the generic op.connect(driver=...) form while the
# MySQL cell above used op.connect.mysql(...) — confirm which API the
# installed Optimus version expects.
db = op.connect(
    driver="postgresql",
    host="165.227.196.70",
    database="optimus",
    user="testuser",
    password="test",
)
INFO:optimus:jdbc:postgresql://165.227.196.70:5432/optimus?currentSchema=public
# Inspect the PostgreSQL database: list tables, preview one, dump names.
db.tables()
db.table_to_df("test_data").table()
db.tables_names_to_json()
INFO:optimus:( SELECT relname as table_name FROM pg_class C LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace) WHERE nspname IN ('public') AND relkind='r' ORDER BY reltuples DESC) AS t INFO:optimus:jdbc:postgresql://165.227.196.70:5432/optimus?currentSchema=public
['test_data']
# Put your db credentials here.
# NOTE(review): this call raised AttributeError ('SQLServerDriver' object has
# no attribute 'uri') in the output below — a library-side issue, not fixable
# from this notebook.
db = op.connect.mssql(
    host="127.0.0.1",
    database="master",
    user="root",
    password="",
)
--------------------------------------------------------------------------- AttributeError Traceback (most recent call last) <ipython-input-12-7e35670d8453> in <module> 4 database= "master", 5 user= "root", ----> 6 password = "") ~\Documents\Optimus\optimus\engines\base\io\connect.py in mssql(self, host, database, user, password, port, schema) 180 def mssql(self, host=None, database=None, user=None, password=None, port=None, schema="public") -> 'ConnectionType': 181 return DaskBaseJDBC(host, database, user, password, port=port, driver=DriverProperties.SQLSERVER.value["name"], --> 182 schema=schema, op=self.op) 183 184 def redshift(self, host=None, database=None, user=None, password=None, port=None, schema="public") -> 'ConnectionType': ~\Documents\Optimus\optimus\engines\base\dask\io\jdbc.py in __init__(self, host, database, user, password, port, driver, schema, oracle_tns, oracle_service_name, oracle_sid, presto_catalog, cassandra_keyspace, cassandra_table, bigquery_project, bigquery_dataset, op) 75 presto_catalog=presto_catalog, 76 bigquery_project=bigquery_project, ---> 77 bigquery_dataset=bigquery_dataset 78 ) 79 ~\Documents\Optimus\optimus\engines\base\io\driver_context.py in uri(self, *args, **kwargs) 22 23 def uri(self, *args, **kwargs) -> str: ---> 24 return self._driver.uri(*args, **kwargs) 25 26 def url(self, *args, **kwargs) -> str: AttributeError: 'SQLServerDriver' object has no attribute 'uri'
# List the tables available in the connected SQL Server database.
db.tables()
INFO:optimus:(SELECT * FROM INFORMATION_SCHEMA.TABLES) AS t INFO:optimus:jdbc:sqlserver://165.227.196.70:1433;databaseName=optimus
TABLE_CATALOG
1 (string)
nullable
|
TABLE_SCHEMA
2 (string)
nullable
|
TABLE_NAME
3 (string)
nullable
|
TABLE_TYPE
4 (string)
nullable
|
---|---|---|---|
optimus
|
dbo
|
test_data
|
BASE⋅TABLE
|
db.table_to_df("test_data").table()
INFO:optimus:SELECT * FROM test_data INFO:optimus:(SELECT * FROM test_data) AS t INFO:optimus:jdbc:sqlserver://165.227.196.70:1433;databaseName=optimus
id
1 (int)
nullable
|
first_name
2 (string)
nullable
|
last_name
3 (string)
nullable
|
email
4 (string)
nullable
|
gender
5 (string)
nullable
|
ip_address
6 (string)
nullable
|
---|---|---|---|---|---|
1
|
Keenan
|
McAirt
|
kmcairt0@spotify.com
|
Male
|
68.97.227.147
|
2
|
Fredelia
|
Lemarie
|
flemarie1@furl.net
|
Female
|
16.145.123.46
|
1
|
Keenan
|
McAirt
|
kmcairt0@spotify.com
|
Male
|
68.97.227.147
|
2
|
Fredelia
|
Lemarie
|
flemarie1@furl.net
|
Female
|
16.145.123.46
|
1
|
Keenan
|
McAirt
|
kmcairt0@spotify.com
|
Male
|
68.97.227.147
|
2
|
Fredelia
|
Lemarie
|
flemarie1@furl.net
|
Female
|
16.145.123.46
|
1
|
Keenan
|
McAirt
|
kmcairt0@spotify.com
|
Male
|
68.97.227.147
|
2
|
Fredelia
|
Lemarie
|
flemarie1@furl.net
|
Female
|
16.145.123.46
|
2
|
Fredelia
|
Lemarie
|
flemarie1@furl.net
|
Female
|
16.145.123.46
|
1
|
Evyn
|
Abbey
|
eabbey0@mlb.com
|
Male
|
202.99.246.227
|
# Fetch the SQL Server table names as a plain Python list.
db.tables_names_to_json()
INFO:optimus:(SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES) AS t INFO:optimus:jdbc:sqlserver://165.227.196.70:1433;databaseName=optimus
['test_data']
# Put your db credentials here.
db = op.connect(
    driver="redshift",
    host="165.227.196.70",
    database="optimus",
    user="testuser",
    password="test",
)
INFO:optimus:jdbc:redshift://redshift-cluster-1.chuvgsqx7epn.us-east-1.redshift.amazonaws.com:5439/dev?currentSchema=public
# List the Redshift tables (this timed out in the run captured below).
db.tables()
INFO:optimus:( SELECT relname as table_name,cast (reltuples as integer) AS count FROM pg_class C LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace) WHERE nspname IN ('public') AND relkind='r' ORDER BY reltuples DESC) AS t INFO:optimus:jdbc:redshift://redshift-cluster-1.chuvgsqx7epn.us-east-1.redshift.amazonaws.com:5439/dev?currentSchema=public
--------------------------------------------------------------------------- Py4JJavaError Traceback (most recent call last) <ipython-input-3-cdef22199e9a> in <module> ----> 1 db.tables() ~\Documents\Optimus\optimus\io\jdbc.py in tables(self, schema, database, limit) 179 FROM user_tables ORDER BY table_name""" 180 --> 181 df = self.execute(query, limit) 182 return df.table(limit) 183 ~\Documents\Optimus\optimus\io\jdbc.py in execute(self, query, limit) 309 conf.options(table=self.cassandra_table, keyspace=self.cassandra_keyspace) 310 --> 311 return self._limit(conf.load(), limit) 312 313 def df_to_table(self, df, table, mode="overwrite"): ~\Anaconda3\lib\site-packages\pyspark\sql\readwriter.py in load(self, path, format, schema, **options) 170 return self._df(self._jreader.load(self._spark._sc._jvm.PythonUtils.toSeq(path))) 171 else: --> 172 return self._df(self._jreader.load()) 173 174 @since(1.4) ~\Anaconda3\lib\site-packages\py4j\java_gateway.py in __call__(self, *args) 1255 answer = self.gateway_client.send_command(command) 1256 return_value = get_return_value( -> 1257 answer, self.gateway_client, self.target_id, self.name) 1258 1259 for temp_arg in temp_args: ~\Anaconda3\lib\site-packages\pyspark\sql\utils.py in deco(*a, **kw) 61 def deco(*a, **kw): 62 try: ---> 63 return f(*a, **kw) 64 except py4j.protocol.Py4JJavaError as e: 65 s = e.java_exception.toString() ~\Anaconda3\lib\site-packages\py4j\protocol.py in get_return_value(answer, gateway_client, target_id, name) 326 raise Py4JJavaError( 327 "An error occurred while calling {0}{1}{2}.\n". --> 328 format(target_id, ".", name), value) 329 else: 330 raise Py4JError( Py4JJavaError: An error occurred while calling o41.load. : java.sql.SQLException: [Amazon](500150) Error setting/closing connection: Connection timed out: connect. 
at com.amazon.redshift.client.PGClient.connect(Unknown Source) at com.amazon.redshift.client.PGClient.<init>(Unknown Source) at com.amazon.redshift.core.PGJDBCConnection.connect(Unknown Source) at com.amazon.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source) at com.amazon.jdbc.common.AbstractDriver.connect(Unknown Source) at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:63) at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:54) at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:56) at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:115) at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52) at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340) at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239) at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227) at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) at py4j.Gateway.invoke(Gateway.java:282) at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) at py4j.commands.CallCommand.execute(CallCommand.java:79) at py4j.GatewayConnection.run(GatewayConnection.java:238) Caused by: com.amazon.support.exceptions.GeneralException: [Amazon](500150) Error setting/closing connection: Connection timed out: connect. ... 
24 more Caused by: java.net.ConnectException: Connection timed out: connect at sun.nio.ch.Net.connect0(Native Method) at sun.nio.ch.Net.connect(Net.java:454) at sun.nio.ch.Net.connect(Net.java:446) at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:648) at sun.nio.ch.SocketAdaptor.connect(SocketAdaptor.java:96) at com.amazon.redshift.client.PGClient.connect(Unknown Source) at com.amazon.redshift.client.PGClient.<init>(Unknown Source) at com.amazon.redshift.core.PGJDBCConnection.connect(Unknown Source) at com.amazon.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source) at com.amazon.jdbc.common.AbstractDriver.connect(Unknown Source) at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:63) at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:54) at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:56) at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:115) at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52) at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340) at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239) at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227) at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) at py4j.Gateway.invoke(Gateway.java:282) at 
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) at py4j.commands.CallCommand.execute(CallCommand.java:79) at py4j.GatewayConnection.run(GatewayConnection.java:238) at java.lang.Thread.run(Thread.java:748)
db.table_to_df("test_data").table()
# Put your db credentials here
db = op.connect(
driver="oracle",
host="165.227.196.70",
database= "optimus",
user= "testuser",
password = "test")
# Put your db credentials here
db = op.connect(
driver="sqlite",
host="chinook.db",
database= "employes",
user= "testuser",
password = "test")
INFO:optimus:jdbc:sqlite:chinook.db
# List the tables in the SQLite database file.
db.tables()
INFO:optimus:(SELECT name FROM sqlite_master WHERE type='table') AS t INFO:optimus:jdbc:sqlite:chinook.db
name
1 (string)
nullable
|
---|
albums
|
sqlite_sequence
|
artists
|
customers
|
employees
|
genres
|
invoices
|
invoice_items
|
media_types
|
playlists
|
db.table_to_df("albums",limit="all").table()
INFO:optimus:(SELECT COUNT(*) as COUNT FROM albums) AS t INFO:optimus:jdbc:sqlite:chinook.db
347 rows
INFO:optimus:SELECT * FROM albums INFO:optimus:(SELECT * FROM albums) AS t INFO:optimus:jdbc:sqlite:chinook.db
AlbumId
1 (int)
nullable
|
Title
2 (string)
nullable
|
ArtistId
3 (int)
nullable
|
---|---|---|
1
|
For⋅Those⋅About⋅To⋅Rock⋅We⋅Salute⋅You
|
1
|
2
|
Balls⋅to⋅the⋅Wall
|
2
|
3
|
Restless⋅and⋅Wild
|
2
|
4
|
Let⋅There⋅Be⋅Rock
|
1
|
5
|
Big⋅Ones
|
3
|
6
|
Jagged⋅Little⋅Pill
|
4
|
7
|
Facelift
|
5
|
8
|
Warner⋅25⋅Anos
|
6
|
9
|
Plays⋅Metallica⋅By⋅Four⋅Cellos
|
7
|
10
|
Audioslave
|
8
|
# Fetch the SQLite table names as a plain Python list.
db.tables_names_to_json()
INFO:optimus:(SELECT name FROM sqlite_master WHERE type='table') AS t INFO:optimus:jdbc:sqlite:chinook.db
['albums', 'sqlite_sequence', 'artists', 'customers', 'employees', 'genres', 'invoices', 'invoice_items', 'media_types', 'playlists', 'playlist_track', 'tracks', 'sqlite_stat1']
# Download a sample CSV and build a dataframe from it.
# NOTE(review): header/infer_schema are passed as the strings 'true' —
# presumably the Spark-style string flags; verify the installed API.
df = op.load.csv("https://raw.githubusercontent.com/ironmussa/Optimus/master/examples/data/foo.csv", sep=",", header='true', infer_schema='true', charset="UTF-8", null_value="None")
INFO:optimus:Downloading foo.csv from https://raw.githubusercontent.com/ironmussa/Optimus/master/examples/data/foo.csv INFO:optimus:Downloaded 967 bytes INFO:optimus:Creating DataFrame for foo.csv. Please wait...
# Render the freshly loaded CSV dataframe.
df.table()
id
1 (int)
nullable
|
firstName
2 (string)
nullable
|
lastName
3 (string)
nullable
|
billingId
4 (int)
nullable
|
product
5 (string)
nullable
|
price
6 (int)
nullable
|
birth
7 (string)
nullable
|
dummyCol
8 (string)
nullable
|
---|---|---|---|---|---|---|---|
1
|
Luis
|
Alvarez$$%!
|
123
|
Cake
|
10
|
1980/07/07
|
never
|
2
|
André
|
Ampère
|
423
|
piza
|
8
|
1950/07/08
|
gonna
|
3
|
NiELS
|
Böhr//((%%
|
551
|
pizza
|
8
|
1990/07/09
|
give
|
4
|
PAUL
|
dirac$
|
521
|
pizza
|
8
|
1954/07/10
|
you
|
5
|
Albert
|
Einstein
|
634
|
pizza
|
8
|
1990/07/11
|
up
|
6
|
Galileo
|
⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅GALiLEI
|
672
|
arepa
|
5
|
1930/08/12
|
never
|
7
|
CaRL
|
Ga%%%uss
|
323
|
taco
|
3
|
1970/07/13
|
gonna
|
8
|
David
|
H$$$ilbert
|
624
|
taaaccoo
|
3
|
1950/07/14
|
let
|
9
|
Johannes
|
KEPLER
|
735
|
taco
|
3
|
1920/04/22
|
you
|
10
|
JaMES
|
M$$ax%%well
|
875
|
taco
|
3
|
1923/03/12
|
down
|
# Put your db credentials here (Redis; `database` is the numeric DB index).
db = op.connect(
    driver="redis",
    host="165.227.196.70",
    port=6379,
    database=1,
    password="",
)
# Write the dataframe to Redis under key "hola1", using `id` as primary key.
db.df_to_table(df, "hola1", redis_primary_key="id")
INFO:optimus:`id`,`firstName`,`lastName`,`billingId`,`product`,`price`,`birth`,`dummyCol` column(s) was not processed because is/are not array,vector INFO:optimus:Outputting 0 columns after filtering. Is this expected? INFO:optimus:Using 'column_exp' to process column 'id' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'firstName' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'lastName' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'billingId' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'product' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'price' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'birth' with function _cast_to INFO:optimus:Using 'column_exp' to process column 'dummyCol' with function _cast_to
hola1
# https://stackoverflow.com/questions/56707978/how-to-write-from-a-pyspark-dstream-to-redis
# table_to_df expects a table-name string; passing the int 0 raised
# TypeError in count_query ("SELECT COUNT(*) ... FROM " + 0, str + int —
# see the traceback below). Read back the key written above instead.
db.table_to_df("hola1")
--------------------------------------------------------------------------- TypeError Traceback (most recent call last) <ipython-input-33-b3e61395c772> in <module> 1 # https://stackoverflow.com/questions/56707978/how-to-write-from-a-pyspark-dstream-to-redis 2 ----> 3 db.table_to_df(0) ~\Documents\Optimus\optimus\io\jdbc.py in table_to_df(self, table_name, columns, limit) 122 123 db_table = table_name --> 124 query = self.driver_context.count_query(db_table=db_table) 125 if limit == "all": 126 count = self.execute(query, "all").first()[0] ~\Documents\Optimus\optimus\io\driver_context.py in count_query(self, *args, **kwargs) 31 32 def count_query(self, *args, **kwargs) -> str: ---> 33 return self._driver.count_query(*args, **kwargs) ~\Documents\Optimus\optimus\io\sqlserver.py in count_query(self, *args, **kwargs) 24 25 def count_query(self, *args, **kwargs) -> str: ---> 26 return "SELECT COUNT(*) as COUNT FROM " + kwargs["db_table"] TypeError: can only concatenate str (not "int") to str