#!/usr/bin/env python
# coding: utf-8

# Auto-exported Jupyter notebook demonstrating the Optimus database
# connectors (MySQL, Postgres, MSSQL, Redshift, Oracle, SQLite, Redis)
# against demo hosts. Credentials below are throwaway demo values.

# In[1]:

get_ipython().run_line_magic('load_ext', 'autoreload')
get_ipython().run_line_magic('autoreload', '2')

import sys

sys.path.append("..")

from optimus import Optimus

# Create the Optimus session on a local Spark master.
op = Optimus(master="local", app_name="optimus", verbose=True)

# # Mysql

# In[2]:

# Put your db credentials here
db = op.connect(
    driver="mysql",
    host="165.227.196.70",
    database="optimus",
    user="test",
    password="test")

# In[3]:

db.tables()

# In[4]:

# db.execute("SHOW KEYS FROM test_data WHERE key_name = 'PRIMARY'")

# In[5]:

db.execute("SELECT * FROM test_data").ext.display()

# In[6]:

# Partitioned read: splits the query on the given column.
db.execute(
    "SELECT * FROM test_data",
    partition_column="id",
    table_name="test_data").ext.display()

# In[7]:

db.table_to_df("test_data", partition_column="id").ext.display()

# In[8]:

df = db.table_to_df("test_data", limit=None)

# In[9]:

db.tables_names_to_json()

# # Postgres

# In[10]:

# Put your db credentials here
db = op.connect(
    driver="postgresql",
    host="165.227.196.70",
    database="optimus",
    user="testuser",
    password="test")

# In[ ]:

db.tables()

# In[ ]:

db.table_to_df("test_data").table()

# In[69]:

db.tables_names_to_json()

# ## MSSQL

# In[70]:

# Put your db credentials here
db = op.connect(
    driver="sqlserver",
    host="165.227.196.70",
    database="optimus",
    user="test",
    password="test*0261")

# In[71]:

db.tables()

# In[72]:

db.table_to_df("test_data").table()

# In[73]:

db.tables_names_to_json()

# ## Redshift

# In[2]:

# Put your db credentials here
db = op.connect(
    driver="redshift",
    host="165.227.196.70",
    database="optimus",
    user="testuser",
    password="test")

# In[3]:

db.tables()

# In[ ]:

db.table_to_df("test_data").table()

# ## Oracle

# In[ ]:

# Put your db credentials here
db = op.connect(
    driver="oracle",
    host="165.227.196.70",
    database="optimus",
    user="testuser",
    password="test")

# ## SQLite

# In[53]:

# Put your db credentials here
# NOTE: for SQLite the "host" is the database file path.
db = op.connect(
    driver="sqlite",
    host="chinook.db",
    database="employes",
    user="testuser",
    password="test")

# In[54]:

db.tables()

# In[55]:

db.table_to_df("albums", limit="all").table()

# In[60]:

db.tables_names_to_json()

# ## Redis

# In[2]:

# Load a sample CSV to push into Redis.
df = op.load.csv(
    "https://raw.githubusercontent.com/ironmussa/Optimus/master/examples/data/foo.csv",
    sep=",",
    header='true',
    infer_schema='true',
    charset="UTF-8",
    null_value="None")

# In[3]:

df.table()

# In[30]:

# Put your db credentials here
db = op.connect(
    driver="redis",
    host="165.227.196.70",
    port=6379,
    database=1,
    password="")

# In[32]:

db.df_to_table(df, "hola1", redis_primary_key="id")

# In[33]:

# https://stackoverflow.com/questions/56707978/how-to-write-from-a-pyspark-dstream-to-redis
db.table_to_df(0)

# In[ ]: