Download Kqlmagic from PyPI and install/update it (if the latest version is already installed, you can skip this step)
# Uncomment to install/upgrade Kqlmagic from PyPI inside the notebook.
#!pip install Kqlmagic --no-cache-dir --upgrade
KQLMAGIC_NOTEBOOK_APP - set the notebook application (default jupyternotebook)
KQLMAGIC_LOAD_MODE - set the kqlmagic load mode (default verbose). If silent, it will load without welcome output
KQLMAGIC_CONFIGURATION - modify the default configuration, key=value pairs must be separated by a semicolon
KQLMAGIC_CONNECTION_STR - set the default connection string (default None). If set it will connect to it on Kqlmagic load
# Kqlmagic reads these environment variables at load time, so set them BEFORE %reload_ext.
# you can try it within the notebook, by setting the environment variable using %env magic
#%env KQLMAGIC_NOTEBOOK_APP=jupyterlab
#%env KQLMAGIC_LOAD_MODE=silent
#%env KQLMAGIC_CONFIGURATION="show_query_time=False;plot_package='plotly';display_limit=100"
#%env KQLMAGIC_CONFIGURATION="show_init_banner=True;check_magic_version=False;show_what_new=False"
#%env KQLMAGIC_CONNECTION_STR=AzureDataExplorer://username='michabin@microsoft.com';cluster='help';database='Samples'
#%env KQLMAGIC_CONNECTION_STR=AzureDataExplorer://code;cluster='help';database='Samples'
# Logging settings: level, file open mode, file name and prefix.
#%env KQLMAGIC_LOG_LEVEL=DEBUG
#%env KQLMAGIC_LOG_FILE_MODE=Append
#%env KQLMAGIC_LOG_FILE=michael.log
#%env KQLMAGIC_LOG_FILE_PREFIX=myLog
#%env KQLMAGIC_DEVICE_CODE_NOTIFICATION_EMAIL=SMTPEndPoint='endpoint';SMTPPort='port';sendFrom='from';sendFromPassword='password';sendTo='to';context='text'
# For more information about configuration options, you can run the following command:
#%kql --config
# Load (or reload) the Kqlmagic extension into the current IPython session.
%reload_ext Kqlmagic
# Open general help in a popup window; show Azure Data Explorer help inline.
%kql --help "help" -popup_window
%kql --help "AzureDataExplorer"
There are a few options to authenticate with Azure Data Explorer (Kusto):
# Device-code (interactive AAD) login:
%kql azure_data-Explorer://code;cluster='<cluster-name>';database='<database-name>';alias='<database-friendly-name>'
# AAD application id + secret:
%kql azure_data-Explorer://tenant='<tenant-id>';clientid='<aad-appid>';clientsecret='<aad-appkey>';cluster='<cluster-name>';database='<database-name>';alias='<database-friendly-name>'
# AAD application certificate (inline certificate + thumbprint):
%kql azure_data-Explorer://tenant='<tenant-id>';certificate='<certificate>';certificate_thumbprint='<thumbprint>';cluster='<cluster-name>';database='<database-name>';alias='<database-friendly-name>'
# AAD application certificate loaded from a PEM file:
%kql azure_data-Explorer://tenant='<tenant-id>';certificate_pem_file='<pem_filename>';certificate_thumbprint='<thumbprint>';cluster='<cluster-name>';database='<database-name>';alias='<database-friendly-name>'
# Username/password login:
%kql azure_data-Explorer://username='<username>';password='<password>';cluster='<cluster-name>';database='<database-name>';alias='<database-friendly-name>'
# Anonymous (no authentication; see note 9 below):
%kql azureDataExplorer://anonymous;cluster='<cluster-name>';database='<database-name>';alias='<database-friendly-name>'
Notes:
(1) username/password authentication works only on a corporate network.
(2) alias is optional.
(3) if credentials are missing and a previous connection was established, the credentials will be inherited.
(4) if the secret (password / clientsecret / thumbprint) is missing, the user will be prompted to provide it.
(5) if the cluster is missing and a previous connection was established, the cluster will be inherited.
(6) if the tenant is missing and a previous connection was established, the tenant will be inherited.
(7) if only the database changes, a new connection can be set as follows:
<new-database-name>@<cluster-name>
(8) an unquoted value is a Python expression that is evaluated, and its result is used as the value. This is how you can parametrize the connection string.
(9) anonymous authentication is NO authentication; use it when your cluster is local.
# Connect to the 'help' cluster / 'Samples' database via device-code login.
# (The trailing // comment shows the optional -try_azcli_login flag.)
%kql azureDataExplorer://code;cluster='help';database='Samples' // -try_azcli_login
# Show connection-string help in a popup window.
%kql --help "conn" -popup_window
# Query: top 10 states by storm-event count.
%kql StormEvents | summarize count() by State | sort by count_ | limit 10
# `_` is IPython's last displayed result — here, the Kqlmagic result object.
_
# Convert the result to a pandas DataFrame and explore it.
df = _.to_dataframe()
df
df.head()
df.State
df.min()
df.to_json()
df.columns
# NOTE: `_` now refers to the previous expression's value (df.columns), not the query result.
_
# `_kql_raw_result_` always holds the most recent Kqlmagic query result.
_kql_raw_result_
_
# Run with extra options (-sql, -qld='Kusto.Explorer') — see %kql --help for their exact meaning.
%kql -sql -qld='Kusto.Explorer' StormEvents | summarize count() by State | sort by count_ | limit 10 | render piechart title='my apple pie'
# The `<<` operator assigns the query result object to a Python variable.
%kql my_bar_chart << StormEvents | summarize count() by State | sort by count_ | limit 10 | render barchart title='my bar chart'
my_bar_chart
my_bar_chart.show_table()
# Result objects support row indexing and slicing; rows support column access by name.
my_bar_chart[1]
my_bar_chart[1][1:]
my_bar_chart[1]['State']
my_bar_chart[1:5]
# %kql as an expression: capture the result directly in a Python variable.
storm_events = %kql StormEvents | project StartTime, State, StormSummary | limit 3
storm_events
storm_events[1:3]
# Access cells by row index and column name; nested dynamic values (StormSummary)
# can be drilled into like dicts.
storm_events[1]['StartTime']
storm_events[1]['State']
storm_events[1]['StormSummary']
storm_events[1]['StormSummary']['Details']
storm_events[1]['StormSummary']['Details']['Description']
# The same drill-down works after converting to a DataFrame.
df = storm_events.to_dataframe()
df
df['StormSummary'][0]['Details']['Description']
df['StartTime'][1]
%kql bar_chart << StormEvents | summarize count() by State | sort by count_ | limit 10 | render barchart title='my bar chart'
# print() shows the textual representation; display() renders the rich (chart) output.
print(bar_chart)
%kql pie_chart << StormEvents | summarize count() by State | sort by count_ | limit 10 | render piechart title='my pie chart'
display(bar_chart)
display(pie_chart)
%%kql
// Cell magic: the whole cell body is a single KQL query.
StormEvents
| summarize count() by State
| sort by count_
| limit 10
| render columnchart title='my column chart'
# Show the result as a table inline, or in a popup window.
_.show_table()
_.popup()
_kql_raw_result_.popup_table()
%%kql
// Count per state plus its square, rendered as a pie chart.
StormEvents
| summarize count() by State
| sort by count_
| extend count2 = count_*count_
| limit 10
| render piechart title='my pie chart #2'
# Export the result as a dict of columns, or to a CSV file.
dict_of_columns = _kql_raw_result_.to_dict()
print(dict_of_columns)
_kql_raw_result_.to_csv('csv_file.csv')
#%cat csv_file.csv
# Metadata properties available on the result object:
_kql_raw_result_.query
_kql_raw_result_.title
_kql_raw_result_.visualization
_kql_raw_result_.records_count
_kql_raw_result_.columns_name
_kql_raw_result_.start_time
_kql_raw_result_.end_time
_kql_raw_result_.elapsed_timespan
_kql_raw_result_.connection
_kql_raw_result_.options
_kql_raw_result_.completion_query_info
_kql_raw_result_.completion_query_resource_consumption
_kql_raw_result_.completion_query_resource_consumption['resource_usage']['memory']['peak_per_node']
_kql_raw_result_.dataSetCompletion
# The raw Kusto response (JSON) is also accessible and indexable.
_kql_raw_result_.raw_json
_kql_raw_result_.raw_json[0]['Version']
_kql_raw_result_.raw_json[1]['Columns']
_kql_raw_result_.raw_json[1]['Columns'][1]['ColumnType']
%%kql -c2lv
// -c2lv: presumably "columns to local vars" — exposes each result column as a
// Python variable (confirm via %kql --help).
StormEvents
| summarize count() by State
| sort by count_
| limit 10
| render columnchart title='my column chart'
# The result columns are now available as local variables:
State
count_
%%kql -!feedback
// -!feedback: presumably negates the feedback option for this query only (confirm via %kql --help).
StormEvents
| summarize count() by State
| sort by count_
| limit 10
| render barchart title='my bar chart'
# Inspect, then disable, then re-inspect the global feedback setting.
%config Kqlmagic.feedback
%config Kqlmagic.feedback=False
%config Kqlmagic.feedback
%%kql
// Run a query while feedback is disabled (see previous %config cell).
StormEvents
| summarize count() by State
| sort by count_
| extend count2 = count_*count_
| limit 5
# Restore feedback, then list all Kqlmagic configuration options.
%config Kqlmagic.feedback=True
%config Kqlmagic.feedback
%config Kqlmagic
%%kql -pw
// -pw: presumably popup-window output (confirm via %kql --help).
// randn(): sum of 14 uniform rand() values — approximately normal by the central limit theorem.
let randn = () {rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand() + rand()};
range c from 1 to 1000 step 1
| limit 1000
| extend e1 = randn()
| extend e2 = randn()
| extend e3 = randn()
| extend e4 = randn()
| extend e5 = randn()
| project c, e1,e2,e3,e4,e5
| render scatterchart title="Michael's chart"
# limit 0 returns an empty result set (schema only, no rows).
%kql StormEvents | summarize count() by State | sort by count_ | extend count2 = count_*count_ | limit 0
%%kql
// Same empty result via cell magic, with a render clause.
StormEvents
| summarize count() by State
| sort by count_
| extend count2 = count_ * count_
| limit 0
| render piechart title='my chart'
# How the result object behaves when it contains zero records:
len(_kql_raw_result_)
_kql_raw_result_.popup()
_kql_raw_result_.show_table()
df = _kql_raw_result_.to_dataframe()
df
df.to_json()
_kql_raw_result_.raw_json
%%kql
// fork produces multiple result tables from a single query.
StormEvents
| summarize count() by State
| sort by count_
| extend count2 = count_ * count_
| fork (limit 10) (project State | limit 5)
# Access the second fork result (index 1).
_kql_raw_result_.fork_result(1)
# submit() re-sends the query; refresh() presumably re-executes and updates the
# result in place — confirm both via %kql --help.
_kql_raw_result_.submit()
_kql_raw_result_.fork_result(1)
_kql_raw_result_.fork_result(1).refresh()
_kql_raw_result_.fork_result(0)
_kql_raw_result_