#!/usr/bin/env python
# coding: utf-8

# >### 🚩 *Create a free WhyLabs account to get more value out of whylogs!*
# >*Did you know you can store, visualize, and monitor whylogs profiles with the
# [WhyLabs Observability Platform](https://whylabs.ai/whylogs-free-signup?utm_source=whylogs-Github&utm_medium=whylogs-example&utm_campaign=Writing_Profiles)?
# Sign up for a [free WhyLabs account](https://whylabs.ai/whylogs-free-signup?utm_source=whylogs-Github&utm_medium=whylogs-example&utm_campaign=Writing_Profiles)
# to leverage the power of whylogs and WhyLabs together!*

# # Writing profiles - Local/S3
#
# [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/whylabs/whylogs/blob/mainline/python/examples/integrations/writers/Writing_Profiles.ipynb)
#
# Hello there! If you've come to this tutorial, perhaps you are wondering what
# you can do after you have generated your first (or maybe not the first)
# profile. A good practice is to store these profiles as *lightweight* files,
# which is one of the cool features `whylogs` brings to the table.
#
# Here we will check different flavors of writing, so you can decide which one
# meets your current needs. Shall we?

# ## Installing whylogs
#
# Let's first install whylogs, if you don't have it installed already:

# In[ ]:

# Note: you may need to restart the kernel to use updated packages.
import shutil

get_ipython().run_line_magic('pip', 'install whylogs')
# ## Creating simple profiles
#
# To get started, let's take a very simple example dataset and profile it.

# In[ ]:

import pandas as pd

data = {
    "col_1": [1.0, 2.2, 0.1, 1.2],
    "col_2": ["some", "text", "column", "example"],
    "col_3": [4, 2, 3, 5],
}
df = pd.DataFrame(data)

# In[ ]:

df.head()

# In[ ]:

import whylogs as why

profile_results = why.log(df)

# In[ ]:

type(profile_results)

# And now we can check its collected metrics by transforming it into a
# `DatasetProfileView`:

# In[ ]:

profile_view = profile_results.view()
profile_view.to_pandas()

# Cool!
# So now that we have a proper profile created, let's see how we can persist it
# as a file.

# ## Local writer
#
# The first and most straightforward way of persisting a `whylogs` profile as a
# file is to write it directly to your disk. Our API makes it possible with the
# following commands. You could either write it from the `ProfileResultSet`:

# In[ ]:

profile_results.writer("local").write(dest="my_profile.bin")

# If you want, you can also skip `dest` and pass an optional `base_dir`, which
# writes your profile (named by its timestamp) into that base directory.
# Let's see how:

# In[ ]:

import os

os.makedirs("my_directory", exist_ok=True)

# In[ ]:

profile_results.writer("local").option(base_dir="my_directory").write()

# Or from the `DatasetProfileView` directly, with a `path`:

# In[ ]:

profile_view.write("my_profile.bin")

# And if it couldn't get any more convenient, you can also use the **same logic
# for logging** to also write your profile, like:

# In[ ]:

why.write(profile=profile_view, base_dir="my_directory", filename="my_profile.bin")

# In[ ]:

os.listdir("./my_directory")

# And that's it! Now you can go ahead and decide where and how to store these
# profiles, for further inspection and guaranteeing your data and ML model
# pipelines are generating useful and quality data for your end users.
# Let's delete those files so we don't clutter our environment :)

# In[ ]:

os.remove("./my_profile.bin")
shutil.rmtree("./my_directory")

# ## s3 Writer
#
# From an enterprise perspective, it can be interesting to use `s3` buckets to
# store your profiles instead of manually deciding what to do with them from
# your local machine. And that is why we have created an integration to do just
# that!
#
# To keep this example simple, we won't use an actual cloud-based storage, but
# we will mock one with the `moto` library.
# This way, you can test this anywhere without worrying about credentials too
# much :) To keep `whylogs` as light as possible, and to let users extend as
# they need, we have made `s3` an extra dependency.
#
# So let's get started by creating this mocked `s3` bucket with the `moto`
# package.
#
# P.S.: if you haven't installed the `whylogs[s3]` extra dependency already,
# uncomment and run the cell below.

# In[ ]:

get_ipython().run_line_magic('pip', "install -q 'whylogs[s3]' moto")

# In[ ]:

import boto3
from moto import mock_s3
from moto.s3.responses import DEFAULT_REGION_NAME

BUCKET_NAME = "my_great_bucket"

mocks3 = mock_s3()
mocks3.start()
resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
resource.create_bucket(Bucket=BUCKET_NAME)

# Now that we have created our `s3` bucket we will already be able to
# communicate with the mocked storage object. A good practice here is to
# declare your access credentials as environment variables. For a production
# setting, this won't be persisted into code, but it gives you a sense of how
# to safely use our `s3` writer.

# In[ ]:

import os

os.environ["AWS_ACCESS_KEY_ID"] = "my_key_id"
os.environ["AWS_SECRET_ACCESS_KEY"] = "my_access_key"

# In[ ]:

profile_results.writer("s3").option(bucket_name=BUCKET_NAME).write()

# And you've done it! Seems too good to be true. How would I know if the
# profiles are there? 🤔 Well, let's investigate them.

# In[ ]:

s3_client = boto3.client("s3")
objects = s3_client.list_objects(Bucket=BUCKET_NAME)

# In[ ]:

objects.get("Name", [])

# In[ ]:

objects.get("Contents", [])

# And there we have it: our local `s3` bucket has our profile written!
# If we want to put our profile into a special "directory" - often referred to
# as a prefix - we can do the following instead:

# In[ ]:

profile_results.writer("s3").option(
    bucket_name=BUCKET_NAME,
    object_name=f"my_prefix/somewhere/profile_{profile_view.creation_timestamp}.bin"
).write()

# In[ ]:

objects = s3_client.list_objects(Bucket=BUCKET_NAME)
objects.get("Contents", [])

# ### Wrapping up the s3 objects
#
# Now let's close our connection to our mocked `s3` object.

# In[ ]:

mocks3.stop()

# And that's it, you have just written a profile to an s3 bucket!

# ## GCS Writer
#
# We will do the same exercise as before to demonstrate how we can upload
# profiles to a GCS bucket and then verify that they landed there. For that we
# will use the
# [GCP storage emulator library](https://github.com/oittaa/gcp-storage-emulator)
# to create a local endpoint.

# In[ ]:

get_ipython().run_line_magic('pip', "install -q gcp-storage-emulator 'whylogs[gcs]'")

# In[ ]:

import os
from google.cloud import storage  # type: ignore
from gcp_storage_emulator.server import create_server

# In[ ]:

import random
import socket


def find_free_port(preferred_port, min_port, max_port):
    """Return a TCP port on localhost that can be bound.

    Tries ``preferred_port`` first; otherwise probes the ports in
    ``[min_port, max_port]`` in random order.

    Raises:
        RuntimeError: if no port in the range is free. (The previous
            implementation looped forever with ``random.randint`` in that
            case, re-testing the same busy ports indefinitely.)
    """

    def is_port_free(port):
        # Binding succeeds only when the port is not already in use.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.bind(('localhost', port))
                return True
            except OSError:
                return False

    if is_port_free(preferred_port):
        return preferred_port

    # Probe every candidate exactly once, in random order, so the search is
    # bounded and cannot retry the same port.
    candidates = list(range(min_port, max_port + 1))
    random.shuffle(candidates)
    for port in candidates:
        if is_port_free(port):
            return port
    raise RuntimeError(
        f"No free port found in range {min_port}-{max_port}"
    )


HOST = "localhost"
PORT = find_free_port(9023, 9000, 9100)
GCS_BUCKET = "test-bucket"

server = create_server(HOST, PORT, in_memory=True, default_bucket=GCS_BUCKET)
server.start()

os.environ["STORAGE_EMULATOR_HOST"] = f"http://{HOST}:{PORT}"

client = storage.Client()
bucket = client.bucket(GCS_BUCKET)

for blob in bucket.list_blobs():
    content = blob.download_as_bytes()
    print(f"Blob [{blob.name}]: {content}")

# And this is empty, because we have just created our test bucket :)

# In[ ]:

from whylogs.api.writer.gcs import GCSWriter
# Point the client at (placeholder) credentials, then write the profile view
# to the emulated GCS bucket.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "path/to/credentials.json"

writer = GCSWriter()
writer.option(bucket_name=GCS_BUCKET, object_name="my_object.bin").write(file=profile_view)

# In[ ]:

# List the bucket again to confirm the profile landed there.
for blob in bucket.list_blobs():
    content = blob.download_as_bytes()
    print(f"Blob [{blob.name}]")

# In[ ]:

server.stop()

# If you want to check other integrations that we've made, please make sure to
# check out our
# [other examples](https://github.com/whylabs/whylogs/tree/mainline/python/examples)
# page.
#
# Hopefully this tutorial will help you get started saving your profiles, and
# keep your Data and ML Pipelines always Robust and Responsible :)