# !pip install geemap scikit-learn

import os

import ee
import geemap
import pandas as pd
from geemap import ml
from sklearn import ensemble

geemap.ee_initialize()

# ---------------------------------------------------------------------------
# 1. Train a local scikit-learn random forest and convert it for Earth Engine.
# ---------------------------------------------------------------------------

# Read the feature table used to train our RandomForest model.
# Data taken from ee.FeatureCollection('GOOGLE/EE/DEMOS/demo_landcover_labels').
url = "https://raw.githubusercontent.com/gee-community/geemap/master/examples/data/rf_example.csv"
df = pd.read_csv(url)
df  # notebook-style display of the training table

# Specify the names of the features (i.e. band names) and the label column.
# The feature names define which bands are extracted as predictors.
feature_names = ["B2", "B3", "B4", "B5", "B6", "B7"]
label = "landcover"

# Split the table into predictors (X) and target (y).
X = df[feature_names]
y = df[label]

# Create a classifier and fit it to the training data.
n_trees = 10
rf = ensemble.RandomForestClassifier(n_trees).fit(X, y)

# Convert the estimator into a list of decision-tree strings.
# This function also works with the ensemble.ExtraTrees estimator.
trees = ml.rf_to_strings(rf, feature_names)

# Print the first trees to see the result.
print(trees[0])
print(trees[1])

# Sanity check: the number of trees converted should equal the number
# of trees we defined for the model (notebook-style display expression).
len(trees) == n_trees

# Create an ee classifier, usable with ee objects, from the tree strings.
ee_classifier = ml.strings_to_classifier(trees)
# ee_classifier.getInfo()

# ---------------------------------------------------------------------------
# 2. Classify a Landsat 8 composite with the locally trained model.
# ---------------------------------------------------------------------------

# Make a cloud-free Landsat 8 TOA composite (from raw imagery).
# NOTE(review): the Collection 1 dataset id ("C01") has been retired by
# Earth Engine; the Collection 2 equivalent ("LANDSAT/LC08/C02/T1") may be
# required for this to keep working — confirm before relying on it.
l8 = ee.ImageCollection("LANDSAT/LC08/C01/T1")
image = ee.Algorithms.Landsat.simpleComposite(
    collection=l8.filterDate("2018-01-01", "2018-12-31"), asFloat=True
)

# Classify the image using the classifier we created from the local training.
# Note: selecting feature_names tells the classifier which bands to use.
classified = image.select(feature_names).classify(ee_classifier)

# Display the composite and its classification.
Map = geemap.Map(center=(37.75, -122.25), zoom=11)
Map.addLayer(
    image,
    {"bands": ["B7", "B5", "B3"], "min": 0.05, "max": 0.55, "gamma": 1.5},
    "image",
)
Map.addLayer(
    classified,
    {"min": 0, "max": 2, "palette": ["red", "green", "blue"]},
    "classification",
)
Map  # notebook-style display of the map

# ---------------------------------------------------------------------------
# 3. Persist the trees as an EE asset and rebuild the classifier from it.
# ---------------------------------------------------------------------------

user_id = geemap.ee_user_id()
user_id  # notebook-style display

# Specify the asset id where the trees will be saved.
# Be sure to change this to your own EE user name.
asset_id = user_id + "/random_forest_strings_test"
asset_id  # notebook-style display

# Kick off an export process so the trees are saved to the EE asset.
# This starts an export task, so wait a few minutes before moving on.
ml.export_trees_to_fc(trees, asset_id)

# Read the exported tree feature collection back.
rf_fc = ee.FeatureCollection(asset_id)

# Convert it to a classifier — very similar to ml.strings_to_classifier above.
another_classifier = ml.fc_to_classifier(rf_fc)

# Classify the image again, now using the classifier from the persisted trees.
classified = image.select(feature_names).classify(another_classifier)

# Display the results — we should get exactly the same classification as before.
Map = geemap.Map(center=(37.75, -122.25), zoom=11)
Map.addLayer(
    image,
    {"bands": ["B7", "B5", "B3"], "min": 0.05, "max": 0.55, "gamma": 1.5},
    "image",
)
Map.addLayer(
    classified,
    {"min": 0, "max": 2, "palette": ["red", "green", "blue"]},
    "classification",
)
Map  # notebook-style display of the map

# ---------------------------------------------------------------------------
# 4. Round-trip the trees through a local CSV file.
# ---------------------------------------------------------------------------

out_csv = os.path.expanduser("~/Downloads/trees.csv")
ml.trees_to_csv(trees, out_csv)

another_classifier = ml.csv_to_classifier(out_csv)
classified = image.select(feature_names).classify(another_classifier)

# Display the results — again identical to the previous classifications.
Map = geemap.Map(center=(37.75, -122.25), zoom=11)
Map.addLayer(
    image,
    {"bands": ["B7", "B5", "B3"], "min": 0.05, "max": 0.55, "gamma": 1.5},
    "image",
)
Map.addLayer(
    classified,
    {"min": 0, "max": 2, "palette": ["red", "green", "blue"]},
    "classification",
)
Map  # notebook-style display of the map