ONNXPredict using a GLM model.¶
Setup¶
In [ ]:
# Import required libraries.
import tempfile
import getpass
from teradataml import DataFrame, load_example_data, create_context, \
db_drop_table, remove_context, save_byom, retrieve_byom, delete_byom, list_byom
from teradataml.options.configure import configure
In [ ]:
# Create the connection.
host = getpass.getpass("Host: ")
username = getpass.getpass("Username: ")
password = getpass.getpass("Password: ")
con = create_context(host=host, username=username, password=password)
Load the example data and use sample() to split the input data into training and testing datasets.¶
In [ ]:
# Load example data.
load_example_data("byom", "iris_input")
In [ ]:
iris_input = DataFrame("iris_input")
In [ ]:
# Create 2 samples of input data - sample 1 will have 80% of total rows and sample 2 will have 20% of total rows.
iris_sample = iris_input.sample(frac=[0.8, 0.2])
iris_sample
Out[ ]:
id | sepal_length | sepal_width | petal_length | petal_width | species | sampleid |
---|---|---|---|---|---|---|
78 | 6.7 | 3.0 | 5.0 | 1.7 | 2 | 2 |
141 | 6.7 | 3.1 | 5.6 | 2.4 | 3 | 1 |
17 | 5.4 | 3.9 | 1.3 | 0.4 | 1 | 1 |
40 | 5.1 | 3.4 | 1.5 | 0.2 | 1 | 2 |
120 | 6.0 | 2.2 | 5.0 | 1.5 | 3 | 1 |
122 | 5.6 | 2.8 | 4.9 | 2.0 | 3 | 1 |
19 | 5.7 | 3.8 | 1.7 | 0.3 | 1 | 1 |
59 | 6.6 | 2.9 | 4.6 | 1.3 | 2 | 1 |
80 | 5.7 | 2.6 | 3.5 | 1.0 | 2 | 1 |
101 | 6.3 | 3.3 | 6.0 | 2.5 | 3 | 1 |
In [ ]:
# Create the train dataset from sample 1 by filtering on "sampleid" and dropping the "sampleid" column, as it is not needed for training the model.
iris_train = iris_sample[iris_sample.sampleid == "1"].drop("sampleid", axis=1)
iris_train
Out[ ]:
id | sepal_length | sepal_width | petal_length | petal_width | species |
---|---|---|---|---|---|
76 | 6.6 | 3.0 | 4.4 | 1.4 | 2 |
141 | 6.7 | 3.1 | 5.6 | 2.4 | 3 |
17 | 5.4 | 3.9 | 1.3 | 0.4 | 1 |
40 | 5.1 | 3.4 | 1.5 | 0.2 | 1 |
120 | 6.0 | 2.2 | 5.0 | 1.5 | 3 |
19 | 5.7 | 3.8 | 1.7 | 0.3 | 1 |
99 | 5.1 | 2.5 | 3.0 | 1.1 | 2 |
36 | 5.0 | 3.2 | 1.2 | 0.2 | 1 |
80 | 5.7 | 2.6 | 3.5 | 1.0 | 2 |
101 | 6.3 | 3.3 | 6.0 | 2.5 | 3 |
In [ ]:
# Create the test dataset from sample 2 by filtering on "sampleid" and dropping the "sampleid" column, as it is not needed for scoring.
iris_test = iris_sample[iris_sample.sampleid == "2"].drop("sampleid", axis=1)
iris_test
Out[ ]:
id | sepal_length | sepal_width | petal_length | petal_width | species |
---|---|---|---|---|---|
1 | 5.1 | 3.5 | 1.4 | 0.2 | 1 |
89 | 5.6 | 3.0 | 4.1 | 1.3 | 2 |
106 | 7.6 | 3.0 | 6.6 | 2.1 | 3 |
57 | 6.3 | 3.3 | 4.7 | 1.6 | 2 |
135 | 6.1 | 2.6 | 5.6 | 1.4 | 3 |
59 | 6.6 | 2.9 | 4.6 | 1.3 | 2 |
99 | 5.1 | 2.5 | 3.0 | 1.1 | 2 |
36 | 5.0 | 3.2 | 1.2 | 0.2 | 1 |
118 | 7.7 | 3.8 | 6.7 | 2.2 | 3 |
13 | 4.8 | 3.0 | 1.4 | 0.1 | 1 |
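Optionally, sanity-check the split sizes before training. The cell below is a minimal sketch; it assumes the teradataml DataFrame shape property is available in the installed version.¶
In [ ]:
# Optional: verify the approximate 80/20 split of the sampled data.
print("train (rows, columns):", iris_train.shape)
print("test (rows, columns):", iris_test.shape)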
Prepare the dataset for creating a GLM model.¶
In [ ]:
# Convert the teradataml DataFrame to a pandas DataFrame.
# features : names of the feature columns used for training.
# target   : name of the target column.
train_pd = iris_train.to_pandas()
features = train_pd.columns.drop('species')
target = 'species'
Train the model.¶
In [ ]:
# Import required libraries.
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler
In [ ]:
# Generate the GLM model.
glm_pipe_obj = Pipeline([
('scaler', StandardScaler()),
("glm", LogisticRegression(random_state=0, solver="newton-cg", C=2.0))
])
In [ ]:
glm_pipe_obj.fit(train_pd[features], train_pd[target])
Out[ ]:
Pipeline(steps=[('scaler', StandardScaler()), ('glm', LogisticRegression(C=2.0, random_state=0, solver='newton-cg'))])
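Optionally, sanity-check the fitted pipeline on the held-out 20% sample before exporting it. This is a minimal sketch and not part of the original flow; it assumes iris_test pulls down with the same feature columns as the training frame.¶
In [ ]:
# Optional local check: score the fitted pipeline on the held-out rows.
test_pd = iris_test.to_pandas()
print("held-out accuracy:", glm_pipe_obj.score(test_pd[features], test_pd[target]))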
Save the model in ONNX format.¶
In [ ]:
# Import required libraries.
from skl2onnx import to_onnx
In [ ]:
# Create a temporary file path to save the model.
temp_dir = tempfile.TemporaryDirectory()
model_file_path = f"{temp_dir.name}/iris_db_glm_model.onnx"
In [ ]:
# Convert the fitted pipeline to ONNX; the first four (feature) columns, cast to float32, define the input signature.
onx = to_onnx(glm_pipe_obj, train_pd.iloc[:, :4].astype(np.float32))
In [ ]:
# Serialize the ONNX model to the temporary file.
with open(model_file_path, "wb") as f:
    f.write(onx.SerializeToString())
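Optionally, validate the exported file locally before uploading it to Vantage. A minimal sketch; it assumes the onnxruntime package is installed, which the rest of this notebook does not require.¶
In [ ]:
# Optional local check of the exported ONNX model.
import onnxruntime as rt

sess = rt.InferenceSession(model_file_path, providers=["CPUExecutionProvider"])
input_name = sess.get_inputs()[0].name
sample = train_pd[features].head(5).to_numpy(dtype=np.float32)
# Returns the predicted labels and class probabilities for the sample rows.
print(sess.run(None, {input_name: sample}))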
Save the model in Vantage.¶
In [ ]:
# Save the ONNX model in Vantage.
save_byom("onnx_glm_iris", model_file_path, "byom_models")
Created the model table 'byom_models' as it does not exist. Model is saved.
In [ ]:
# List the ONNX models in Vantage.
list_byom("byom_models")
                                 model
model_id
onnx_glm_iris  b'8081208736B6C326F...'
Retrieve the model from Vantage.¶
In [ ]:
# Retrieve the model from table "byom_models", using the model id 'onnx_glm_iris'.
modeldata = retrieve_byom("onnx_glm_iris", "byom_models")
In [ ]:
# Set the database where the BYOM functions (such as ONNXPredict) are installed.
configure.byom_install_location = getpass.getpass("byom_install_location: ")
Score the model.¶
In [ ]:
# Import required libraries.
from teradataml import ONNXPredict
In [ ]:
# Perform prediction using ONNXPredict() and the ONNX model stored in Vantage.
predict_output = ONNXPredict(
modeldata = modeldata,
newdata = iris_test,
accumulate = ['id', 'sepal_length', 'petal_length'],
overwrite_cached_models = '*',
model_output_fields = "output_label"
)
In [ ]:
# Print the query.
print(predict_output.show_query())
SELECT * FROM "mldb".ONNXPredict( ON "MLDB"."ml__select__1666872686152360" AS InputTable PARTITION BY ANY ON (select model_id,model from "MLDB"."ml__filter__1666872276184695") AS ModelTable DIMENSION USING Accumulate('id','sepal_length','petal_length') ModelOutputFields('output_label') OverwriteCachedModel('*') ) as sqlmr
In [ ]:
# Print the result.
predict_output.result
Out[ ]:
id | sepal_length | petal_length | output_label |
---|---|---|---|
18 | 5.1 | 1.4 | [1] |
17 | 5.4 | 1.3 | [1] |
34 | 5.5 | 1.4 | [1] |
80 | 5.7 | 3.5 | [2] |
35 | 4.9 | 1.5 | [1] |
99 | 5.1 | 3.0 | [2] |
97 | 5.7 | 4.2 | [2] |
125 | 6.7 | 5.7 | [3] |
112 | 6.4 | 5.3 | [3] |
141 | 6.7 | 5.6 | [3] |
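Optionally, compare the predicted labels against the actual species client-side. This is a rough local sketch, not part of the original flow; it assumes output_label comes back as a bracketed string such as '[1]' (as in the sample output above) and that the data is small enough to pull into pandas.¶
In [ ]:
# Optional accuracy check: join predictions with the actual species on "id".
pred_pd = predict_output.result.to_pandas().reset_index()
actual_pd = iris_test.to_pandas().reset_index()
compare = pred_pd.merge(actual_pd[["id", "species"]], on="id")
# Strip the brackets from output_label before comparing to the integer species.
compare["predicted"] = compare["output_label"].astype(str).str.strip("[]").astype(int)
print("accuracy:", (compare["predicted"] == compare["species"]).mean())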
Cleanup.¶
In [ ]:
# Delete the model from table "byom_models", using the model id 'onnx_glm_iris'.
delete_byom("onnx_glm_iris", "byom_models")
Model is deleted.
In [ ]:
db_drop_table("byom_models")
Out[ ]:
True
In [ ]:
db_drop_table("iris_input")
Out[ ]:
True
In [ ]:
# Remove the context and close the connection.
remove_context()
Out[ ]:
True