kaggle==1.3.9 protobuf==3.20.*
# BERT text-preprocessing layer: wraps the TF-Hub preprocessor whose handle is
# stored under hparams['tfhub-bert-preprocessor']; maps raw strings into the
# tensor inputs the BERT encoder expects.
preprocessor = hub.KerasLayer(
    hparams['tfhub-bert-preprocessor'],
    name='preprocessing',
)
# BERT encoder layer loaded from TF-Hub (handle under
# hparams['tfhub-bert-encoder']); trainable=True so the encoder weights are
# fine-tuned along with the classification head.
encoder = hub.KerasLayer(
    hparams['tfhub-bert-encoder'],
    name='BERT_encoder',
    trainable=True,
)
# Record where the fine-tuned BERT sentiment classifier is saved locally so the
# custom training container can pick it up. A single-key item assignment is
# equivalent to the original one-entry dict update.
HPARAMS['model-dir'] = './bert-sentiment-classifier-local'
# Create a Docker-format Artifact Registry repository to hold the custom
# training container image. The --repository-format, --location and
# --description flags satisfy the command's required arguments.
# Documentation link: https://cloud.google.com/sdk/gcloud/reference/artifacts/repositories/create
!gcloud artifacts repositories create "{ARTIFACT_REGISTRY}" --repository-format=docker --location="{REGION}" --description="Repository for custom Docker images"
# Build the custom model container with Cloud Build and push it to the
# Artifact Registry: --config points at the cloudbuild.yaml inside MODEL_DIR,
# MODEL_DIR is passed as the build context, and --timeout=60m extends the
# default build time limit for the BERT image build.
# Documentation link: https://cloud.google.com/sdk/gcloud/reference/builds/submit
!gcloud builds submit --config {MODEL_DIR}/cloudbuild.yaml --timeout=60m {MODEL_DIR}
# NOTE(review): these are keyword arguments belonging to a call whose opening
# line is outside this chunk — presumably a Vertex AI training-pipeline /
# custom-training-job component from the pipeline definition; confirm against
# the enclosing call before editing.
display_name=display_name,  # human-readable name for the training job
container_uri=container_uri,  # custom training container image built above
model_serving_container_image_uri=model_serving_container_image_uri,  # serving image
base_output_dir=base_output_dir,  # output directory for training artifacts
# Reference the deployed Vertex AI Endpoint so it can serve online predictions.
# BUG FIX: the aiplatform Endpoint constructor's keyword is `project`, not
# `project_id` — the original call would raise
# "TypeError: __init__() got an unexpected keyword argument 'project_id'".
# (The hint above also lists the required variables as endpoint_name, project,
# location.)
endpoint = vertexai.Endpoint(
    endpoint_name=ENDPOINT_NAME,  # endpoint resource name or numeric ID
    project=PROJECT_ID,           # GCP project that owns the endpoint
    location=REGION,              # region where the endpoint is deployed
)
# Sample movie review used to smoke-test the deployed sentiment model.
test_review = "The Dark Knight is the best Batman movie!"
# Request an online prediction for the sample review from the deployed
# endpoint; each instance is sent as a {"content": <text>} dict.
request_instance = {"content": test_review}
prediction = endpoint.predict(instances=[request_instance])