

Important

For more information, see .

Note

import boto3
import sagemaker
from sagemaker.session import Session

sagemaker_session = sagemaker.Session()
region = sagemaker_session.boto_region_name
boto_session = boto3.Session(region_name=region)
role = sagemaker.get_execution_role()
default_bucket = sagemaker_session.default_bucket()
prefix = 'sagemaker-featurestore'
offline_feature_store_bucket = 's3://{}/{}'.format(default_bucket, prefix)

sagemaker_client = boto_session.client(service_name='sagemaker', region_name=region)
featurestore_runtime = boto_session.client(service_name='sagemaker-featurestore-runtime', region_name=region)

feature_store_session = Session(
    boto_session=boto_session,
    sagemaker_client=sagemaker_client,
    sagemaker_featurestore_runtime_client=featurestore_runtime
)

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import io

s3_client = boto3.client(service_name='s3', region_name=region)

fraud_detection_bucket_name = 'sagemaker-featurestore-fraud-detection'
identity_file_key = 'sampled_identity.csv'
transaction_file_key = 'sampled_transactions.csv'

identity_data_object = s3_client.get_object(Bucket=fraud_detection_bucket_name, Key=identity_file_key)
transaction_data_object = s3_client.get_object(Bucket=fraud_detection_bucket_name, Key=transaction_file_key)

identity_data = pd.read_csv(io.BytesIO(identity_data_object['Body'].read()))
transaction_data = pd.read_csv(io.BytesIO(transaction_data_object['Body'].read()))

identity_data = identity_data.round(5)
transaction_data = transaction_data.round(5)

identity_data = identity_data.fillna(0)
transaction_data = transaction_data.fillna(0)

# Feature transformations for this dataset are applied before ingestion into FeatureStore.
# One hot encode card4, card6
encoded_card_bank = pd.get_dummies(transaction_data['card4'], prefix = 'card_bank')
encoded_card_type = pd.get_dummies(transaction_data['card6'], prefix = 'card_type')

transformed_transaction_data = pd.concat([transaction_data, encoded_card_type, encoded_card_bank], axis=1)
transformed_transaction_data = transformed_transaction_data.rename(columns={"card_bank_american express": "card_bank_american_express"})

from sagemaker.feature_store.feature_group import FeatureGroup

feature_group_name = "some string for a name"
feature_group = FeatureGroup(name=feature_group_name, sagemaker_session=feature_store_session)

import time
from time import gmtime, strftime, sleep
from sagemaker.feature_store.feature_group import FeatureGroup

identity_feature_group_name = 'identity-feature-group-' + strftime('%d-%H-%M-%S', gmtime())
transaction_feature_group_name = 'transaction-feature-group-' + strftime('%d-%H-%M-%S', gmtime())

identity_feature_group = FeatureGroup(name=identity_feature_group_name, sagemaker_session=feature_store_session)
transaction_feature_group = FeatureGroup(name=transaction_feature_group_name, sagemaker_session=feature_store_session)

record_identifier_name = "TransactionID"
event_time_feature_name = "EventTime"
current_time_sec = int(round(time.time()))

identity_data[event_time_feature_name] = pd.Series([current_time_sec]*len(identity_data), dtype="float64")
transformed_transaction_data[event_time_feature_name] = pd.Series([current_time_sec]*len(transaction_data), dtype="float64")

identity_feature_group.load_feature_definitions(data_frame=identity_data); # output is suppressed
transaction_feature_group.load_feature_definitions(data_frame=transformed_transaction_data); # output is suppressed
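
The inferred schema can be checked before the feature groups are created. The following is a small sketch, assuming the SDK exposes the inferred definitions on the feature group's feature_definitions attribute; the slice of five is only for illustration.

# Inspect a few of the feature definitions inferred from the DataFrame (illustrative sketch).
for feature_definition in identity_feature_group.feature_definitions[:5]:
    print(feature_definition.feature_name, feature_definition.feature_type)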

# create a FeatureGroup
feature_group.create(
    description = "Some info about the feature group",
    feature_group_name = feature_group_name,
    record_identifier_name = record_identifier_name,
    event_time_feature_name = event_time_feature_name,
    feature_definitions = feature_definitions,
    role_arn = role,
    s3_uri = offline_feature_store_bucket,
    enable_online_store = True,
    online_store_kms_key_id = None,
    offline_store_kms_key_id = None,
    disable_glue_table_creation = False,
    data_catalog_config = None,
    tags = ["tag1","tag2"])

identity_feature_group.create(
    s3_uri=offline_feature_store_bucket,
    record_identifier_name=record_identifier_name,
    event_time_feature_name=event_time_feature_name,
    role_arn=role,
    enable_online_store=True
)

transaction_feature_group.create(
    s3_uri=offline_feature_store_bucket,
    record_identifier_name=record_identifier_name,
    event_time_feature_name=event_time_feature_name,
    role_arn=role,
    enable_online_store=True
)

status = feature_group.describe().get("FeatureGroupStatus")
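
Feature group creation runs asynchronously, so the status is reported as Creating until the group is ready. The following is a minimal polling sketch; the helper name wait_for_feature_group_creation_complete is hypothetical, and it uses the two feature groups created above.

def wait_for_feature_group_creation_complete(feature_group):
    # Poll describe() until the feature group leaves the Creating state.
    status = feature_group.describe().get("FeatureGroupStatus")
    while status == "Creating":
        print("Waiting for feature group creation...")
        sleep(5)
        status = feature_group.describe().get("FeatureGroupStatus")
    if status != "Created":
        raise RuntimeError(f"FeatureGroup {feature_group.name} failed to create: {status}")
    print(f"FeatureGroup {feature_group.name} successfully created.")

wait_for_feature_group_creation_complete(identity_feature_group)
wait_for_feature_group_creation_complete(transaction_feature_group)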


feature_group.describe()

sagemaker_client.list_feature_groups()
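
The list call also accepts filters. As a sketch, the following lists only feature groups whose names contain the prefix used in this walkthrough and prints their status; the substring value is just an example.

# List feature groups whose names contain a given substring (illustrative filter).
response = sagemaker_client.list_feature_groups(NameContains='transaction-feature-group')
for summary in response['FeatureGroupSummaries']:
    print(summary['FeatureGroupName'], summary['FeatureGroupStatus'])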

feature_group.ingest(
    data_frame=feature_data, max_workers=3, wait=True
)
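
Applied to this walkthrough, the same call ingests the prepared DataFrames into the two feature groups created earlier. This is a sketch; the max_workers values are arbitrary.

# Ingest the prepared DataFrames into the identity and transaction feature groups.
identity_feature_group.ingest(data_frame=identity_data, max_workers=3, wait=True)
transaction_feature_group.ingest(data_frame=transformed_transaction_data, max_workers=5, wait=True)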

record_identifier_value = str(2990130)

featurestore_runtime.get_record(FeatureGroupName=transaction_feature_group_name, RecordIdentifierValueAsString=record_identifier_value)

...
'Record': [{'FeatureName': 'TransactionID', 'ValueAsString': '2990130'},
  {'FeatureName': 'isFraud', 'ValueAsString': '0'},
  {'FeatureName': 'TransactionDT', 'ValueAsString': '152647'},
  {'FeatureName': 'TransactionAmt', 'ValueAsString': '75.0'},
  {'FeatureName': 'ProductCD', 'ValueAsString': 'H'},
  {'FeatureName': 'card1', 'ValueAsString': '4577'},
...
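
Several records can be read from the online store in a single request with the runtime client's batch_get_record operation. The following is a sketch; the second record identifier ('2990131') is a made-up example value.

# Fetch multiple records from the online store in one request (illustrative sketch).
response = featurestore_runtime.batch_get_record(
    Identifiers=[
        {
            'FeatureGroupName': transaction_feature_group_name,
            'RecordIdentifiersValueAsString': ['2990130', '2990131'],  # '2990131' is a hypothetical ID
        }
    ]
)
for record in response['Records']:
    print(record['RecordIdentifierValueAsString'], len(record['Record']), 'features')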

print(feature_group.as_hive_ddl())

Example output:

CREATE EXTERNAL TABLE IF NOT EXISTS sagemaker_featurestore.identity-feature-group-27-19-33-00 (
  TransactionID INT
  id_01 FLOAT
  id_02 FLOAT
  id_03 FLOAT
  id_04 FLOAT
  ...

identity_query = identity_feature_group.athena_query()
transaction_query = transaction_feature_group.athena_query()

identity_table = identity_query.table_name
transaction_table = transaction_query.table_name

# Athena query
query_string = 'SELECT * FROM "'+transaction_table+'" LEFT JOIN "'+identity_table+'" ON "'+transaction_table+'".transactionid = "'+identity_table+'".transactionid'

# run Athena query. The output is loaded to a Pandas dataframe.
dataset = pd.DataFrame()
identity_query.run(query_string=query_string, output_location='s3://'+default_bucket+'/query_results/')
identity_query.wait()
dataset = identity_query.as_dataframe()
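
As a hypothetical follow-up, the joined DataFrame can be written out and uploaded to the default bucket so it can be used as a training dataset; the file name and S3 key below are illustrative.

# Persist the joined dataset locally, then upload it to Amazon S3 (paths are illustrative).
dataset.to_csv('fraud_dataset.csv', index=False)
s3_client.upload_file('fraud_dataset.csv', default_bucket, prefix + '/training/fraud_dataset.csv')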

feature_group.delete()

identity_feature_group.delete()
transaction_feature_group.delete()