AWS SDK for C++

AWS SDK for C++ Version 1.11.633

Loading...
Searching...
No Matches
PromptModelInferenceConfiguration.h
1
6#pragma once
7#include <aws/bedrock-agent/BedrockAgent_EXPORTS.h>
8#include <aws/core/utils/memory/stl/AWSVector.h>
9#include <aws/core/utils/memory/stl/AWSString.h>
10#include <utility>
11
12namespace Aws
13{
14namespace Utils
15{
16namespace Json
17{
18 class JsonValue;
19 class JsonView;
20} // namespace Json
21} // namespace Utils
22namespace BedrockAgent
23{
24namespace Model
25{
26
36 {
37 public:
38 AWS_BEDROCKAGENT_API PromptModelInferenceConfiguration() = default;
41 AWS_BEDROCKAGENT_API Aws::Utils::Json::JsonValue Jsonize() const;
42
43
45
49 inline double GetTemperature() const { return m_temperature; }
50 inline bool TemperatureHasBeenSet() const { return m_temperatureHasBeenSet; }
51 inline void SetTemperature(double value) { m_temperatureHasBeenSet = true; m_temperature = value; }
52 inline PromptModelInferenceConfiguration& WithTemperature(double value) { SetTemperature(value); return *this;}
54
56
60 inline double GetTopP() const { return m_topP; }
61 inline bool TopPHasBeenSet() const { return m_topPHasBeenSet; }
62 inline void SetTopP(double value) { m_topPHasBeenSet = true; m_topP = value; }
63 inline PromptModelInferenceConfiguration& WithTopP(double value) { SetTopP(value); return *this;}
65
67
70 inline int GetMaxTokens() const { return m_maxTokens; }
71 inline bool MaxTokensHasBeenSet() const { return m_maxTokensHasBeenSet; }
72 inline void SetMaxTokens(int value) { m_maxTokensHasBeenSet = true; m_maxTokens = value; }
73 inline PromptModelInferenceConfiguration& WithMaxTokens(int value) { SetMaxTokens(value); return *this;}
75
77
81 inline const Aws::Vector<Aws::String>& GetStopSequences() const { return m_stopSequences; }
82 inline bool StopSequencesHasBeenSet() const { return m_stopSequencesHasBeenSet; }
83 template<typename StopSequencesT = Aws::Vector<Aws::String>>
84 void SetStopSequences(StopSequencesT&& value) { m_stopSequencesHasBeenSet = true; m_stopSequences = std::forward<StopSequencesT>(value); }
85 template<typename StopSequencesT = Aws::Vector<Aws::String>>
86 PromptModelInferenceConfiguration& WithStopSequences(StopSequencesT&& value) { SetStopSequences(std::forward<StopSequencesT>(value)); return *this;}
87 template<typename StopSequencesT = Aws::String>
88 PromptModelInferenceConfiguration& AddStopSequences(StopSequencesT&& value) { m_stopSequencesHasBeenSet = true; m_stopSequences.emplace_back(std::forward<StopSequencesT>(value)); return *this; }
90 private:
91
92 double m_temperature{0.0};
93 bool m_temperatureHasBeenSet = false;
94
95 double m_topP{0.0};
96 bool m_topPHasBeenSet = false;
97
98 int m_maxTokens{0};
99 bool m_maxTokensHasBeenSet = false;
100
101 Aws::Vector<Aws::String> m_stopSequences;
102 bool m_stopSequencesHasBeenSet = false;
103 };
104
105} // namespace Model
106} // namespace BedrockAgent
107} // namespace Aws
AWS_BEDROCKAGENT_API PromptModelInferenceConfiguration() = default
PromptModelInferenceConfiguration & WithStopSequences(StopSequencesT &&value)
PromptModelInferenceConfiguration & WithTemperature(double value)
PromptModelInferenceConfiguration & AddStopSequences(StopSequencesT &&value)
AWS_BEDROCKAGENT_API PromptModelInferenceConfiguration & operator=(Aws::Utils::Json::JsonView jsonValue)
AWS_BEDROCKAGENT_API PromptModelInferenceConfiguration(Aws::Utils::Json::JsonView jsonValue)
AWS_BEDROCKAGENT_API Aws::Utils::Json::JsonValue Jsonize() const
std::vector< T, Aws::Allocator< T > > Vector
Aws::Utils::Json::JsonValue JsonValue