AWS SDK for C++ Version 1.11.610

TextInferenceConfig.h
#pragma once
#include <aws/bedrock/Bedrock_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>

namespace Aws
{
namespace Utils
{
namespace Json
{
  class JsonValue;
  class JsonView;
} // namespace Json
} // namespace Utils
namespace Bedrock
{
namespace Model
{

  /**
   * Configuration settings for text generation inference: sampling temperature,
   * top-p (nucleus) sampling, maximum token count, and stop sequences.
   */
  class TextInferenceConfig
  {
  public:
    AWS_BEDROCK_API TextInferenceConfig() = default;
    AWS_BEDROCK_API TextInferenceConfig(Aws::Utils::Json::JsonView jsonValue);
    AWS_BEDROCK_API TextInferenceConfig& operator=(Aws::Utils::Json::JsonView jsonValue);
    AWS_BEDROCK_API Aws::Utils::Json::JsonValue Jsonize() const;

    /**
     * The temperature used for sampling. A lower value yields more deterministic
     * output; a higher value yields more varied output.
     */
    inline double GetTemperature() const { return m_temperature; }
    inline bool TemperatureHasBeenSet() const { return m_temperatureHasBeenSet; }
    inline void SetTemperature(double value) { m_temperatureHasBeenSet = true; m_temperature = value; }
    inline TextInferenceConfig& WithTemperature(double value) { SetTemperature(value); return *this; }

    /**
     * The top-p (nucleus sampling) value: the model samples only from the smallest
     * set of tokens whose cumulative probability reaches this threshold.
     */
    inline double GetTopP() const { return m_topP; }
    inline bool TopPHasBeenSet() const { return m_topPHasBeenSet; }
    inline void SetTopP(double value) { m_topPHasBeenSet = true; m_topP = value; }
    inline TextInferenceConfig& WithTopP(double value) { SetTopP(value); return *this; }

    /**
     * The maximum number of tokens to generate in the response.
     */
    inline int GetMaxTokens() const { return m_maxTokens; }
    inline bool MaxTokensHasBeenSet() const { return m_maxTokensHasBeenSet; }
    inline void SetMaxTokens(int value) { m_maxTokensHasBeenSet = true; m_maxTokens = value; }
    inline TextInferenceConfig& WithMaxTokens(int value) { SetMaxTokens(value); return *this; }

    /**
     * Sequences of characters that, when generated, cause the model to stop
     * generating further tokens.
     */
    inline const Aws::Vector<Aws::String>& GetStopSequences() const { return m_stopSequences; }
    inline bool StopSequencesHasBeenSet() const { return m_stopSequencesHasBeenSet; }
    template<typename StopSequencesT = Aws::Vector<Aws::String>>
    void SetStopSequences(StopSequencesT&& value) { m_stopSequencesHasBeenSet = true; m_stopSequences = std::forward<StopSequencesT>(value); }
    template<typename StopSequencesT = Aws::Vector<Aws::String>>
    TextInferenceConfig& WithStopSequences(StopSequencesT&& value) { SetStopSequences(std::forward<StopSequencesT>(value)); return *this; }
    template<typename StopSequencesT = Aws::String>
    TextInferenceConfig& AddStopSequences(StopSequencesT&& value) { m_stopSequencesHasBeenSet = true; m_stopSequences.emplace_back(std::forward<StopSequencesT>(value)); return *this; }

  private:

    double m_temperature{0.0};
    bool m_temperatureHasBeenSet = false;

    double m_topP{0.0};
    bool m_topPHasBeenSet = false;

    int m_maxTokens{0};
    bool m_maxTokensHasBeenSet = false;

    Aws::Vector<Aws::String> m_stopSequences;
    bool m_stopSequencesHasBeenSet = false;
  };

} // namespace Model
} // namespace Bedrock
} // namespace Aws
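
A minimal usage sketch follows (not part of the header). It assumes the aws-cpp-sdk-core and aws-cpp-sdk-bedrock libraries are linked and that this header installs as aws/bedrock/model/TextInferenceConfig.h; the parameter values are illustrative only.

#include <aws/core/Aws.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/bedrock/model/TextInferenceConfig.h> // include path assumed from the SDK's standard layout
#include <iostream>

int main()
{
  Aws::SDKOptions options;
  Aws::InitAPI(options);
  {
    // Build a config with the fluent With*/Add* setters.
    Aws::Bedrock::Model::TextInferenceConfig config;
    config.WithTemperature(0.2)
          .WithTopP(0.9)
          .WithMaxTokens(512)
          .AddStopSequences("Observation:");

    // Jsonize() serializes only the fields whose *HasBeenSet flags are true.
    Aws::Utils::Json::JsonValue payload = config.Jsonize();
    std::cout << payload.View().WriteReadable() << std::endl;

    // Round-trip: the JsonView constructor repopulates a config from JSON.
    Aws::Bedrock::Model::TextInferenceConfig parsed(payload.View());
    std::cout << "maxTokens = " << parsed.GetMaxTokens() << std::endl;
  }
  Aws::ShutdownAPI(options);
  return 0;
}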