#include <aws/ec2/EC2_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSStreamFwd.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/ec2/model/InferenceDeviceInfo.h>

// Declared in namespace Aws::EC2::Model (aws/ec2/model/InferenceAcceleratorInfo.h).
// Describes the Inference accelerators available for an instance type.
class InferenceAcceleratorInfo
{
  public:
    AWS_EC2_API InferenceAcceleratorInfo();
    AWS_EC2_API InferenceAcceleratorInfo(const Aws::Utils::Xml::XmlNode& xmlNode);
    AWS_EC2_API InferenceAcceleratorInfo& operator=(const Aws::Utils::Xml::XmlNode& xmlNode);

    AWS_EC2_API void OutputToStream(Aws::OStream& ostream, const char* location, unsigned index, const char* locationValue) const;
    AWS_EC2_API void OutputToStream(Aws::OStream& oStream, const char* location) const;

    // The Inference accelerators for the instance type.
    const Aws::Vector<InferenceDeviceInfo>& GetAccelerators() const;
    bool AcceleratorsHasBeenSet() const;
    void SetAccelerators(const Aws::Vector<InferenceDeviceInfo>& value);
    void SetAccelerators(Aws::Vector<InferenceDeviceInfo>&& value);
    InferenceAcceleratorInfo& WithAccelerators(const Aws::Vector<InferenceDeviceInfo>& value);
    InferenceAcceleratorInfo& WithAccelerators(Aws::Vector<InferenceDeviceInfo>&& value);
    InferenceAcceleratorInfo& AddAccelerators(const InferenceDeviceInfo& value);
    InferenceAcceleratorInfo& AddAccelerators(InferenceDeviceInfo&& value);

    // Total inference accelerator memory for the instance type, in MiB.
    int GetTotalInferenceMemoryInMiB() const;
    bool TotalInferenceMemoryInMiBHasBeenSet() const;
    void SetTotalInferenceMemoryInMiB(int value);
    InferenceAcceleratorInfo& WithTotalInferenceMemoryInMiB(int value);

  private:
    Aws::Vector<InferenceDeviceInfo> m_accelerators;
    bool m_acceleratorsHasBeenSet = false;

    int m_totalInferenceMemoryInMiB;
    bool m_totalInferenceMemoryInMiBHasBeenSet = false;
};
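
// --- Usage sketch (not part of the header) ---
// A minimal illustration of the getter / HasBeenSet / fluent With-Add pattern
// declared above. The only assumption beyond this header is that
// InferenceDeviceInfo (whose header is included at the top) is default-constructible;
// the 8192 MiB figure is a made-up placeholder value.
#include <aws/ec2/model/InferenceAcceleratorInfo.h>
#include <iostream>

void PopulateAndReadBack()
{
  using Aws::EC2::Model::InferenceAcceleratorInfo;
  using Aws::EC2::Model::InferenceDeviceInfo;

  Aws::Vector<InferenceDeviceInfo> devices;
  devices.push_back(InferenceDeviceInfo());      // device fields would be set via its own setters

  InferenceAcceleratorInfo info;
  info.WithAccelerators(devices)                 // With* setters return *this, so they chain
      .WithTotalInferenceMemoryInMiB(8192);      // placeholder value, in MiB
  info.AddAccelerators(InferenceDeviceInfo());   // append one more device to the list

  if (info.AcceleratorsHasBeenSet() && info.TotalInferenceMemoryInMiBHasBeenSet())
  {
    std::cout << "accelerators: " << info.GetAccelerators().size()
              << ", total inference memory (MiB): " << info.GetTotalInferenceMemoryInMiB() << "\n";
  }
}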

// Related Aws type aliases behind the accessor signatures above (defined in the
// aws-core headers included at the top, inside namespace Aws):
template<typename T>
using Vector = std::vector<T, Aws::Allocator<T>>;

using OStream = std::basic_ostream<char, std::char_traits<char>>;
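
// --- Where this model typically appears (hedged sketch) ---
// InferenceAcceleratorInfo is normally read back from DescribeInstanceTypes results
// rather than constructed by hand. EC2Client, DescribeInstanceTypesRequest, the
// InstanceType enum, InstanceTypeInfo, and the InferenceDeviceInfo accessors used
// below come from the wider aws-sdk-cpp and are assumptions here, not declarations
// from this header.
#include <aws/core/Aws.h>
#include <aws/ec2/EC2Client.h>
#include <aws/ec2/model/DescribeInstanceTypesRequest.h>
#include <iostream>

int main()
{
  Aws::SDKOptions options;
  Aws::InitAPI(options);
  {
    Aws::EC2::EC2Client ec2;                     // default credential chain and region
    Aws::EC2::Model::DescribeInstanceTypesRequest request;
    request.AddInstanceTypes(Aws::EC2::Model::InstanceType::inf1_xlarge);

    auto outcome = ec2.DescribeInstanceTypes(request);
    if (outcome.IsSuccess())
    {
      for (const auto& type : outcome.GetResult().GetInstanceTypes())
      {
        const auto& accel = type.GetInferenceAcceleratorInfo();
        std::cout << "total inference memory (MiB): "
                  << accel.GetTotalInferenceMemoryInMiB() << "\n";
        for (const auto& device : accel.GetAccelerators())
        {
          std::cout << device.GetCount() << " x " << device.GetManufacturer()
                    << " " << device.GetName() << "\n";
        }
      }
    }
    else
    {
      std::cerr << outcome.GetError().GetMessage() << "\n";
    }
  }
  Aws::ShutdownAPI(options);
  return 0;
}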