// =================================================================
//
//                           * WARNING *
//
//                    This file is generated!
//
//  Changes made to this file will be overwritten. If changes are
//  required to the generated code, the service_crategen project
//  must be updated to generate the changes.
//
// =================================================================

use std::error::Error;
use std::fmt;

#[allow(warnings)]
use futures::future;
use futures::Future;
use rusoto_core::credential::ProvideAwsCredentials;
use rusoto_core::region;
use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest};
use rusoto_core::{Client, RusotoError, RusotoFuture};

use rusoto_core::proto;
use rusoto_core::signature::SignedRequest;
use serde_json;

/// <p>Container for the parameters to the GenerateDataSet operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GenerateDataSetRequest {
    /// <p>(Optional) Key-value pairs which will be returned, unmodified, in the Amazon SNS notification message and the data set metadata file. These key-value pairs can be used to correlate responses with tracking information from other systems.</p>
    #[serde(rename = "customerDefinedValues")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub customer_defined_values: Option<::std::collections::HashMap<String, String>>,
    /// <p>The date a data set was published. For daily data sets, provide a date with day-level granularity for the desired day. For weekly data sets, provide a date with day-level granularity within the desired week (the day value will be ignored). For monthly data sets, provide a date with month-level granularity for the desired month (the day value will be ignored).</p>
    #[serde(rename = "dataSetPublicationDate")]
    pub data_set_publication_date: f64,
    /// <p>The desired data set type.</p> <p> <ul> <li> <strong>customer_subscriber_hourly_monthly_subscriptions</strong> <p>From 2014-07-21 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>customer_subscriber_annual_subscriptions</strong> <p>From 2014-07-21 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>daily_business_usage_by_instance_type</strong> <p>From 2015-01-26 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>daily_business_fees</strong> <p>From 2015-01-26 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>daily_business_free_trial_conversions</strong> <p>From 2015-01-26 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>daily_business_new_instances</strong> <p>From 2015-01-26 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>daily_business_new_product_subscribers</strong> <p>From 2015-01-26 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>daily_business_canceled_product_subscribers</strong> <p>From 2015-01-26 to present: Available daily by 5:00 PM Pacific Time.</p> </li> <li> <strong>monthly_revenue_billing_and_revenue_data</strong> <p>From 2015-02 to 2017-06: Available monthly on the 4th day of the month by 5:00pm Pacific Time. Data includes metered transactions (e.g. hourly) from two months prior.</p> <p>From 2017-07 to present: Available monthly on the 15th day of the month by 5:00pm Pacific Time. Data includes metered transactions (e.g. hourly) from one month prior.</p> </li> <li> <strong>monthly_revenue_annual_subscriptions</strong> <p>From 2015-02 to 2017-06: Available monthly on the 4th day of the month by 5:00pm Pacific Time. Data includes up-front software charges (e.g. annual) from one month prior.</p> <p>From 2017-07 to present: Available monthly on the 15th day of the month by 5:00pm Pacific Time. Data includes up-front software charges (e.g. annual) from one month prior.</p> </li> <li> <strong>disbursed_amount_by_product</strong> <p>From 2015-01-26 to present: Available every 30 days by 5:00 PM Pacific Time.</p> </li> <li> <strong>disbursed_amount_by_product_with_uncollected_funds</strong> <p>From 2012-04-19 to 2015-01-25: Available every 30 days by 5:00 PM Pacific Time.</p> <p>From 2015-01-26 to present: This data set was split into three data sets: disbursed_amount_by_product, disbursed_amount_by_age_of_uncollected_funds, and disbursed_amount_by_age_of_disbursed_funds.</p> </li> <li> <strong>disbursed_amount_by_instance_hours</strong> <p>From 2012-09-04 to present: Available every 30 days by 5:00 PM Pacific Time.</p> </li> <li> <strong>disbursed_amount_by_customer_geo</strong> <p>From 2012-04-19 to present: Available every 30 days by 5:00 PM Pacific Time.</p> </li> <li> <strong>disbursed_amount_by_age_of_uncollected_funds</strong> <p>From 2015-01-26 to present: Available every 30 days by 5:00 PM Pacific Time.</p> </li> <li> <strong>disbursed_amount_by_age_of_disbursed_funds</strong> <p>From 2015-01-26 to present: Available every 30 days by 5:00 PM Pacific Time.</p> </li> <li> <strong>customer_profile_by_industry</strong> <p>From 2015-10-01 to 2017-06-29: Available daily by 5:00 PM Pacific Time.</p> <p>From 2017-06-30 to present: This data set is no longer available.</p> </li> <li> <strong>customer_profile_by_revenue</strong> <p>From 2015-10-01 to 2017-06-29: Available daily by 5:00 PM Pacific Time.</p> <p>From 2017-06-30 to present: This data set is no longer available.</p> </li> <li> <strong>customer_profile_by_geography</strong> <p>From 2015-10-01 to 2017-06-29: Available daily by 5:00 PM Pacific Time.</p> <p>From 2017-06-30 to present: This data set is no longer available.</p> </li> <li> <strong>sales_compensation_billed_revenue</strong> <p>From 2016-12 to 2017-06: Available monthly on the 4th day of the month by 5:00pm Pacific Time. Data includes metered transactions (e.g. hourly) from two months prior, and up-front software charges (e.g. annual) from one month prior.</p> <p>From 2017-06 to present: Available monthly on the 15th day of the month by 5:00pm Pacific Time. Data includes metered transactions (e.g. hourly) from one month prior, and up-front software charges (e.g. annual) from one month prior.</p> </li> <li> <strong>us_sales_and_use_tax_records</strong> <p>From 2017-02-15 to present: Available monthly on the 15th day of the month by 5:00 PM Pacific Time.</p> </li> </ul> </p>
    #[serde(rename = "dataSetType")]
    pub data_set_type: String,
    /// <p>The name (friendly name, not ARN) of the destination S3 bucket.</p>
    #[serde(rename = "destinationS3BucketName")]
    pub destination_s3_bucket_name: String,
    /// <p>(Optional) The desired S3 prefix for the published data set, similar to a directory path in standard file systems. For example, if given the bucket name "mybucket" and the prefix "myprefix/mydatasets", the output file "outputfile" would be published to "s3://mybucket/myprefix/mydatasets/outputfile". If the prefix directory structure does not exist, it will be created. If no prefix is provided, the data set will be published to the S3 bucket root.</p>
    #[serde(rename = "destinationS3Prefix")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destination_s3_prefix: Option<String>,
    /// <p>The Amazon Resource Name (ARN) of the Role with an attached permissions policy to interact with the provided AWS services.</p>
    #[serde(rename = "roleNameArn")]
    pub role_name_arn: String,
    /// <p>Amazon Resource Name (ARN) for the SNS Topic that will be notified when the data set has been published or if an error has occurred.</p>
    #[serde(rename = "snsTopicArn")]
    pub sns_topic_arn: String,
}

/// <p>Container for the result of the GenerateDataSet operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GenerateDataSetResult {
    /// <p>A unique identifier representing a specific request to the GenerateDataSet operation. This identifier can be used to correlate a request with notifications from the SNS topic.</p>
    #[serde(rename = "dataSetRequestId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub data_set_request_id: Option<String>,
}

/// <p>Container for the parameters to the StartSupportDataExport operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct StartSupportDataExportRequest {
    /// <p>(Optional) Key-value pairs which will be returned, unmodified, in the Amazon SNS notification message and the data set metadata file.</p>
    #[serde(rename = "customerDefinedValues")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub customer_defined_values: Option<::std::collections::HashMap<String, String>>,
    /// <p> Specifies the data set type to be written to the output csv file. The data set types customer_support_contacts_data and test_customer_support_contacts_data both result in a csv file containing the following fields: Product Id, Product Code, Customer Guid, Subscription Guid, Subscription Start Date, Organization, AWS Account Id, Given Name, Surname, Telephone Number, Email, Title, Country Code, ZIP Code, Operation Type, and Operation Time. </p> <p> <ul> <li><i>customer_support_contacts_data</i> Customer support contact data. The data set will contain all changes (Creates, Updates, and Deletes) to customer support contact data from the date specified in the from_date parameter.</li> <li><i>test_customer_support_contacts_data</i> An example data set containing static test data in the same format as customer_support_contacts_data</li> </ul> </p>
    #[serde(rename = "dataSetType")]
    pub data_set_type: String,
    /// <p>The name (friendly name, not ARN) of the destination S3 bucket.</p>
    #[serde(rename = "destinationS3BucketName")]
    pub destination_s3_bucket_name: String,
    /// <p>(Optional) The desired S3 prefix for the published data set, similar to a directory path in standard file systems. For example, if given the bucket name "mybucket" and the prefix "myprefix/mydatasets", the output file "outputfile" would be published to "s3://mybucket/myprefix/mydatasets/outputfile". If the prefix directory structure does not exist, it will be created. If no prefix is provided, the data set will be published to the S3 bucket root.</p>
    #[serde(rename = "destinationS3Prefix")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub destination_s3_prefix: Option<String>,
    /// <p>The start date from which to retrieve the data set in UTC. This parameter only affects the customer_support_contacts_data data set type.</p>
    #[serde(rename = "fromDate")]
    pub from_date: f64,
    /// <p>The Amazon Resource Name (ARN) of the Role with an attached permissions policy to interact with the provided AWS services.</p>
    #[serde(rename = "roleNameArn")]
    pub role_name_arn: String,
    /// <p>Amazon Resource Name (ARN) for the SNS Topic that will be notified when the data set has been published or if an error has occurred.</p>
    #[serde(rename = "snsTopicArn")]
    pub sns_topic_arn: String,
}

/// <p>Container for the result of the StartSupportDataExport operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct StartSupportDataExportResult {
    /// <p>A unique identifier representing a specific request to the StartSupportDataExport operation. This identifier can be used to correlate a request with notifications from the SNS topic.</p>
    #[serde(rename = "dataSetRequestId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub data_set_request_id: Option<String>,
}

/// Errors returned by GenerateDataSet
#[derive(Debug, PartialEq)]
pub enum GenerateDataSetError {
    /// <p>This exception is thrown when an internal service error occurs.</p>
    MarketplaceCommerceAnalytics(String),
}

impl GenerateDataSetError {
    pub fn from_response(res: BufferedHttpResponse) -> RusotoError<GenerateDataSetError> {
        if let Some(err) = proto::json::Error::parse(&res) {
            match err.typ.as_str() {
                "MarketplaceCommerceAnalyticsException" => {
                    return RusotoError::Service(
                        GenerateDataSetError::MarketplaceCommerceAnalytics(err.msg),
                    )
                }
                "ValidationException" => return RusotoError::Validation(err.msg),
                _ => {}
            }
        }
        return RusotoError::Unknown(res);
    }
}
impl fmt::Display for GenerateDataSetError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for GenerateDataSetError {
    fn description(&self) -> &str {
        match *self {
            GenerateDataSetError::MarketplaceCommerceAnalytics(ref cause) => cause,
        }
    }
}

/// Errors returned by StartSupportDataExport
#[derive(Debug, PartialEq)]
pub enum StartSupportDataExportError {
    /// <p>This exception is thrown when an internal service error occurs.</p>
    MarketplaceCommerceAnalytics(String),
}

impl StartSupportDataExportError {
    pub fn from_response(res: BufferedHttpResponse) -> RusotoError<StartSupportDataExportError> {
        if let Some(err) = proto::json::Error::parse(&res) {
            match err.typ.as_str() {
                "MarketplaceCommerceAnalyticsException" => {
                    return RusotoError::Service(
                        StartSupportDataExportError::MarketplaceCommerceAnalytics(err.msg),
                    )
                }
                "ValidationException" => return RusotoError::Validation(err.msg),
                _ => {}
            }
        }
        return RusotoError::Unknown(res);
    }
}
impl fmt::Display for StartSupportDataExportError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl Error for StartSupportDataExportError {
    fn description(&self) -> &str {
        match *self {
            StartSupportDataExportError::MarketplaceCommerceAnalytics(ref cause) => cause,
        }
    }
}

/// Trait representing the capabilities of the AWS Marketplace Commerce Analytics API. AWS Marketplace Commerce Analytics clients implement this trait.
pub trait MarketplaceCommerceAnalytics {
    /// <p>Given a data set type and data set publication date, asynchronously publishes the requested data set to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy.</p>
    fn generate_data_set(
        &self,
        input: GenerateDataSetRequest,
    ) -> RusotoFuture<GenerateDataSetResult, GenerateDataSetError>;

    /// <p>Given a data set type and a from date, asynchronously publishes the requested customer support data to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD'T'HH-mm-ss'Z'.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy.</p>
    fn start_support_data_export(
        &self,
        input: StartSupportDataExportRequest,
    ) -> RusotoFuture<StartSupportDataExportResult, StartSupportDataExportError>;
}

/// A client for the AWS Marketplace Commerce Analytics API.
#[derive(Clone)]
pub struct MarketplaceCommerceAnalyticsClient {
    client: Client,
    region: region::Region,
}

impl MarketplaceCommerceAnalyticsClient {
    /// Creates a client backed by the default tokio event loop.
    ///
    /// The client will use the default credentials provider and tls client.
    pub fn new(region: region::Region) -> MarketplaceCommerceAnalyticsClient {
        MarketplaceCommerceAnalyticsClient {
            client: Client::shared(),
            region,
        }
    }

    pub fn new_with<P, D>(
        request_dispatcher: D,
        credentials_provider: P,
        region: region::Region,
    ) -> MarketplaceCommerceAnalyticsClient
    where
        P: ProvideAwsCredentials + Send + Sync + 'static,
        P::Future: Send,
        D: DispatchSignedRequest + Send + Sync + 'static,
        D::Future: Send,
    {
        MarketplaceCommerceAnalyticsClient {
            client: Client::new_with(credentials_provider, request_dispatcher),
            region,
        }
    }
}

impl MarketplaceCommerceAnalytics for MarketplaceCommerceAnalyticsClient {
    /// <p>Given a data set type and data set publication date, asynchronously publishes the requested data set to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy.</p>
    fn generate_data_set(
        &self,
        input: GenerateDataSetRequest,
    ) -> RusotoFuture<GenerateDataSetResult, GenerateDataSetError> {
        let mut request =
            SignedRequest::new("POST", "marketplacecommerceanalytics", &self.region, "/");
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        request.add_header(
            "x-amz-target",
            "MarketplaceCommerceAnalytics20150701.GenerateDataSet",
        );
        let encoded = serde_json::to_string(&input).unwrap();
        request.set_payload(Some(encoded));

        self.client.sign_and_dispatch(request, |response| {
            if response.status.is_success() {
                Box::new(response.buffer().from_err().and_then(|response| {
                    proto::json::ResponsePayload::new(&response)
                        .deserialize::<GenerateDataSetResult, _>()
                }))
            } else {
                Box::new(
                    response
                        .buffer()
                        .from_err()
                        .and_then(|response| Err(GenerateDataSetError::from_response(response))),
                )
            }
        })
    }

    /// <p>Given a data set type and a from date, asynchronously publishes the requested customer support data to the specified S3 bucket and notifies the specified SNS topic once the data is available. Returns a unique request identifier that can be used to correlate requests with notifications from the SNS topic. Data sets will be published in comma-separated values (CSV) format with the file name {data_set_type}_YYYY-MM-DD'T'HH-mm-ss'Z'.csv. If a file with the same name already exists (e.g. if the same data set is requested twice), the original file will be overwritten by the new file. Requires a Role with an attached permissions policy providing Allow permissions for the following actions: s3:PutObject, s3:GetBucketLocation, sns:GetTopicAttributes, sns:Publish, iam:GetRolePolicy.</p>
    fn start_support_data_export(
        &self,
        input: StartSupportDataExportRequest,
    ) -> RusotoFuture<StartSupportDataExportResult, StartSupportDataExportError> {
        let mut request =
            SignedRequest::new("POST", "marketplacecommerceanalytics", &self.region, "/");
        request.set_content_type("application/x-amz-json-1.1".to_owned());
        request.add_header(
            "x-amz-target",
            "MarketplaceCommerceAnalytics20150701.StartSupportDataExport",
        );
        let encoded = serde_json::to_string(&input).unwrap();
        request.set_payload(Some(encoded));

        self.client.sign_and_dispatch(request, |response| {
            if response.status.is_success() {
                Box::new(response.buffer().from_err().and_then(|response| {
                    proto::json::ResponsePayload::new(&response)
                        .deserialize::<StartSupportDataExportResult, _>()
                }))
            } else {
                Box::new(response.buffer().from_err().and_then(|response| {
                    Err(StartSupportDataExportError::from_response(response))
                }))
            }
        })
    }
}
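
// -----------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated client): shows how a
// caller might request the daily_business_fees data set through this client.
// The bucket name, S3 prefix, role ARN, SNS topic ARN, and publication date
// below are placeholders, and the blocking `.sync()` call assumes the
// RusotoFuture helper available in this rusoto_core version; callers already
// running on a tokio event loop would compose the returned future instead.
// -----------------------------------------------------------------------------
#[allow(dead_code)]
fn example_generate_data_set() {
    // Build a client for a region where the Marketplace Commerce Analytics
    // service is available.
    let client = MarketplaceCommerceAnalyticsClient::new(region::Region::UsEast1);

    let request = GenerateDataSetRequest {
        // One of the data set types documented on GenerateDataSetRequest.
        data_set_type: "daily_business_fees".to_owned(),
        // Publication date as an epoch timestamp (assumed seconds here);
        // this placeholder corresponds to 2019-01-01T00:00:00Z.
        data_set_publication_date: 1_546_300_800.0,
        // Placeholder destination and IAM/SNS resources; replace with real ones.
        destination_s3_bucket_name: "my-example-bucket".to_owned(),
        destination_s3_prefix: Some("marketplace/analytics".to_owned()),
        role_name_arn: "arn:aws:iam::123456789012:role/ExampleMcaRole".to_owned(),
        sns_topic_arn: "arn:aws:sns:us-east-1:123456789012:example-topic".to_owned(),
        customer_defined_values: None,
    };

    // The service publishes the CSV to S3 asynchronously; the response only
    // carries the request id used to correlate SNS notifications.
    match client.generate_data_set(request).sync() {
        Ok(result) => println!("data set request id: {:?}", result.data_set_request_id),
        Err(err) => println!("generate_data_set failed: {:?}", err),
    }
}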