feat(hos_client_create, hos_client_destory): 多次调用destory不会导致重复释放

This commit is contained in:
彭宣正
2020-12-14 17:24:58 +08:00
parent 505d529c32
commit 10b370e486
55976 changed files with 8544395 additions and 2 deletions

View File

@@ -0,0 +1,54 @@
#!/bin/bash
# BuildMyCode: package the requested branch name and extra CMake flags into
# BuildSpec.zip and upload it to the dev-pipeline S3 bucket so CI picks it up.
set -eu

if [[ $# -lt 1 ]]; then
    echo -e "error: missing location parameter.\n"
    echo -e "USAGE: BuildMyCode [OPTIONS]\n"
    echo "OPTIONS:"
    echo "-b|--branch The name of the git branch. Default is the current branch."
    echo "-c|--cmake-flags Any additional CMake flags to pass to the build jobs."
    echo "-l|--location The name of key in S3 under which to save the BuildSpec.zip file."
    exit 1
fi

branch=""
cmakeFlags=""
buildspecLocation=""

POSITIONAL=()
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        -b|--branch)
            branch=$2
            shift 2 # past flag and value (original shifted once, so the value
                    # leaked into the positional arguments)
            ;;
        -c|--cmake-flags)
            cmakeFlags=$2
            shift 2 # past flag and value
            ;;
        -l|--location) # where to put the buildspec.zip file
            buildspecLocation=$2
            shift 2 # past flag and value
            ;;
        *) # unknown option
            POSITIONAL+=("$1") # save it in an array for later
            shift # past argument
            ;;
    esac
done
# restore positional parameters; the ${arr[@]+...} guard keeps `set -u`
# happy on bash < 4.4 when the array is empty
set -- ${POSITIONAL[@]+"${POSITIONAL[@]}"}

# default to the current branch
if [[ -z $branch ]]; then
    branch=$(git rev-parse --abbrev-ref HEAD)
fi
# Under `set -eu` an unset buildspecLocation used to abort with an opaque
# "unbound variable" error at the upload line; fail early with a clear message.
if [[ -z $buildspecLocation ]]; then
    echo "error: missing location parameter (-l|--location)." >&2
    exit 1
fi

# NOTE(review): values are interpolated into JSON verbatim; quotes or
# backslashes inside $cmakeFlags would still produce invalid JSON. Also fixed
# a stray space the original embedded in the cmakeFlags value.
json='{ "branch": "'"$branch"'", "cmakeFlags": "'"$cmakeFlags"'" }'
echo "$json" >BuildSpec.json
zip -r BuildSpec.zip BuildSpec.json
aws s3 cp BuildSpec.zip s3://aws-sdk-cpp-dev-pipeline/"${buildspecLocation}"/BuildSpec.zip
S3VERSION=$(aws s3api head-object --bucket aws-sdk-cpp-dev-pipeline --key "${buildspecLocation}"/BuildSpec.zip | awk '/VersionId/{gsub(/[",]/, ""); print $2}')
echo -e "\033[30;42mYour build version ID is ${S3VERSION}\033[0m"

View File

@@ -0,0 +1,40 @@
#!/usr/bin/python
"""Emit a release document (JSON, on stdout) describing changed SDK clients."""
# Unused/duplicate imports from the original (shutil, subprocess x2, re, os,
# zipfile, io) have been removed; the whole file is visible here.
import argparse
import json


def build_change_doc(changes):
    """Build the release-doc dict for a whitespace-separated client list.

    Each entry like 'aws-cpp-sdk-s3' becomes a feature with service-name 's3'.
    An empty or blank string yields a document with no features.
    """
    features = [
        {"service-name": change.replace("aws-cpp-sdk-", "")}
        for change in changes.split()
    ]
    return {"releases": [{"features": features}]}


def Main():
    """Parse --changesList and print the release document as JSON."""
    parser = argparse.ArgumentParser(description="Creates a release doc based on a list of changes.")
    # default="" keeps the script from crashing with AttributeError
    # (None.split()) when the flag is omitted.
    parser.add_argument("--changesList", action="store", default="")
    args = vars(parser.parse_args())
    print(json.dumps(build_change_doc(args["changesList"])))


if __name__ == "__main__":
    # Guarded so importing this module (e.g. from tests) has no side effects.
    Main()

View File

@@ -0,0 +1,20 @@
#!/bin/bash
# Print the unique set of top-level directories touched between the previous
# and current position of the ref given as $1 (uses the reflog: "$1@{1}").
FILES_CHANGED=$(git diff --name-only "$1@{1}" "$1")
declare -A DIRS_SET
for FILE in $FILES_CHANGED; do
    # First path component; ${FILE%%/*} avoids forking `cut` per file.
    DIR=${FILE%%/*}
    if test "${DIRS_SET[${DIR}]+isset}"; then
        continue # already reported this directory
    fi
    echo "$DIR"
    DIRS_SET[${DIR}]=""
done

View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Compute a -DBUILD_ONLY=... CMake argument from the directories changed in
# the ref passed as $1 (delegates change detection to DetectDirectoryChanges
# in the same directory as this script).
CURDIR="$(dirname "$(readlink -f "$0")")"
CHANGED_DIRS=`$CURDIR/DetectDirectoryChanges $1`
# If the core SDK, top-level build files, cmake modules, or the code
# generator changed, everything must be rebuilt: emit an empty BUILD_ONLY
# (meaning "build all") and stop.
# NOTE: the `;&` fall-through terminators require bash >= 4.
case $CHANGED_DIRS in
*"aws-cpp-sdk-core"*)
;&
*"CMakeLists.txt"*)
;&
*"cmake"*)
;&
*"code-generation"*)
echo "-DBUILD_ONLY=\"\""
exit 0
;;
*)
esac
# Otherwise build only the changed clients: join the directory names with ';'
# and strip the "aws-cpp-sdk-" prefix that BUILD_ONLY does not expect.
BUILD_ONLY_OUT="-DBUILD_ONLY=\"${CHANGED_DIRS//$'\n'/';'}\""
echo ${BUILD_ONLY_OUT//$'aws-cpp-sdk-'/''}

View File

@@ -0,0 +1,17 @@
#!/usr/bin/python
"""Print a single build argument ("branch" or "cmakeFlags") from BuildSpec.json.

The original mixed Python 2 ``print >> sys.stderr`` statements with Python 3
``print()`` calls, making it a SyntaxError on Python 3; it also used a bare
``except:`` and leaked the open file handle. Behavior is otherwise preserved:
unknown or empty arguments print nothing and exit 0; a missing/invalid
BuildSpec.json (or missing key) prints an error and exits non-zero.
"""
import sys
import json


def extract_build_arg(name, path='BuildSpec.json'):
    """Return the non-empty value of *name* from *path*, else None.

    Raises OSError/ValueError/KeyError for a missing, unparsable, or
    incomplete BuildSpec.json (handled by main()).
    """
    with open(path) as fp:
        data = json.load(fp)
    if name in ("branch", "cmakeFlags") and data[name] != "":
        return data[name]
    return None


def main():
    if len(sys.argv) != 2:
        print(" Usage: python ExtractBuildArgs.py <ArgName>", file=sys.stderr)
        sys.exit(-1)
    try:
        value = extract_build_arg(sys.argv[1])
    except (OSError, ValueError, KeyError):
        # Missing, unreadable, or malformed BuildSpec.json.
        print("No related args found in BuildSpec.json", file=sys.stderr)
        sys.exit(-1)
    if value is not None:
        print(value)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Clone the staging repo and check out the branch recorded in BuildSpec.json.
# Fix: the original wrote `branch =$(...)` — the space makes bash try to run a
# command named `branch` instead of assigning the variable.
# NOTE(review): ExtractBuildArgs.py is read from aws-sdk-cpp/CI *before* the
# clone below creates aws-sdk-cpp; presumably a prior pipeline stage placed it
# there — confirm against the pipeline definition.
branch=$(python aws-sdk-cpp/CI/ExtractBuildArgs.py branch)
git clone git@github.com:awslabs/aws-sdk-cpp-staging.git aws-sdk-cpp
cd aws-sdk-cpp
git reset --hard HEAD
git checkout master
git pull
git checkout "$branch"

View File

@@ -0,0 +1,22 @@
# CodeBuild buildspec: cross-compile the C++ SDK for Android via
# scripts/build_3rdparty.py. BUILD_CONFIG, BUILD_PARALLEL, ARCHITECTURE,
# API_LEVEL and NOTIFICATIONS_TOPIC arrive as CodeBuild environment variables.
version: 0.2
phases:
build:
commands:
- mv aws-sdk-cpp /tmp
- mkdir /tmp/build
- cd /tmp/build
- python /tmp/aws-sdk-cpp/scripts/build_3rdparty.py --configs="${BUILD_CONFIG}" --sourcedir=/tmp/aws-sdk-cpp/ --parallel=${BUILD_PARALLEL} --installdir=/tmp/install --generateClients="0" --architecture=${ARCHITECTURE} --cmake_params="-DMINIMIZE_SIZE=ON -DANDROID_NATIVE_API_LEVEL=${API_LEVEL}"
# post_build always runs; CODEBUILD_BUILD_SUCCEEDING tells us whether the
# build phase passed so either outcome gets reported to the SNS topic.
post_build:
commands:
- export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
- export BUILD_URL="https://console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
- |
if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "1" ]; then
aws sns publish --topic-arn ${NOTIFICATIONS_TOPIC} --message "/md [BUILD SUCCESS](${BUILD_URL}) (${CODEBUILD_BUILD_ID})";
else
aws sns publish --topic-arn ${NOTIFICATIONS_TOPIC} --message "/md [BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})";
fi
# Everything installed under /tmp/install becomes the build artifact.
artifacts:
files:
- "**/*"
base-directory: /tmp/install

View File

@@ -0,0 +1,19 @@
# CodeBuild buildspec: extract the SDK version string from VersionConfig.h and
# publish it as the 'aws-sdk-cpp-version' artifact consumed by later stages.
version: 0.2
phases:
build:
commands:
# AWS_SDK_VERSION_STRING is a quoted literal; cut -d '"' takes the value.
- VERSION_NUM=$(grep AWS_SDK_VERSION_STRING aws-sdk-cpp/aws-cpp-sdk-core/include/aws/core/VersionConfig.h | cut -f2 -d '"')
- echo $VERSION_NUM | tee aws-sdk-cpp-version
# post_build always runs; report the outcome to the SNS notifications topic.
post_build:
commands:
- export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
- export BUILD_URL="https://console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
- |
if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "1" ]; then
aws sns publish --topic-arn ${NOTIFICATIONS_TOPIC} --message "/md [BUILD SUCCESS](${BUILD_URL}) (Extract Metadata)";
else
aws sns publish --topic-arn ${NOTIFICATIONS_TOPIC} --message "/md [BUILD FAILURE](${BUILD_URL}) (Extract Metadata)";
fi
artifacts:
files:
- "aws-sdk-cpp-version"

View File

@@ -0,0 +1,11 @@
# CodeBuild buildspec: zip the entire SDK source tree (which sits one level
# above the CodeBuild source dir) and expose it as latestSnapshot.zip.
version: 0.2
phases:
build:
commands:
- cd ..
- zip -r latestSnapshot.zip aws-sdk-cpp
# The artifact must live under CODEBUILD_SRC_DIR to be picked up below.
- mv latestSnapshot.zip $CODEBUILD_SRC_DIR
- cd $CODEBUILD_SRC_DIR
artifacts:
files:
- latestSnapshot.zip

View File

@@ -0,0 +1,22 @@
# CodeBuild buildspec: build the C++ SDK on Linux via scripts/build_3rdparty.py.
# BUILD_CONFIG, BUILD_PARALLEL and NOTIFICATIONS_TOPIC arrive as CodeBuild
# environment variables (see the pipeline CloudFormation template).
version: 0.2
phases:
build:
commands:
- mv aws-sdk-cpp /tmp
- mkdir /tmp/build
- cd /tmp/build
- python /tmp/aws-sdk-cpp/scripts/build_3rdparty.py --configs="${BUILD_CONFIG}" --sourcedir=/tmp/aws-sdk-cpp/ --parallel=${BUILD_PARALLEL} --installdir=/tmp/install --generateClients="0" --cmake_params=""
# post_build always runs; report success/failure to the SNS topic.
post_build:
commands:
- export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
- export BUILD_URL="https://console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
- |
if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "1" ]; then
aws sns publish --topic-arn ${NOTIFICATIONS_TOPIC} --message "/md [BUILD SUCCESS](${BUILD_URL}) (${CODEBUILD_BUILD_ID})";
else
aws sns publish --topic-arn ${NOTIFICATIONS_TOPIC} --message "/md [BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})";
fi
# Everything installed under /tmp/install becomes the build artifact.
artifacts:
files:
- "**/*"
base-directory: /tmp/install

View File

@@ -0,0 +1,23 @@
# CodeBuild buildspec (Windows/PowerShell): build the C++ SDK via
# scripts/build_3rdparty.py inside C:\tmp.
version: 0.2
phases:
build:
commands:
- mkdir C:\tmp
- mv aws-sdk-cpp C:\tmp
- mkdir C:\tmp\build
- cd C:\tmp\build
# NOTE(review): 'ARCHITURE' (sic) matches the environment-variable name set
# by the pipeline CloudFormation template — rename both together or not at all.
- python "C:\tmp\aws-sdk-cpp\scripts\build_3rdparty.py" --architecture=${Env:ARCHITURE} --configs="${Env:BUILD_CONFIG}" --sourcedir="C:\tmp\aws-sdk-cpp" --parallel=${Env:BUILD_PARALLEL} --installdir="C:\tmp\install" --generateClients="0" --cmake_params=""
# post_build always runs; report success/failure to the SNS topic.
post_build:
commands:
- $BUILD_JOB_NAME=$Env:CODEBUILD_BUILD_ID.Substring(0, $Env:CODEBUILD_BUILD_ID.IndexOf(":"))
- $BUILD_URL="https://console.aws.amazon.com/codesuite/codebuild/projects/$BUILD_JOB_NAME/build/$Env:CODEBUILD_BUILD_ID"
- |
if (${Env:CODEBUILD_BUILD_SUCCEEDING} -eq 1) {
aws sns publish --topic-arn ${Env:NOTIFICATIONS_TOPIC} --message "/md [BUILD SUCCESS](${BUILD_URL}) (${Env:CODEBUILD_BUILD_ID})"
} Else {
aws sns publish --topic-arn ${Env:NOTIFICATIONS_TOPIC} --message "/md [BUILD FAILURE](${BUILD_URL}) (${Env:CODEBUILD_BUILD_ID})"
}
artifacts:
files:
- "**/*"
base-directory: C:\tmp\install

View File

@@ -0,0 +1,90 @@
# Whenever you make any change here, you should update it in Amazon S3.
# This lambda function is used to publish binaries and make notifications in binary release pipeline.
# It will copy the binaries generated in each pipeline action from a temporary location (provided by its inputs) to a specific s3 bucket for customer download.
# In the "Publish" stage, each lambda function is responsible for uploading binaries for one platform.
import boto3
import json
import os
import zipfile
from botocore.client import Config
def lambda_handler(event, context):
    """CodePipeline job worker: publish built SDK binaries for download.

    Reads the SDK version from the first input artifact (a zip containing the
    plain-text file 'aws-sdk-cpp-version'), copies every remaining input
    artifact to s3://<bucket>/cpp/builds/<version>/<prefix>/, notifies the
    SNS topic from NOTIFICATIONS_TOPIC, and reports success or failure back
    to CodePipeline. Always returns 0.
    """
    print(event)
    job_id = event['CodePipeline.job']['id']
    sns_client = boto3.client('sns')
    codepipeline_client = boto3.client('codepipeline')
    # Pre-seed the values used by the failure notification. The original
    # referenced these names unconditionally in the except-branch, so any
    # error raised before they were parsed became a NameError and the failure
    # message was never published.
    publish_bucket = 'unknown-bucket'
    publish_key_prefix = 'unknown-prefix'
    sdk_version = 'unknown-version'
    try:
        parameters = json.loads(event['CodePipeline.job']['data']['actionConfiguration']['configuration']['UserParameters'])
        publish_bucket = parameters['bucket']
        publish_key_prefix = parameters['key_prefix']
        # Get SDK version from the first input artifact.
        input_bucket = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['bucketName']
        input_key = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']['objectKey']
        s3 = boto3.resource('s3', config=Config(signature_version='s3v4'))
        s3.meta.client.download_file(input_bucket, input_key, '/tmp/aws-sdk-cpp-version.zip')
        # Renamed from 'zip': it shadowed the builtin.
        with zipfile.ZipFile('/tmp/aws-sdk-cpp-version.zip', 'r') as archive:
            archive.extractall('/tmp')
        with open('/tmp/aws-sdk-cpp-version', 'r') as fp:
            sdk_version = fp.read().strip()
        # Copy SDK binaries (all remaining input artifacts) to the public bucket.
        input_artifacts = event['CodePipeline.job']['data']['inputArtifacts']
        for input_artifact in input_artifacts[1:]:
            artifact_name = input_artifact['name']
            # Artifact names look like '<platform>_<config>'; keep the config part.
            config = artifact_name[artifact_name.find('_') + 1:]
            publish_key = 'cpp/builds/{version}/{prefix}/{prefix}-{config}.zip'.format(
                version = sdk_version,
                prefix = publish_key_prefix,
                config = config
            )
            print('Uploading artifacts to https://s3.console.aws.amazon.com/s3/object/{bucket}/{key}'.format(
                bucket = publish_bucket,
                key = publish_key))
            s3.meta.client.copy(
                { 'Bucket': input_artifact['location']['s3Location']['bucketName'],
                  'Key': input_artifact['location']['s3Location']['objectKey'] },
                publish_bucket, publish_key)
        # Notify the topic, then tell CodePipeline the job succeeded.
        sns_response = sns_client.publish(
            TopicArn = os.environ['NOTIFICATIONS_TOPIC'],
            Message = '/md [PUBLISH SUCCESS]({url}) ({prefix})'.format(
                url = 'https://s3.console.aws.amazon.com/s3/buckets/{bucket}/cpp/builds/{version}/{prefix}/'.format(
                    bucket = publish_bucket,
                    version = sdk_version,
                    prefix = publish_key_prefix
                ),
                prefix = publish_key_prefix
            )
        )
        print(sns_response)
        codepipeline_client.put_job_success_result(
            jobId = job_id
        )
    except Exception as e:
        # Report the failure to CodePipeline first, then best-effort notify.
        codepipeline_client.put_job_failure_result(
            jobId = job_id,
            failureDetails = {
                'type': 'JobFailed',
                'message': str(e)
            }
        )
        sns_response = sns_client.publish(
            TopicArn = os.environ['NOTIFICATIONS_TOPIC'],
            Message = '/md [PUBLISH FAILURE]({url}) ({prefix})'.format(
                url = 'https://s3.console.aws.amazon.com/s3/buckets/{bucket}/cpp/builds/{version}/{prefix}/'.format(
                    bucket = publish_bucket,
                    version = sdk_version,
                    prefix = publish_key_prefix
                ),
                prefix = publish_key_prefix
            )
        )
        print(sns_response)
        print(e)
    return 0

View File

@@ -0,0 +1,36 @@
# Whenever you make any change here, you should update it in Amazon S3.
# In binary release pipeline, build jobs will send the results to a SNS topic.
# And this lambda function, triggered by this SNS notifications, will send messages about the build results to a Chime room.
# Other functionality could be added in the future, like put metrics to CloudWatch or trigger another alarm.
import boto3
import json
import os
from botocore.vendored import requests
# Chime webhook URL, read once at module load. Using .get() keeps the module
# importable when the variable is absent (the original raised KeyError at
# import time, surfacing as an opaque Lambda init error).
chime_bot_url = os.environ.get('CHIME_BOT_URL')


def decorate_failure_message(message):
    """Turn '/md [text...]' into '/md @All[text...]' to @-mention the room.

    Inserts '@All' right after the first space, i.e. after the '/md'
    markdown prefix used by Chime webhook messages.
    """
    first_space = message.find(' ')
    return message[:first_space + 1] + '@All' + message[first_space + 1:]


def lambda_handler(event, context):
    """Forward SNS build-result notifications to a Chime room.

    Posts the SNS message to the Chime webhook when it contains 'FAILURE'
    (decorated with an @All mention) or 'SUCCESS'; any other message is
    ignored. Returns the HTTP reason phrase when a post was made, else 0.
    """
    print(event)
    message = event["Records"][0]["Sns"]["Message"]
    headers = {'Content-Type': 'application/json'}
    data = {}
    # Fix: 'make_request' was unbound when the message contained neither
    # FAILURE nor SUCCESS, so such events crashed with NameError instead of
    # being ignored.
    make_request = False
    if "FAILURE" in message:
        # @All members if the build failed.
        message = decorate_failure_message(message)
        make_request = True
    elif 'SUCCESS' in message:
        make_request = True
    if make_request:
        data["Content"] = message
        r = requests.post(chime_bot_url, headers = headers, data = json.dumps(data))
        return r.reason
    return 0

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,308 @@
# Whenever you make any change here, you should update it in Amazon S3.
# This CloudFormation template is used to create resources for CodeBuild projects to build C++ SDK on Linux and Windows with both Visual Studio 2015 and 2017.
# It's a sub-template used in the main template to create binary release pipeline.
# NOTE(review): the <angle-bracket> defaults below look like placeholders that
# deployment tooling substitutes before stack creation — confirm before
# deploying this template as-is.
AWSTemplateFormatVersion: 2010-09-09
Parameters:
BuildConfig:
Type: String
Default: <build-config>
Description: Build config when building SDK on Linux and Windows.
BinaryReleaseResultNotificationsTopic:
Type: String
Default: <binary-release-result-notifications-topic>
Description: Topic ARN of the SNS, used to handle notifications received from lambda functions.
BinaryReleaseCodeBuildRole:
Type: String
Default: <binary-release-codebuild-role>
Description: Name of the service role used by CodeBuild projects used to build SDK.
ParameterStoreAwsAccessKeyId:
Type: String
Default: <parameter-store-aws-access-key-id>
Description: Key name in Parameter Store, used for aws access key id.
ParameterStoreAwsSecretAccessKey:
Type: String
Default: <parameter-store-aws-secret-access-key>
Description: Key name in Parameter Store, used for aws secret access key.
LinuxGccProjectName:
Type: String
Default: <linux-gcc-project-name>
Description: Name of the CodeBuild project, which will build C++ SDK on Linux with GCC.
LinuxGccImageName:
Type: String
Default: <linux-gcc-image-name>
Description: Name of the image used in the CodeBuild Project to build SDK on Linux with GCC.
LinuxGccBuildSpecLocation:
Type: String
Default: <linux-gcc-buildspec-location>
Description: Location of buildspec for CodeBuild Project to build SDK on Linux with GCC.
WindowsProjectName:
Type: String
Default: <windows-project-name>
Description: Name of the CodeBuild project, which will build C++ SDK on Windows.
WindowsVS2015ImageName:
Type: String
Default: <windows-vs2015-image-name>
Description: Name of the image used in the CodeBuild Project to build SDK on Windows with VS2015.
WindowsVS2017ImageName:
Type: String
Default: <windows-vs2017-image-name>
Description: Name of the image used in the CodeBuild Project to build SDK on Windows with VS2017.
WindowsBuildSpecLocation:
Type: String
Default: <windows-buildspec-location>
Description: Location of buildspec for CodeBuild Project to build SDK on Windows.
AndroidProjectName:
Type: String
Default: <android-project-name>
Description: Name of the CodeBuild project, which cross compiles C++ SDK on Linux with Android NDK.
AndroidBuildSpecLocation:
Type: String
Default: <android-buildspec-location>
Description: Location of buildspec for CodeBuild Project to build SDK with Android NDK.
BuildParallel:
Type: String
Default: <build-parallel>
Description: Number of jobs in parallel to build C++ SDK.
# Six CodeBuild projects: Linux GCC, Windows VS2015/VS2017, and three Android
# variants (arm32 api19, arm32 api21, arm64 api21). All take source and emit
# artifacts through CodePipeline and publish results to the SNS topic.
Resources:
# Linux GCC build of the SDK.
LinuxGccProject:
Type: AWS::CodeBuild::Project
Properties:
Name:
!Join
- '-'
- - !Ref LinuxGccProjectName
- !Ref BuildConfig
ServiceRole: !Ref BinaryReleaseCodeBuildRole
Source:
Type: CODEPIPELINE
BuildSpec: !Ref LinuxGccBuildSpecLocation
Artifacts:
Type: CODEPIPELINE
Environment:
Type: LINUX_CONTAINER
ComputeType: BUILD_GENERAL1_LARGE
Image: !Ref LinuxGccImageName
EnvironmentVariables:
- Name: BUILD_CONFIG
Type: PLAINTEXT
Value: !Ref BuildConfig
- Name: BUILD_PARALLEL
Type: PLAINTEXT
Value: !Ref BuildParallel
- Name: NOTIFICATIONS_TOPIC
Type: PLAINTEXT
Value: !Ref BinaryReleaseResultNotificationsTopic
- Name: AWS_ACCESS_KEY_ID
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsAccessKeyId
- Name: AWS_SECRET_ACCESS_KEY
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsSecretAccessKey
TimeoutInMinutes: 60
# Windows build with the VS2015 toolchain image.
WindowsVS2015Project:
Type: AWS::CodeBuild::Project
Properties:
Name:
!Join
- '-'
- - !Ref WindowsProjectName
- vs2015
- !Ref BuildConfig
ServiceRole: !Ref BinaryReleaseCodeBuildRole
Source:
Type: CODEPIPELINE
BuildSpec: !Ref WindowsBuildSpecLocation
Artifacts:
Type: CODEPIPELINE
Environment:
Type: WINDOWS_CONTAINER
ComputeType: BUILD_GENERAL1_LARGE
Image: !Ref WindowsVS2015ImageName
EnvironmentVariables:
# NOTE(review): 'ARCHITURE' (sic) is the exact name the Windows buildspec
# reads — rename both together or not at all.
- Name: ARCHITURE
Type: PLAINTEXT
Value: Windows2015
- Name: BUILD_CONFIG
Type: PLAINTEXT
Value: !Ref BuildConfig
- Name: BUILD_PARALLEL
Type: PLAINTEXT
Value: !Ref BuildParallel
- Name: NOTIFICATIONS_TOPIC
Type: PLAINTEXT
Value: !Ref BinaryReleaseResultNotificationsTopic
- Name: AWS_ACCESS_KEY_ID
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsAccessKeyId
- Name: AWS_SECRET_ACCESS_KEY
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsSecretAccessKey
TimeoutInMinutes: 90
# Windows build with the VS2017 toolchain image.
WindowsVS2017Project:
Type: AWS::CodeBuild::Project
Properties:
Name:
!Join
- '-'
- - !Ref WindowsProjectName
- vs2017
- !Ref BuildConfig
ServiceRole: !Ref BinaryReleaseCodeBuildRole
Source:
Type: CODEPIPELINE
BuildSpec: !Ref WindowsBuildSpecLocation
Artifacts:
Type: CODEPIPELINE
Environment:
Type: WINDOWS_CONTAINER
ComputeType: BUILD_GENERAL1_LARGE
Image: !Ref WindowsVS2017ImageName
EnvironmentVariables:
# NOTE(review): 'ARCHITURE' (sic) — see the VS2015 project above.
- Name: ARCHITURE
Type: PLAINTEXT
Value: Windows2017
- Name: BUILD_CONFIG
Type: PLAINTEXT
Value: !Ref BuildConfig
- Name: BUILD_PARALLEL
Type: PLAINTEXT
Value: !Ref BuildParallel
- Name: NOTIFICATIONS_TOPIC
Type: PLAINTEXT
Value: !Ref BinaryReleaseResultNotificationsTopic
- Name: AWS_ACCESS_KEY_ID
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsAccessKeyId
- Name: AWS_SECRET_ACCESS_KEY
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsSecretAccessKey
TimeoutInMinutes: 90
# Android cross-compile: 32-bit ARM, API level 19.
AndroidArm32Api19Project:
Type: AWS::CodeBuild::Project
Properties:
Name:
!Join
- '-'
- - !Ref AndroidProjectName
- arm32
- api19
- !Ref BuildConfig
ServiceRole: !Ref BinaryReleaseCodeBuildRole
Source:
Type: CODEPIPELINE
BuildSpec: !Ref AndroidBuildSpecLocation
Artifacts:
Type: CODEPIPELINE
Environment:
Type: LINUX_CONTAINER
ComputeType: BUILD_GENERAL1_LARGE
Image: !Ref LinuxGccImageName
EnvironmentVariables:
- Name: ARCHITECTURE
Type: PLAINTEXT
Value: AndroidArm32
- Name: API_LEVEL
Type: PLAINTEXT
Value: 19
- Name: BUILD_CONFIG
Type: PLAINTEXT
Value: !Ref BuildConfig
- Name: BUILD_PARALLEL
Type: PLAINTEXT
Value: !Ref BuildParallel
- Name: NOTIFICATIONS_TOPIC
Type: PLAINTEXT
Value: !Ref BinaryReleaseResultNotificationsTopic
- Name: AWS_ACCESS_KEY_ID
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsAccessKeyId
- Name: AWS_SECRET_ACCESS_KEY
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsSecretAccessKey
TimeoutInMinutes: 60
# Android cross-compile: 32-bit ARM, API level 21.
AndroidArm32Api21Project:
Type: AWS::CodeBuild::Project
Properties:
Name:
!Join
- '-'
- - !Ref AndroidProjectName
- arm32
- api21
- !Ref BuildConfig
ServiceRole: !Ref BinaryReleaseCodeBuildRole
Source:
Type: CODEPIPELINE
BuildSpec: !Ref AndroidBuildSpecLocation
Artifacts:
Type: CODEPIPELINE
Environment:
Type: LINUX_CONTAINER
ComputeType: BUILD_GENERAL1_LARGE
Image: !Ref LinuxGccImageName
EnvironmentVariables:
- Name: ARCHITECTURE
Type: PLAINTEXT
Value: AndroidArm32
- Name: API_LEVEL
Type: PLAINTEXT
Value: 21
- Name: BUILD_CONFIG
Type: PLAINTEXT
Value: !Ref BuildConfig
- Name: BUILD_PARALLEL
Type: PLAINTEXT
Value: !Ref BuildParallel
- Name: NOTIFICATIONS_TOPIC
Type: PLAINTEXT
Value: !Ref BinaryReleaseResultNotificationsTopic
- Name: AWS_ACCESS_KEY_ID
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsAccessKeyId
- Name: AWS_SECRET_ACCESS_KEY
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsSecretAccessKey
TimeoutInMinutes: 60
# Android cross-compile: 64-bit ARM, API level 21.
AndroidArm64Api21Project:
Type: AWS::CodeBuild::Project
Properties:
Name:
!Join
- '-'
- - !Ref AndroidProjectName
- arm64
- api21
- !Ref BuildConfig
ServiceRole: !Ref BinaryReleaseCodeBuildRole
Source:
Type: CODEPIPELINE
BuildSpec: !Ref AndroidBuildSpecLocation
Artifacts:
Type: CODEPIPELINE
Environment:
Type: LINUX_CONTAINER
ComputeType: BUILD_GENERAL1_LARGE
Image: !Ref LinuxGccImageName
EnvironmentVariables:
- Name: ARCHITECTURE
Type: PLAINTEXT
Value: AndroidArm64
- Name: API_LEVEL
Type: PLAINTEXT
Value: 21
- Name: BUILD_CONFIG
Type: PLAINTEXT
Value: !Ref BuildConfig
- Name: BUILD_PARALLEL
Type: PLAINTEXT
Value: !Ref BuildParallel
- Name: NOTIFICATIONS_TOPIC
Type: PLAINTEXT
Value: !Ref BinaryReleaseResultNotificationsTopic
- Name: AWS_ACCESS_KEY_ID
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsAccessKeyId
- Name: AWS_SECRET_ACCESS_KEY
Type: PARAMETER_STORE
Value: !Ref ParameterStoreAwsSecretAccessKey
TimeoutInMinutes: 60

View File

@@ -0,0 +1,12 @@
#!/bin/bash
# Release gate: a "not_a_release" marker object in S3 means this pipeline run
# must not publish. When the marker exists we consume (delete) it and exit 1;
# otherwise we exit 0 and the release proceeds.
MARKER_URI="s3://aws-sdk-cpp-pipeline-sdks-team/not_a_release"
MARKER_FILE="./not_a_release"
rm -f "$MARKER_FILE"
aws s3 cp --quiet "$MARKER_URI" "$MARKER_FILE"
if [ ! -f "$MARKER_FILE" ]; then
    exit 0
fi
aws s3 rm "$MARKER_URI"
exit 1

View File

@@ -0,0 +1,49 @@
# CodeBuild buildspec (Linux): end-to-end test of custom-client generation.
# Exercises both supported workflows: (1) generate a standalone custom client
# and build it against an installed SDK; (2) generate and build the client
# in-tree via -DADD_CUSTOM_CLIENTS.
version: 0.2
phases:
build:
commands:
- export SDK_ROOT=$CODEBUILD_SRC_DIR/aws-sdk-cpp
- cd $SDK_ROOT
# Testing the first approach to build custom client as a separate package, which means you have to build and install aws-sdk-cpp first.
# Generate custom client source code under custom-service/ with API description file located at code-generation/api-description/custom-service.
- python scripts/generate_sdks.py --pathToApiDefinitions=code-generation/api-descriptions/custom-service --outputLocation custom-service --serviceName custom-service --apiVersion 2017-11-03 --namespace Custom --prepareTool --standalone
# Build and install aws-cpp-sdk-core
- mkdir -p $SDK_ROOT/build/AWSSDK
- mkdir -p $SDK_ROOT/install
- cd $SDK_ROOT/build/AWSSDK
- cmake $SDK_ROOT -DBUILD_ONLY="core" -DCMAKE_BUILD_TYPE=Debug -DCMAKE_INSTALL_PREFIX="$SDK_ROOT/install" -DBUILD_SHARED_LIBS=ON
- make -j 8
- make install
# Build custom-service
- mkdir -p $SDK_ROOT/build/custom-service
- cd $SDK_ROOT/build/custom-service
- cmake $SDK_ROOT/custom-service/aws-cpp-sdk-custom-service -DCMAKE_BUILD_TYPE=Debug -DCMAKE_PREFIX_PATH="$SDK_ROOT/install" -DAWSSDK_ROOT_DIR="$SDK_ROOT/install" -DBUILD_SHARED_LIBS=ON
- make -j 8
# Build and run custom-service integration tests
- mkdir -p $SDK_ROOT/build/custom-service-integration-tests
- cd $SDK_ROOT/build/custom-service-integration-tests
- cmake $SDK_ROOT/aws-cpp-sdk-custom-service-integration-tests -DCMAKE_BUILD_TYPE=Debug -DCMAKE_PREFIX_PATH="$SDK_ROOT/install;$SDK_ROOT/build/custom-service" -DAWSSDK_ROOT_DIR="$SDK_ROOT/install" -DBUILD_SHARED_LIBS=ON -DSTANDALONE=ON
- make -j 8
- $SDK_ROOT/build/custom-service-integration-tests/aws-cpp-sdk-custom-service-integration-tests
# Testing the second approach to build custom client along with AWS C++ SDK, which means we will build everything altogether at the same time.
# Copy the c2j model to code-generation/api-descriptions
- cp $SDK_ROOT/code-generation/api-descriptions/custom-service/custom-service-2017-11-03.normal.json $SDK_ROOT/code-generation/api-descriptions/petstore-2017-11-03.normal.json
# Build and install aws-cpp-sdk-core and aws-cpp-sdk-petstore
- mkdir -p $SDK_ROOT/build_all
- mkdir -p $SDK_ROOT/install_all
- cd $SDK_ROOT/build_all
- cmake $SDK_ROOT -DBUILD_ONLY=core -DADD_CUSTOM_CLIENTS="serviceName=petstore, version=2017-11-03" -DCMAKE_BUILD_TYPE=Debug -DCMAKE_INSTALL_PREFIX=$SDK_ROOT/install_all -DBUILD_SHARED_LIBS=ON
- make -j 8
- make install
# Build and run petstore integration tests
- mkdir -p $SDK_ROOT/build_tests
- cd $SDK_ROOT/build_tests
- cmake $SDK_ROOT/aws-cpp-sdk-custom-service-integration-tests -DCMAKE_BUILD_TYPE=Debug -DCMAKE_PREFIX_PATH="$SDK_ROOT/install_all" -DAWSSDK_ROOT_DIR="$SDK_ROOT/install_all" -DBUILD_SHARED_LIBS=ON -DSTANDALONE=OFF
- make -j 8
- $SDK_ROOT/build_tests/aws-cpp-sdk-custom-service-integration-tests

View File

@@ -0,0 +1,50 @@
# CodeBuild buildspec (Windows/PowerShell): end-to-end test of custom-client
# generation; mirrors the Linux variant but drives MSBuild instead of make.
version: 0.2
phases:
build:
commands:
- $SDK_ROOT="$Env:CODEBUILD_SRC_DIR/aws-sdk-cpp"
- cd $SDK_ROOT
# Testing the first approach to build custom client as a separate package, which means you have to build and install aws-sdk-cpp first.
# Generate custom client source code under custom-service/ with API description file located at code-generation/api-description/custom-service.
- python scripts/generate_sdks.py --pathToApiDefinitions=code-generation/api-descriptions/custom-service --outputLocation custom-service --serviceName custom-service --apiVersion 2017-11-03 --namespace Custom --prepareTool --standalone
# Build and install aws-cpp-sdk-core
- mkdir -p $SDK_ROOT/build/AWSSDK
- mkdir -p $SDK_ROOT/install
- cd $SDK_ROOT/build/AWSSDK
- cmake $SDK_ROOT -DBUILD_ONLY="core" -DCMAKE_BUILD_TYPE=Debug -DCMAKE_INSTALL_PREFIX="$SDK_ROOT/install" -DBUILD_SHARED_LIBS=ON
- MSBuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
- MSBuild.exe INSTALL.vcxproj -p:Configuration=Debug
# Build custom-service
- mkdir -p $SDK_ROOT/build/custom-service
- cd $SDK_ROOT/build/custom-service
- cmake $SDK_ROOT/custom-service/aws-cpp-sdk-custom-service -DCMAKE_PREFIX_PATH="$SDK_ROOT/install" -DAWSSDK_ROOT_DIR="$SDK_ROOT/install" -DCMAKE_INSTALL_PREFIX="$SDK_ROOT/install" -DBUILD_SHARED_LIBS=ON -DUSE_WINDOWS_DLL_SEMANTICS=ON
- MSBuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
- MSBuild.exe INSTALL.vcxproj -p:Configuration=Debug
# Build and run custom-service integration tests
- mkdir -p $SDK_ROOT/build/custom-service-integration-tests
- cd $SDK_ROOT/build/custom-service-integration-tests
- cmake $SDK_ROOT/aws-cpp-sdk-custom-service-integration-tests -DCMAKE_PREFIX_PATH="$SDK_ROOT/install" -DAWSSDK_ROOT_DIR="$SDK_ROOT/install" -DBUILD_SHARED_LIBS=ON -DSTANDALONE=ON
- MSBuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
- ./Debug/aws-cpp-sdk-custom-service-integration-tests
# Testing the second approach to build custom client along with AWS C++ SDK, which means we will build everything altogether at the same time.
# Copy the c2j model to code-generation/api-descriptions
- cp $SDK_ROOT/code-generation/api-descriptions/custom-service/custom-service-2017-11-03.normal.json $SDK_ROOT/code-generation/api-descriptions/petstore-2017-11-03.normal.json
# Build and install aws-cpp-sdk-core and aws-cpp-sdk-petstore
- mkdir -p $SDK_ROOT/build_all
- mkdir -p $SDK_ROOT/install_all
- cd $SDK_ROOT/build_all
- cmake $SDK_ROOT -DBUILD_ONLY=core -DADD_CUSTOM_CLIENTS="serviceName=petstore, version=2017-11-03" -DCMAKE_BUILD_TYPE=Debug -DCMAKE_INSTALL_PREFIX="$SDK_ROOT/install_all" -DBUILD_SHARED_LIBS=ON
- MSBuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
- MSBuild.exe INSTALL.vcxproj -p:Configuration=Debug
# Build and run petstore integration tests
- mkdir -p $SDK_ROOT/build_tests
- cd $SDK_ROOT/build_tests
- cmake $SDK_ROOT/aws-cpp-sdk-custom-service-integration-tests -DCMAKE_PREFIX_PATH="$SDK_ROOT/install_all" -DAWSSDK_ROOT_DIR="$SDK_ROOT/install_all" -DBUILD_SHARED_LIBS=ON -DSTANDALONE=OFF
- MSBuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
- ./Debug/aws-cpp-sdk-custom-service-integration-tests.exe

View File

@@ -0,0 +1,21 @@
# Build image for Linux CodeBuild jobs: Amazon Linux 2 with the gcc toolchain,
# CMake 3.13, curl/openssl dev headers, ninja, and the AWS CLI.
# Using Amazon Linux 2 docker image
FROM amazonlinux:2
#Install g++
RUN yum groupinstall "Development Tools" -y
#Install cmake
# Installed from the upstream tarball because the distro cmake is too old;
# symlinked into /usr/local/bin so plain `cmake` resolves.
RUN curl https://cmake.org/files/v3.13/cmake-3.13.3-Linux-x86_64.tar.gz --output cmake-3.13.3-Linux-x86_64.tar.gz && \
tar -xvzf cmake-3.13.3-Linux-x86_64.tar.gz && \
mv cmake-3.13.3-Linux-x86_64 /opt && \
rm cmake-3.13.3-Linux-x86_64.tar.gz && \
ln -s /opt/cmake-3.13.3-Linux-x86_64/bin/cmake /usr/local/bin/cmake
#Install curl and openssl
RUN yum install curl-devel -y && \
yum install openssl-devel -y && \
yum install ninja-build -y
#Install awscli
RUN yum install python-pip -y && \
pip install awscli

View File

@@ -0,0 +1,28 @@
# Build image based on the official gcc 7.4 image: adds cmake, Oracle JDK 8,
# maven (for the code generator), the Android NDK r19c, and the AWS CLI.
# Using official gcc docker image
FROM gcc:7.4
# Install zip, cmake, maven, python-pip via apt
RUN apt-get update && \
apt-get upgrade -y && \
apt-get install -y zip cmake python-pip
# Install awscli
RUN pip install awscli --upgrade
# We can install openjdk by "apt install openjdk-8-jdk", but it has some issues during building code-generation, we have to install it manually.
# NOTE(review): this Oracle download URL relies on a license cookie and such
# links rot quickly — verify it still resolves before rebuilding this image.
RUN wget --no-check-certificate -c --header "Cookie: oraclelicense=accept-securebackup-cookie" https://download.oracle.com/otn-pub/java/jdk/8u191-b12/2787e4a523244c269598db4e85c51e0c/jdk-8u191-linux-x64.tar.gz && \
tar zxvf jdk-8u191-linux-x64.tar.gz && \
mkdir /usr/bin/java && \
mv jdk1.8.0_191 /usr/bin/java && \
rm jdk-8u191-linux-x64.tar.gz && \
ln -s /usr/bin/java/jdk1.8.0_191/bin/java /bin/java && \
ln -s /usr/bin/java/jdk1.8.0_191/bin/javac /bin/javac
ENV JAVA_HOME /usr/bin/java/jdk1.8.0_191
RUN apt-get install -y maven
# Download and install Android NDK
RUN wget https://dl.google.com/android/repository/android-ndk-r19c-linux-x86_64.zip && \
unzip android-ndk-r19c-linux-x86_64.zip && \
mv android-ndk-r19c /opt && \
rm android-ndk-r19c-linux-x86_64.zip
ENV ANDROID_NDK /opt/android-ndk-r19c

View File

@@ -0,0 +1,23 @@
# Build image based on ubuntu:18.04: full SDK toolchain (g++, cmake, JDK 8,
# maven, doxygen, ninja), the Android NDK r19c, the AWS CLI, and boto3.
# Using official ubuntu docker image
FROM ubuntu:18.04
# Install git, zip, unzip, python-pip, cmake, g++, zlib, libssl, libcurl, java, maven via apt
# Fix: `unzip` was missing from this list, but the Android NDK step below runs
# `unzip` — ubuntu:18.04 does not ship it, so that RUN step failed.
RUN apt update && \
    apt upgrade -y && \
    apt install -y git zip unzip wget python-pip python3 python3-pip cmake g++ zlib1g-dev libssl-dev libcurl4-openssl-dev openjdk-8-jdk doxygen ninja-build
# Install maven
RUN apt install -y maven
# Install awscli
RUN pip install awscli --upgrade
# Install boto3
RUN pip3 install boto3 --upgrade
# Download and install Android NDK
RUN wget https://dl.google.com/android/repository/android-ndk-r19c-linux-x86_64.zip && \
    unzip android-ndk-r19c-linux-x86_64.zip && \
    mv android-ndk-r19c /opt && \
    rm android-ndk-r19c-linux-x86_64.zip
ENV ANDROID_NDK /opt/android-ndk-r19c

View File

@@ -0,0 +1,45 @@
# escape=`
# Windows Server Core build image with the VS2015 (VC++ Build Tools 14.0)
# toolchain, JDK 8, maven, Python 3, boto3 and the AWS CLI, all via chocolatey.
FROM microsoft/windowsservercore:ltsc2016
ADD https://download.microsoft.com/download/6/A/A/6AA4EDFF-645B-48C5-81CC-ED5963AEAD48/vc_redist.x64.exe /vc_redist.x64.exe
RUN start /wait C:\vc_redist.x64.exe /quiet /norestart
# Install chocolatey
RUN @powershell -NoProfile -ExecutionPolicy unrestricted -Command "$env:chocolateyUseWindowsCompression = 'true'; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12; (iex ((new-object net.webclient).DownloadString('https://chocolatey.org/install.ps1'))) >$null 2>&1"
RUN choco install git 7zip -y
RUN choco install cmake --installargs 'ADD_CMAKE_TO_PATH=""System""' -y
# Install Visual C++ Build Tools, as per: https://chocolatey.org/packages/visualcpp-build-tools
RUN choco install visualcpp-build-tools -version 14.0.25420.1 -y
RUN setx /M PATH "C:\Program Files (x86)\Windows Kits\10\bin\x86\ucrt;C:\Program Files (x86)\Windows Kits\10\bin\x64\ucrt;%PATH%"
# Add msbuild to PATH
RUN setx /M PATH "%PATH%;C:\Program Files (x86)\MSBuild\14.0\bin"
# Test msbuild can be accessed without path
RUN msbuild -version
# Install Java
RUN choco install jdk8 -y
# Add Java to PATH
# NOTE(review): this path pins JDK 8u172; a newer choco jdk8 package will
# install into a different directory — update the PATH entry when bumping.
RUN setx /M PATH "%PATH%;C:\Program Files\Java\jdk_1.8.0_172\bin"
# Install Maven
RUN choco install maven -y
# Install Python3
RUN choco install python -y
# Add Python to PATH
RUN setx /M PATH "%PATH%;C:\Python36"
# Install boto3
RUN pip install boto3 --upgrade
# Install awscli
RUN pip install awscli --upgrade
CMD [ "cmd.exe" ]

View File

@@ -0,0 +1,47 @@
# escape=`
FROM microsoft/windowsservercore:ltsc2016
# Visual C++ redistributable, required by tools installed below.
ADD https://download.microsoft.com/download/6/A/A/6AA4EDFF-645B-48C5-81CC-ED5963AEAD48/vc_redist.x64.exe /vc_redist.x64.exe
RUN start /wait C:\vc_redist.x64.exe /quiet /norestart
# Install chocolatey
RUN @powershell -NoProfile -ExecutionPolicy unrestricted -Command "$env:chocolateyUseWindowsCompression = 'true'; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12; (iex ((new-object net.webclient).DownloadString('https://chocolatey.org/install.ps1'))) >$null 2>&1"
RUN choco install git 7zip -y
RUN choco install cmake --installargs 'ADD_CMAKE_TO_PATH=""System""' -y
# Install Visual C++ Build Tools, as per: https://chocolatey.org/packages/visualcpp-build-tools
# The VS2017 installer detaches, so wait for vs_installer to exit before the layer commits.
RUN powershell -NoProfile -InputFormat None -Command `
    choco install visualcpp-build-tools -version 15.0.26228.20170424 -y; `
    Write-Host 'Waiting for Visual C++ Build Tools to finish'; `
    Wait-Process -Name vs_installer
# Add msbuild to PATH
RUN setx /M PATH "%PATH%;C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\MSBuild\15.0\Bin"
# Test msbuild can be accessed without path
RUN msbuild -version
# Install Java
RUN choco install jdk8 -y
# Add Java to PATH
# NOTE(review): the usual JDK8 install dir is `jdk1.8.0_172` (no underscore after "jdk") — confirm this path.
RUN setx /M PATH "%PATH%;C:\Program Files\Java\jdk_1.8.0_172\bin"
# Install Maven
RUN choco install maven -y
# Install Python3
RUN choco install python -y
# Add Python to PATH
RUN setx /M PATH "%PATH%;C:\Python36"
# Install boto3
RUN pip install boto3 --upgrade
# Install awscli
RUN pip install awscli --upgrade
CMD [ "cmd.exe" ]

View File

@@ -0,0 +1,57 @@
from __future__ import print_function
import json
import zipfile
import boto3
from botocore.exceptions import ClientError
print('Loading function')
# S3 location of the zipped pending-releases document.
bucket_name = 'aws-sdk-cpp-pipeline-sdks-team'
key = 'pending-releases.zip'
# Scratch paths inside the Lambda sandbox (/tmp is the only writable directory).
temp_archive_file = '/tmp/pending_releases.zip'
# Name of the JSON artifact stored inside the zip archive.
artifact = 'pending_releases'
temp_artifact_file = '/tmp/pending_releases'
s3 = boto3.client('s3')
def lambda_handler(event, context):
    """Append the release described in the incoming SNS message to the
    pending-releases document stored in S3.

    The document is kept as a zip archive at ``bucket_name``/``key``; if it
    cannot be fetched we start from an empty document.

    Returns the raw SNS message string.
    """
    message = event['Records'][0]['Sns']['Message']
    print("From SNS: " + message)
    releasesDoc = {'releases': []}
    try:
        pendingReleases = s3.get_object(Bucket=bucket_name, Key=key)
        body_stream_to_file(pendingReleases["Body"].read())
        releasesDoc = read_zipped_release_doc()
    except ClientError as e:
        # ClientError has no .message attribute on Python 3; str(e) carries
        # the error code and message.
        print("Couldn't pull doc, assuming it is empty. exception " + str(e))
    releasesDoc['releases'].append(json.loads(message)["release"])
    write_zipped_release_doc(releasesDoc)
    # The archive is binary zip data, so it must be read back in 'rb' mode;
    # text mode would attempt to decode it and fail.
    with open(temp_archive_file, 'rb') as archive:
        s3.put_object(Bucket=bucket_name, Key=key, Body=archive.read())
    return message
def read_zipped_release_doc():
    """Load and return the JSON release document stored as ``artifact``
    inside the zip archive at ``temp_archive_file``.

    Uses context managers so the archive handle is always closed (the
    original leaked the ZipFile object).
    """
    with zipfile.ZipFile(temp_archive_file, 'r') as archive:
        with archive.open(artifact) as artifactFile:
            return json.loads(artifactFile.read())
def write_zipped_release_doc(doc):
    """Serialize *doc* to JSON and store it as ``artifact`` inside a fresh
    zip archive at ``temp_archive_file``."""
    serialized = json.dumps(doc)
    print("New Release Doc: " + serialized)
    with open(temp_artifact_file, "w") as plain_out:
        plain_out.write(serialized)
    with zipfile.ZipFile(temp_archive_file, 'w') as bundle:
        bundle.write(temp_artifact_file, artifact)
def body_stream_to_file(body):
    """Write *body* (bytes read from an S3 GetObject Body stream) to
    ``temp_archive_file``.

    The payload is binary zip data, so the file must be opened in 'wb'
    mode; text mode raises TypeError on bytes in Python 3.
    """
    with open(temp_archive_file, 'wb') as archiveFile:
        archiveFile.write(body)

View File

@@ -0,0 +1,7 @@
# Minimal consumer project used by CI to verify an installed AWS SDK for C++.
cmake_minimum_required(VERSION 3.3)
set(CMAKE_CXX_STANDARD 11)
project(app LANGUAGES CXX)
# Locate the installed SDK; only the S3 client component is required.
find_package(AWSSDK REQUIRED COMPONENTS s3)
add_executable(${PROJECT_NAME} "main.cpp")
target_link_libraries(${PROJECT_NAME} ${AWSSDK_LINK_LIBRARIES})
# Warnings are errors so the sample keeps compiling cleanly against new SDK headers.
target_compile_options(${PROJECT_NAME} PRIVATE "-Wall" "-Werror")

View File

@@ -0,0 +1,29 @@
#include <aws/core/Aws.h>
#include <aws/core/utils/logging/LogLevel.h>
#include <aws/s3/S3Client.h>
#include <iostream>
using namespace Aws;
// Smoke test for an installed SDK: list the caller's S3 buckets.
int main(int argc, char *argv[])
{
    SDKOptions options;
    options.loggingOptions.logLevel = Utils::Logging::LogLevel::Warn;
    InitAPI(options);
    // Inner scope ensures the S3Client is destroyed before ShutdownAPI runs.
    {
        S3::S3Client client;
        auto outcome = client.ListBuckets();
        if (outcome.IsSuccess()) {
            std::cout << "Found " << outcome.GetResult().GetBuckets().size() << " buckets\n";
            for (auto&& b : outcome.GetResult().GetBuckets()) {
                std::cout << b.GetName() << std::endl;
            }
        } else {
            std::cout << "Failed with error: " << outcome.GetError() << std::endl;
        }
    }
    ShutdownAPI(options);
    return 0;
}

View File

@@ -0,0 +1,89 @@
from __future__ import print_function
import json
import zipfile
import boto3
import os
import re
import sys
import argparse
from botocore.exceptions import ClientError
import requests
import requests.packages.urllib3
# Silence urllib3 certificate warnings for the presigned model downloads.
requests.packages.urllib3.disable_warnings()
# Local scratch file for each downloaded c2j model bundle.
temp_archive_file = 'models.zip'
def main():
    """Merge a Trebuchet release document into the local models directory.

    For every feature in every pending release: download its c2j model
    bundle, copy the ``*.normal.json`` model files into ``--modelsDir``, and
    append the feature's release notes and the release id to the tracking
    files. Finally reset the release document to an empty one so the same
    releases are not processed twice.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-r', '--releaseDoc')
    parser.add_argument('-m', '--modelsDir')
    args = parser.parse_args()
    releaseDocPath = args.releaseDoc
    modelsDir = args.modelsDir
    print('Release Doc path {0}'.format(releaseDocPath))
    print('Models Directory {0}'.format(modelsDir))
    with open(releaseDocPath, "r") as releaseDocFileStream:
        releaseDoc = json.loads(releaseDocFileStream.read())
    if len(releaseDoc) == 0 or len(releaseDoc["releases"]) == 0:
        return
    for release in releaseDoc["releases"]:
        for feature in release["features"]:
            # 'is not None' is the correct identity test (was '!= None').
            if feature["c2jModels"] is not None:
                response = requests.get(feature["c2jModels"])
                if response.status_code != 200:
                    # The original passed the format string and argument as two
                    # separate print() arguments, so the placeholder was never
                    # substituted; format explicitly instead.
                    print("Error downloading {0} artifacts skipping.".format(json.dumps(feature)))
                    continue
                body_stream_to_file(response.content)
                copy_model_files(modelsDir)
                cat_release_notes(feature["releaseNotes"], modelsDir)
        cat_pending_releases(release["id"], modelsDir)
    # Reset the release document to an empty one.
    emptyReleaseDoc = "{ \"releases\": []}"
    with open(releaseDocPath, "w") as emptyReleaseFile:
        emptyReleaseFile.write(emptyReleaseDoc)
def copy_model_files(models_dir):
    """Extract every ``output/*.normal.json`` model from the downloaded
    bundle at ``temp_archive_file`` into *models_dir*, flattening the path.

    Context managers replace the original's manual/missing close() calls so
    the archive and member handles are always released.
    """
    with zipfile.ZipFile(temp_archive_file, 'r') as archive:
        archive.debug = 3
        for info in archive.infolist():
            print(info.filename)
            if re.match(r'output/.*\.normal\.json', info.filename):
                outputPath = os.path.join(models_dir, os.path.basename(info.filename))
                print("copying {0} to {1}".format(info.filename, outputPath))
                with archive.open(info.filename, 'r') as fileHandle:
                    fileOutput = fileHandle.read()
                with open(outputPath, 'wb') as destination:
                    destination.write(fileOutput)
def body_stream_to_file(body):
    """Write *body* (``response.content`` — bytes) to ``temp_archive_file``.

    The payload is binary zip data, so the file must be opened in 'wb'
    mode; text mode raises TypeError on bytes in Python 3.
    """
    with open(temp_archive_file, 'wb') as archiveFile:
        archiveFile.write(body)
def cat_release_notes(releaseNotes, models_path):
    """Append *releaseNotes* plus a blank separator line to the
    ``release_notes`` file under *models_path*."""
    notes_path = os.path.join(models_path, "release_notes")
    with open(notes_path, "a") as stream:
        stream.write(releaseNotes + "\n\n")
def cat_pending_releases(release_guid, models_path):
    """Append *release_guid* on its own line to the ``pending_releases``
    file under *models_path*."""
    target = os.path.join(models_path, "pending_releases")
    with open(target, "a") as stream:
        stream.write(release_guid + "\n")
# Standard entry-point guard: allows importing this module without side effects.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,8 @@
#!/bin/bash
# Send one Trebuchet status message to SQS per pending release id.
#   $1 = directory containing the pending_releases file
#   $2 = releaseState   $3 = statusMessage   $4 = SQS queue URL
# "$1" is quoted so a path containing spaces does not word-split.
IFS=$'\n' read -d '' -r -a releases < "$1/pending_releases"
for i in "${releases[@]}"
do
    aws sqs send-message --debug --message-group-id "needlessField" --queue-url "$4" --message-body "{ \"releaseId\": \"$i\", \"language\": \"CPP\", \"releaseState\":\"$2\", \"statusMessage\":\"$3\" }" --region us-west-2
done

View File

@@ -0,0 +1,19 @@
#!/bin/bash -e
# If this run carries pending releases, remove them from the shared models
# snapshot in S3 and mark the pipeline run as "not a release".
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ -f ./models/pending_releases ] && [ -s ./models/pending_releases ];
then
    aws s3 cp s3://aws-sdk-cpp-pipeline-sdks-team/modelsSnapshot.zip modelsLatest.zip --region us-east-1
    unzip modelsLatest.zip -d modelsLatest
    rm modelsLatest.zip
    # Filter this run's entries out of the snapshot's tracking files.
    # NOTE(review): `grep -vf` treats every line of the first file as a regex
    # pattern — confirm that matching release-notes lines this way is intended.
    grep -vf ./models/pending_releases ./modelsLatest/models/pending_releases | xargs | tee ./modelsLatest/models/pending_releases
    grep -vf ./models/release_notes ./modelsLatest/models/release_notes | xargs | tee ./modelsLatest/models/release_notes
    # Flag file consumed downstream to skip the release steps.
    touch ./not_a_release
    aws s3 cp not_a_release s3://aws-sdk-cpp-pipeline-sdks-team/not_a_release --region us-east-1
    # Replace the local models directory with the filtered snapshot and re-upload.
    rm -rf ./models
    mkdir ./models
    cp -r ./modelsLatest/models/* ./models
    zip -r modelsSnapshot.zip ./models
    aws s3 cp modelsSnapshot.zip s3://aws-sdk-cpp-pipeline-sdks-team/modelsSnapshot.zip --region us-east-1
fi

View File

@@ -0,0 +1,13 @@
#!/bin/bash
# Merge the pending release document into the models snapshot stored in S3
# and publish the updated pending-releases archive.
#   $1 = path to the release document JSON
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
aws s3 cp s3://aws-sdk-cpp-pipeline-sdks-team/modelsSnapshot.zip models.zip --region us-east-1
unzip models.zip
rm models.zip
# Keep a backup copy; the python step rewrites the release doc in place.
cp $1 $1_cpy
python $DIR/move_release_doc_to_models.py --modelsDir="./models" --releaseDoc="$1"
# The python step only recreates models.zip when it actually downloaded model
# bundles. A plain `rm` of a missing file exits non-zero and, under `set -e`,
# aborts the script before the snapshot is re-uploaded — use -f.
rm -f models.zip
zip -r modelsSnapshot.zip ./models
aws s3 cp modelsSnapshot.zip s3://aws-sdk-cpp-pipeline-sdks-team/modelsSnapshot.zip --region us-east-1
zip -r pending-releases.zip -j $1
aws s3 cp pending-releases.zip s3://aws-sdk-cpp-pipeline-sdks-team/pending-releases.zip --region us-east-1

View File

@@ -0,0 +1,45 @@
import os
import json
import boto3
import argparse
# Name of the Lambda that relays status to Trebuchet / Chime / CloudWatch.
UPDATE_STATUS_LAMBDA_FUNCTION_NAME = os.environ['UPDATE_STATUS_LAMBDA_FUNCTION_NAME']
lambdaClient = boto3.client('lambda', region_name = os.environ['AWS_REGION'])
def main():
    """Parse CLI flags describing the current pipeline stage and forward a
    status-update payload to the status Lambda."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--stage_name', default = 'Unknown')
    parser.add_argument('-e', '--internal_message', default = '')
    parser.add_argument('-i', '--release_id', default = '')
    parser.add_argument('-m', '--status_message', default = '')
    parser.add_argument('-b', '--build_succeeding', default = '0')
    parser.add_argument('-o', '--internal_only', action = 'store_true')
    parser.add_argument('-c', '--release_complete', action = 'store_true')
    args = parser.parse_args()

    # Derive the release state: completion wins, then build health.
    if args.release_complete:
        release_state = 'Success'
    elif args.build_succeeding == '1':
        release_state = 'InProgress'
    else:
        release_state = 'Blocked'

    payload = {
        'stageName': args.stage_name,
        'internalMessage': args.internal_message,
        'internalOnly': args.internal_only,
        'messageToTrebuchet': {
            'releaseId': args.release_id,
            'language': 'CPP',
            'releaseState': release_state,
            'statusMessage': args.status_message
        }
    }
    updateStatus(payload)
def updateStatus(updateStatusMessage):
    """Invoke the status-update Lambda synchronously; exit non-zero if the
    invocation is not accepted with HTTP 200."""
    print('[Lambda] Triggering Lambda function to update status:', end = ' ')
    print(updateStatusMessage)
    serialized_payload = json.dumps(updateStatusMessage)
    response = lambdaClient.invoke(
        FunctionName = UPDATE_STATUS_LAMBDA_FUNCTION_NAME,
        InvocationType = 'RequestResponse',
        Payload = serialized_payload
    )
    print('Response:', end = ' ')
    print(response)
    status_code = response['ResponseMetadata']['HTTPStatusCode']
    if status_code != 200:
        quit(1)
main()

View File

@@ -0,0 +1,26 @@
# CodeBuild buildspec: Linux Debug build (with AddressSanitizer) for the release pipeline.
version: 0.2
phases:
  build:
    commands:
      - echo $CODEBUILD_SOURCE_VERSION
      - export RELEASE_ID=$(cat $RELEASE_ID_FILENAME)
      - python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s Build -i "$RELEASE_ID" -m "Step 2 of 4. Verifying Build." -b $CODEBUILD_BUILD_SUCCEEDING
      - mv * /tmp && mkdir -p /tmp/build
      - cd /tmp/aws-sdk-cpp
      - python ./scripts/endpoints_checker.py
      - cd ../build
      # Debug + ASan to catch memory errors; MINIMIZE_SIZE keeps the artifacts small.
      - cmake ../aws-sdk-cpp -DCMAKE_BUILD_TYPE=Debug -DENABLE_ADDRESS_SANITIZER=ON -DMINIMIZE_SIZE=ON
      - make -j 3
  post_build:
    commands:
      - cd /tmp
      - export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
      - export BUILD_URL="https://${AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
      # post_build runs even on failure; report the failure back to Trebuchet.
      - |
        if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "0" ]; then
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s Build -e "[BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 2 of 4. Verification of Build Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        fi
artifacts:
  files:
    - "**/*"
  base-directory: /tmp
View File

@@ -0,0 +1,22 @@
# CodeBuild buildspec: run Linux integration tests against the previous stage's build output.
version: 0.2
phases:
  pre_build:
    commands:
      - export RELEASE_ID=$(cat $RELEASE_ID_FILENAME)
      - python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s IntegrationTests -i "$RELEASE_ID" -m "Step 3 of 4. Running Integration Tests." -b $CODEBUILD_BUILD_SUCCEEDING
  build:
    commands:
      - echo $CODEBUILD_SOURCE_VERSION
      - mv aws-sdk-cpp build /tmp
      - cd /tmp/build
      - python ../aws-sdk-cpp/scripts/run_integration_tests.py --testDir .
  post_build:
    commands:
      - cd /tmp
      # Preserve SDK logs for debugging regardless of test outcome.
      - aws s3 cp ./build s3://${S3_BUCKET_NAME}/log/${CODEBUILD_BUILD_ID}/ --recursive --exclude "*" --include "aws*.log"
      - export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
      - export BUILD_URL="https://${AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
      - |
        if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "0" ]; then
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s IntegrationTests -e "[BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 3 of 4. Integration Tests Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        fi
View File

@@ -0,0 +1,29 @@
# CodeBuild buildspec: generate doxygen API docs and publish them to S3.
version: 0.2
phases:
  build:
    commands:
      - echo $CODEBUILD_SOURCE_VERSION
      - rm -rf aws-sdk-cpp
      - git clone https://github.com/${GITHUB_PUBLIC_REPOSITORY}.git
      - cd aws-sdk-cpp
      # Read the released version from VersionConfig.h and stamp doxygen with it.
      - export VERSION_NUM=$(grep AWS_SDK_VERSION_STRING ./aws-cpp-sdk-core/include/aws/core/VersionConfig.h | cut -d '"' -f2)
      - sed -i "s/PROJECT_NUMBER .*/PROJECT_NUMBER = $VERSION_NUM/" ./doxygen/doxygen.config
      - doxygen ./doxygen/doxygen.config
      - python doc_crosslinks/generate_cross_link_data.py --apiDefinitionsPath ./code-generation/api-descriptions/ --templatePath ./doc_crosslinks/crosslink_redirect.html --outputPath ./crosslink_redirect.html
      # Publish versioned docs, refresh LATEST, and archive a zip of everything.
      - aws s3 cp ./doxygen/html s3://${DOCS_S3_BUCKET_NAME}/cpp/api/$VERSION_NUM --recursive
      - aws s3 cp s3://${DOCS_S3_BUCKET_NAME}/cpp/api/$VERSION_NUM s3://${DOCS_S3_BUCKET_NAME}/cpp/api/LATEST --recursive
      - aws s3 cp ./crosslink_redirect.html s3://${DOCS_S3_BUCKET_NAME}/cpp/api/crosslink_redirect.html
      - mkdir aws_sdk_cpp
      - cp -r ./doxygen/html aws_sdk_cpp
      - cp -r ./crosslink_redirect.html aws_sdk_cpp
      - zip -r documentation.zip ./aws_sdk_cpp
      - aws s3 cp documentation.zip s3://${BINARY_S3_BUCKET_NAME}/cpp/builds/$VERSION_NUM/documentation.zip
  post_build:
    commands:
      - cd $CODEBUILD_SRC_DIR
      - export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
      - export BUILD_URL="https://${AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
      # -o: internal-only failure report (doc publishing does not block the release).
      - |
        if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "0" ]; then
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s PublishAPIDocs -e "[BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Publish API Docs Failed." -b $CODEBUILD_BUILD_SUCCEEDING -o;
        fi
View File

@@ -0,0 +1,71 @@
# CodeBuild buildspec: bump the SDK patch version, tag, and push the release to GitHub.
version: 0.2
phases:
  build:
    commands:
      - echo $CODEBUILD_SOURCE_VERSION
      - export RELEASE_ID=$(cat $RELEASE_ID_FILENAME)
      - if [ -s $RELEASE_NOTES_FILENAME ]; then export COMMIT_MSG="$(cat $RELEASE_NOTES_FILENAME)"; fi;
      - python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s PushToGithub -i "$RELEASE_ID" -m "Step 4 of 4. Pushing Code to Public Github." -b $CODEBUILD_BUILD_SUCCEEDING
      - cd aws-sdk-cpp
      # Verify the candidate commit, in case there is new merge without testing during release.
      - if [ "$(git rev-parse --abbrev-ref HEAD)" != "master" ]; then exit 1; fi;
      - git fetch --all
      - if [ -n "$(git diff master origin/master)" ]; then exit 1; fi;
      # Get highest tag number
      - export VERSION=$(git describe --abbrev=0 --tags)
      # Calculate new version (bump the patch component)
      - export VERSION_MAJOR=$(echo $VERSION | cut -d '.' -f1)
      - export VERSION_MINOR=$(echo $VERSION | cut -d '.' -f2)
      - export VERSION_PATCH=$(echo $VERSION | cut -d '.' -f3)
      - export VERSION_PATCH=$((VERSION_PATCH+1))
      - export VERSION_BUMP=$VERSION_MAJOR.$VERSION_MINOR.$VERSION_PATCH
      - echo "Updating $VERSION to $VERSION_BUMP"
      # Write new version to VersionConfig.h
      - sed -i "s/AWS_SDK_VERSION_STRING.*/AWS_SDK_VERSION_STRING \"$VERSION_BUMP\"/" aws-cpp-sdk-core/include/aws/core/VersionConfig.h
      # git add
      - git add --all
      - git status
      # Generate release notes (fall back to a generic message)
      - if [ -z "$COMMIT_MSG" ]; then export COMMIT_MSG="Auto commit from CI."; fi;
      # Commit to release candidate branch
      - git config --global user.name "$GIT_COMMIT_AUTHOR_NAME"
      - git config --global user.email "$GIT_COMMIT_AUTHOR_EMAIL"
      - git commit -m "$COMMIT_MSG"
      - git checkout release-candidate
      - git merge master
      - git push origin release-candidate
      # Get current hash and see if it already has a tag
      - export GIT_COMMIT=$(git rev-parse HEAD)
      - export NEEDS_TAG=$(git describe --contains $GIT_COMMIT)
      # Only tag if no tag already (would be better if the git describe command above could have a silent option)
      - |
        if [ -z "$NEEDS_TAG" ]; then
          echo "Tagged with $VERSION_BUMP (Ignoring fatal:cannot describe - this means commit is untagged) "
          git tag $VERSION_BUMP
          git push --tags
        else
          echo "Already a tag on this commit"
        fi
      # Push code to Github
      # - git fetch --tags
      # - git fetch --all
      # - git reset --hard HEAD
      # - git checkout release-candidate
      # - git pull
      - git checkout master
      - git pull
      - git merge release-candidate
      - git push https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/${GITHUB_PRIVATE_REPOSITORY}.git master
      - git push https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/${GITHUB_PUBLIC_REPOSITORY}.git master
      - git push --tags https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/${GITHUB_PUBLIC_REPOSITORY}.git
  post_build:
    commands:
      - cd $CODEBUILD_SRC_DIR
      - export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
      - export BUILD_URL="https://${AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
      # -c marks the release complete on success.
      - |
        if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "1" ]; then
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s PushToGithub -e "[BUILD SUCCESS](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 4 of 4. Code Pushed to Public Github." -b $CODEBUILD_BUILD_SUCCEEDING -c;
        else
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s PushToGithub -e "[BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 4 of 4. Push to Public Github Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        fi
View File

@@ -0,0 +1,30 @@
# This buildspec is source controlled, whenever you make any change in the AWS console, you should update it to Github.
# CodeBuild buildspec: regenerate SDK clients from the newly delivered c2j models.
version: 0.2
phases:
  build:
    commands:
      - echo $CODEBUILD_SOURCE_VERSION
      - git clone https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/${GITHUB_PRIVATE_REPOSITORY}.git aws-sdk-cpp
      - export RELEASE_ID=$(cat $RELEASE_ID_FILENAME)
      - python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s RegenerateCode -i "$RELEASE_ID" -m "Step 1 of 4. Regenerating Code with New Models." -b $CODEBUILD_BUILD_SUCCEEDING
      - cp models/*.normal.json aws-sdk-cpp/code-generation/api-descriptions/
      - cd aws-sdk-cpp
      - mkdir build
      - cd build
      # REGENERATE_CLIENTS runs the code generator during the CMake configure step.
      - cmake .. -DREGENERATE_CLIENTS=ON
      - cd ..
      - rm -rf build
  post_build:
    commands:
      - cd $CODEBUILD_SRC_DIR
      - export BUILD_JOB_NAME=$(echo "${CODEBUILD_BUILD_ID}" | cut -f1 -d ":")
      - export BUILD_URL="https://${AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/${BUILD_JOB_NAME}/build/${CODEBUILD_BUILD_ID}"
      - |
        if [ "${CODEBUILD_BUILD_SUCCEEDING}" = "1" ]; then
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s RegenerateCode -e "[BUILD SUCCESS](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 1 of 4. Regenerated Code with New Models." -b $CODEBUILD_BUILD_SUCCEEDING;
        else
          python3 aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s RegenerateCode -e "[BUILD FAILURE](${BUILD_URL}) (${CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 1 of 4. Code Generation with New Models Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        fi
artifacts:
  files:
    - "**/*"
View File

@@ -0,0 +1,28 @@
# CodeBuild buildspec (Windows, Visual Studio 2015): Debug build of the SDK.
version: 0.2
phases:
  build:
    commands:
      - echo ${Env:CODEBUILD_SOURCE_VERSION}
      - $RELEASE_ID=$(cat ${Env:RELEASE_ID_FILENAME})
      - mkdir C:\tmp
      - mkdir C:\tmp\build
      - mv * C:\tmp
      - cd C:\tmp\build
      - cmake.exe -G "Visual Studio 14 2015 Win64" -DCMAKE_BUILD_TYPE=Debug -DMINIMIZE_SIZE=ON ../aws-sdk-cpp
      - msbuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
      - cd ..
      # Drop sources and intermediates so only test binaries flow to the next stage.
      - Get-ChildItem aws-sdk-cpp -Exclude *tests | Where-Object Name -Like 'aws-cpp-sdk-*' | Remove-Item -Recurse -Force
      - Get-ChildItem build -Exclude bin | Remove-Item -Recurse -Force
  post_build:
    commands:
      - cd C:\tmp
      - $BUILD_JOB_NAME=${Env:CODEBUILD_BUILD_ID}.Substring(0, ${Env:CODEBUILD_BUILD_ID}.IndexOf(":"))
      - $BUILD_URL="https://${Env:AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/$BUILD_JOB_NAME/build/${Env:CODEBUILD_BUILD_ID}"
      # NOTE(review): the -b argument uses Unix-style `$CODEBUILD_BUILD_SUCCEEDING`
      # inside a PowerShell command — confirm it expands as intended (cf. ${Env:...}).
      - |
        if (${Env:CODEBUILD_BUILD_SUCCEEDING} -eq 0) {
          python aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s Build -e "[BUILD FAILURE](${BUILD_URL}) (${Env:CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 2 of 4. Verification of Build Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        }
artifacts:
  files:
    - "**/*"
  base-directory: C:\tmp
View File

@@ -0,0 +1,28 @@
# CodeBuild buildspec (Windows, Visual Studio 2017): Debug build of the SDK.
version: 0.2
phases:
  build:
    commands:
      - echo ${Env:CODEBUILD_SOURCE_VERSION}
      - $RELEASE_ID=$(cat ${Env:RELEASE_ID_FILENAME})
      - mkdir C:\tmp
      - mkdir C:\tmp\build
      - mv * C:\tmp
      - cd C:\tmp\build
      - cmake.exe -G "Visual Studio 15 2017 Win64" -DCMAKE_BUILD_TYPE=Debug -DMINIMIZE_SIZE=ON ../aws-sdk-cpp
      - msbuild.exe ALL_BUILD.vcxproj -p:Configuration=Debug -m
      - cd ..
      # Drop sources and intermediates so only test binaries flow to the next stage.
      - Get-ChildItem aws-sdk-cpp -Exclude *tests | Where-Object Name -Like 'aws-cpp-sdk-*' | Remove-Item -Recurse -Force
      - Get-ChildItem build -Exclude bin | Remove-Item -Recurse -Force
  post_build:
    commands:
      - cd C:\tmp
      - $BUILD_JOB_NAME=${Env:CODEBUILD_BUILD_ID}.Substring(0, ${Env:CODEBUILD_BUILD_ID}.IndexOf(":"))
      - $BUILD_URL="https://${Env:AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/$BUILD_JOB_NAME/build/${Env:CODEBUILD_BUILD_ID}"
      # NOTE(review): the -b argument uses Unix-style `$CODEBUILD_BUILD_SUCCEEDING`
      # inside a PowerShell command — confirm it expands as intended (cf. ${Env:...}).
      - |
        if (${Env:CODEBUILD_BUILD_SUCCEEDING} -eq 0) {
          python aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s Build -e "[BUILD FAILURE](${BUILD_URL}) (${Env:CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 2 of 4. Verification of Build Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        }
artifacts:
  files:
    - "**/*"
  base-directory: C:\tmp
View File

@@ -0,0 +1,21 @@
# CodeBuild buildspec (Windows): run integration tests against the Debug binaries.
version: 0.2
phases:
  build:
    commands:
      - echo ${Env:CODEBUILD_SOURCE_VERSION}
      - $RELEASE_ID=$(cat ${Env:RELEASE_ID_FILENAME})
      - mkdir C:\tmp
      - mv aws-sdk-cpp C:\tmp
      - mv build C:\tmp
      - cd C:\tmp\build
      - python ../aws-sdk-cpp/scripts/run_integration_tests.py --testDir ./bin/Debug
  post_build:
    commands:
      - cd C:\tmp
      # Preserve SDK logs for debugging regardless of test outcome.
      - aws s3 cp ./build s3://${Env:S3_BUCKET_NAME}/log/${Env:CODEBUILD_BUILD_ID}/ --recursive --exclude "*" --include "aws*.log"
      - $BUILD_JOB_NAME=${Env:CODEBUILD_BUILD_ID}.Substring(0, ${Env:CODEBUILD_BUILD_ID}.IndexOf(":"))
      - $BUILD_URL="https://${Env:AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/projects/$BUILD_JOB_NAME/build/${Env:CODEBUILD_BUILD_ID}"
      # NOTE(review): the -b argument uses Unix-style `$CODEBUILD_BUILD_SUCCEEDING`
      # inside a PowerShell command — confirm it expands as intended (cf. ${Env:...}).
      - |
        if (${Env:CODEBUILD_BUILD_SUCCEEDING} -eq 0) {
          python aws-sdk-cpp/CI/trebuchet-release-pipeline/UpdateStatus.py -s IntegrationTests -e "[BUILD FAILURE](${BUILD_URL}) (${Env:CODEBUILD_BUILD_ID})" -i $RELEASE_ID -m "Step 3 of 4. Integration Tests Failed. A technician has already been notified." -b $CODEBUILD_BUILD_SUCCEEDING;
        }
View File

@@ -0,0 +1,127 @@
# Whenever you make any change here, you should update it in Amazon S3.
# This function serves as glue between SNS and S3.
# 1- Receives SNS message when Trebuchet release starts
# 2- Extracts the message (which should be JSON)
# 3- Writes the JSON to a file on disk
# 4- Downloads models with the presigned URL
# 5- Writes release notes to a file
# 6- Writes release id to a file
# 7- Upload all these files as a zip file to S3
import os
import shutil
import re
import json
import zipfile
import traceback
import boto3
from botocore.vendored import requests
# Environment-provided configuration for the pipeline glue.
S3_BUCKET_NAME = os.environ['S3_BUCKET_NAME']
RELEASE_MESSAGE_FILENAME = os.environ['RELEASE_MESSAGE_FILENAME']
RELEASE_ID_FILENAME = os.environ['RELEASE_ID_FILENAME']
RELEASE_NOTES_FILENAME = os.environ['RELEASE_NOTES_FILENAME']
PIPELINE_SOURCE = os.environ['PIPELINE_SOURCE']
UPDATE_STATUS_LAMBDA_FUNCTION_NAME = os.environ['UPDATE_STATUS_LAMBDA_FUNCTION_NAME']
# /tmp is the only writable path inside the Lambda sandbox.
OUTPUT_PATH = os.path.join('/tmp', 'output')
MODELS_OUTPUT_PATH = os.path.join(OUTPUT_PATH, 'models')
s3Resource = boto3.resource('s3', region_name = os.environ['AWS_REGION'])
lambdaClient = boto3.client('lambda', region_name = os.environ['AWS_REGION'])
# Status template; messageToTrebuchet is filled in per release in lambda_handler.
updateStatusMessage = {
    'stageName': 'HandleTrebuchetReleaseNotification',
    'internalMessage': '',
    'internalOnly': False,
    'messageToTrebuchet': {
        'releaseId' : '',
        'language' : 'CPP',
        'releaseState' : '',
        'statusMessage' : ''
    }
}
def lambda_handler(event, context):
    """Handle a Trebuchet release-start SNS notification.

    Writes the release message/id/notes to files under OUTPUT_PATH,
    downloads each feature's c2j model bundle, extracts the
    ``output/*.normal.json`` models, zips everything and uploads the
    archive to S3 as the pipeline source artifact. Any failure reports
    'Blocked' back to Trebuchet via the status Lambda.
    """
    try:
        releaseMessage = json.loads(event['Records'][0]['Sns']['Message'])
        # For local testing:
        # with open(RELEASE_MESSAGE_FILENAME, 'r') as releaseMessageFile:
        #     releaseMessage = json.loads(releaseMessageFile.read())
        print('[SNS] Receiving message from Trebuchet:', end = ' ')
        print(releaseMessage)
        # Start from a clean output directory on every (possibly warm) invocation.
        if os.path.isdir(OUTPUT_PATH):
            shutil.rmtree(OUTPUT_PATH)
        os.mkdir(OUTPUT_PATH)
        os.mkdir(MODELS_OUTPUT_PATH)
        with open(os.path.join(OUTPUT_PATH, RELEASE_MESSAGE_FILENAME), 'w') as releaseMessageFile:
            releaseMessageFile.write(json.dumps(releaseMessage))
            releaseMessageFile.close()  # NOTE(review): redundant inside `with`
        with open(os.path.join(OUTPUT_PATH, RELEASE_ID_FILENAME), 'w') as releaseIdFile:
            releaseIdFile.write(releaseMessage['release']['id'])
        # Truncate/create the notes file; notes are appended per feature below.
        with open(os.path.join(OUTPUT_PATH, RELEASE_NOTES_FILENAME), 'w') as releaseNotesFile:
            releaseNotesFile.write('')
        updateStatusMessage['messageToTrebuchet'] = {
            'releaseId' : releaseMessage['release']['id'],
            'language' : 'CPP',
            'releaseState' : 'InProgress',
            'statusMessage' : 'Step 0 of 4. Handling release notification from Trebuchet.'
        }
        updateStatus(updateStatusMessage)
        for feature in releaseMessage['release']['features']:
            print('Downloading c2j model files for ' + feature['serviceId'])
            # c2jModels is a URL to a zip of the service's model files.
            response = requests.get(feature['c2jModels'])
            if response.status_code != 200:
                raise Exception('Error downloading c2j model with feature: ' + feature['featureArn'])
            with open(os.path.join('/tmp', 'models.tmp.zip'), 'wb') as c2jModelsZipFile:
                c2jModelsZipFile.write(response.content)
            archive = zipfile.ZipFile(os.path.join('/tmp', 'models.tmp.zip'), 'r')
            archive.debug = 3
            for info in archive.infolist():
                print(' ' + info.filename)
                if re.match(r'output/.*\.normal\.json', info.filename):
                    # Flatten output/<svc>.normal.json into the models directory.
                    outputPath = os.path.join(MODELS_OUTPUT_PATH, os.path.basename(info.filename))
                    print('* copying {0} to {1}'.format(info.filename, outputPath))
                    fileHandle = archive.open(info.filename, 'r')
                    fileOutput = fileHandle.read()
                    with open(outputPath, 'wb') as destination:
                        destination.write(fileOutput)
                    fileHandle.close()
            releaseNotes = feature['releaseNotes']
            print('Append release notes for ' + feature['serviceId'])
            with open(os.path.join(OUTPUT_PATH, RELEASE_NOTES_FILENAME), 'a') as releaseNotesFile:
                releaseNotesFile.write(releaseNotes + '\n\n')
        updateStatusMessage['messageToTrebuchet']['statusMessage'] = 'Step 0 of 4. Handled release notification from Trebuchet.'
        updateStatus(updateStatusMessage)
        print('Archiving release-message, release-id, release-notes, and models directory into a zip file.')
        shutil.make_archive('/tmp/models', 'zip', OUTPUT_PATH)
        print('[S3] Sending zip file including json file to S3://{0}/{1}.'.format(S3_BUCKET_NAME, PIPELINE_SOURCE))
        response = s3Resource.meta.client.upload_file('/tmp/models.zip', S3_BUCKET_NAME, PIPELINE_SOURCE)
        print('Response:', end = ' ')
        print(response)
    except Exception:
        # On any failure, report Blocked with the traceback as the internal message.
        traceback.print_exc()
        updateStatusMessage['internalMessage'] = traceback.format_exc()
        updateStatusMessage['messageToTrebuchet']['releaseState'] = 'Blocked'
        updateStatusMessage['messageToTrebuchet']['statusMessage'] = 'Step 0 of 4. Failed to handle release notification from Trebuchet.'
        updateStatus(updateStatusMessage)
def updateStatus(releaseStatus):
    """Synchronously invoke the shared status-update Lambda with *releaseStatus*."""
    print('[Lambda] Triggering Lambda function to update status.')
    serialized_status = json.dumps(releaseStatus)
    invocation = lambdaClient.invoke(
        FunctionName = UPDATE_STATUS_LAMBDA_FUNCTION_NAME,
        InvocationType = 'RequestResponse',
        Payload = serialized_status
    )
    print('Response:', end = ' ')
    print(invocation)
# lambda_handler('', '')

View File

@@ -0,0 +1,29 @@
import os
import json
from botocore.vendored import requests
# Chime webhook and the pipeline/stage names whose transitions we report on.
CHIME_BOT_URL = os.environ['CHIME_BOT_URL']
TREBUCHET_RELEASE_PIPELINE_NAME = os.environ['TREBUCHET_RELEASE_PIPELINE_NAME']
SOURCE_STAGE_NAME = os.environ['SOURCE_STAGE_NAME']
PROD_STAGE_NAME = os.environ['PROD_STAGE_NAME']
def lambda_handler(event, context):
    """Forward CodePipeline state-change notifications (delivered via SNS)
    to the team's Chime room.

    Posts when the release pipeline's source or prod stage succeeds, or on
    any failure (which @-mentions the whole room).
    """
    print('Received Event: ' + json.dumps(event))
    message = json.loads(event['Records'][0]['Sns']['Message'])
    detail = message['detail']
    pipeline = detail['pipeline']
    stage = detail['stage']
    state = detail['state']
    tracked_success = (state == 'SUCCEEDED'
                       and pipeline == TREBUCHET_RELEASE_PIPELINE_NAME
                       and stage in (SOURCE_STAGE_NAME, PROD_STAGE_NAME))
    if tracked_success or state == 'FAILED':
        headers = {'Content-Type': 'application/json'}
        data = {}
        data['Content'] = '/md {mentionAll}\nPipeline: {pipeline}\nStage: {stage}\nState: {state}'.format(
            mentionAll = '@All' if state == 'FAILED' else '',
            pipeline = pipeline,
            stage = stage,
            state = state)
        print('[Chime] Sending message to Chime Bot: ' + json.dumps(data['Content']))
        response = requests.post(CHIME_BOT_URL, headers = headers, data = json.dumps(data))
        print('Response:', end=' ')
        print(response)

View File

@@ -0,0 +1,122 @@
# Whenever you make any change here, you should update it in Amazon S3.
# This Lambda function will make notifications to:
# 1. SQS queue to update status with Trebuchet
# 2. ChimeBot to notify engineers in the Chime room
# 3. CloudWatch metrics to trigger alarms and cut tickets
# Expected inputs of this Lambda function:
# {
# "stageName": "HandleTrebuchetReleaseNotification|RegenerateCode|Build|IntegrationTests|PublishToGithub",
# "internalMessage": "",
# "internalOnly": True|False
# "messageToTrebuchet": {
# "releaseId" : "",
# "language" : "CPP",
# "releaseState" : "InProgress|Success|Blocked|Failed",
# "statusMessage" : "",
# "additionalDetails" : {
# "generatedCodePresignedUrl":"",
# "logPresignedUrl":""
# }
# }
# }
import os
import json
import boto3
import traceback
from botocore.vendored import requests
# Chime webhook and Trebuchet SQS queue URL, plus the AWS clients used below.
CHIME_BOT_URL = os.environ['CHIME_BOT_URL']
TREBUCHET_QUEUE_URL = os.environ['TREBUCHET_QUEUE_URL']
sqsClient = boto3.client('sqs')
cloudwatchClient = boto3.client('cloudwatch')
def lambda_handler(event, context):
    """Fan a release status update out to Trebuchet (SQS), CloudWatch
    metrics, and (on failures) the Chime room.

    See the module header for the expected event shape. Missing optional
    fields are defaulted; a missing releaseId is an error and reports
    'Blocked' back to Trebuchet.
    """
    print('Received Event: ' + json.dumps(event))
    # Default the optional fields so the rest of the handler can assume they exist.
    if 'stageName' not in event or event['stageName'] == "":
        event['stageName'] = 'Unknown'
    if 'internalMessage' not in event:
        event['internalMessage'] = ''
    if 'internalOnly' not in event:
        event['internalOnly'] = False
    try:
        failure = 0.0
        sendMessageToChimeBot = False
        mentionAll = False
        if 'messageToTrebuchet' not in event or 'releaseId' not in event['messageToTrebuchet'] or event['messageToTrebuchet']['releaseId'] == "":
            raise Exception('Missing releaseId in the received release message.')
        messageToTrebuchet = event['messageToTrebuchet']
        if messageToTrebuchet['releaseState'] == 'InProgress' or messageToTrebuchet['releaseState'] == 'Success':
            pass
        elif messageToTrebuchet['releaseState'] == 'Blocked' or messageToTrebuchet['releaseState'] == 'Failed':
            failure = 1.0
            sendMessageToChimeBot = True
            mentionAll = True
        else:
            # Unknown release state: treat as a failure and explain why.
            failure = 1.0
            sendMessageToChimeBot = True
            mentionAll = True
            event['internalMessage'] = ('{originalInternalMessage} releaseState ({releaseState}) should be one of these: InProgress|Success|Blocked|Failed, this build will be marked as Blocked.'.format(
                originalInternalMessage = event['internalMessage'],
                releaseState = messageToTrebuchet['releaseState']
            )).strip()
        # internalOnly updates are not forwarded to Trebuchet.
        if not event['internalOnly']:
            notifyTrebuchetSQS(messageToTrebuchet)
        notifyCloudWatch(failure)
        if sendMessageToChimeBot:
            notifyChimeBot(event['stageName'], event['internalMessage'], mentionAll)
    except Exception:
        # Internal error in the notifier itself: alert the room and, when we
        # have a releaseId, mark the release Blocked.
        traceback.print_exc()
        notifyChimeBot(
            stageName = event['stageName'],
            message = '\n'.join([event['internalMessage'], traceback.format_exc()]).strip(),
            mentionAll = True)
        if 'messageToTrebuchet' in event and 'releaseId' in event['messageToTrebuchet'] and not event['messageToTrebuchet']['releaseId'] == "":
            notifyTrebuchetSQS({
                "releaseId" : event['messageToTrebuchet']['releaseId'],
                "language" : "CPP",
                "releaseState" : "Blocked",
                "statusMessage" : "Encountered internal errors."
            })
def notifyChimeBot(stageName, message, mentionAll = False):
    """Post a markdown status message to the team Chime room webhook."""
    content = '/md {mentionAll}\nStage: {stageName}\nMessage: {message}'.format(
        mentionAll = '@All' if mentionAll else '',
        stageName = stageName,
        message = message)
    data = {'Content': content}
    print('[Chime] Sending message to Chime Bot: ' + json.dumps(data['Content']))
    response = requests.post(CHIME_BOT_URL,
                             headers = {'Content-Type': 'application/json'},
                             data = json.dumps(data))
    print('Response:', end=' ')
    print(response)
def notifyCloudWatch(value):
    """Publish the BuildFailure metric (0.0 on success, 1.0 on failure) at
    60-second storage resolution in the BuildPipeline namespace."""
    print('[CloudWatch] Puting data to Metric: BuildFailure with value: ' + str(value))
    datum = {
        'MetricName' : "BuildFailure",
        'Value' : value,
        'Unit' : 'Count',
        'StorageResolution' : 60
    }
    response = cloudwatchClient.put_metric_data(
        Namespace='BuildPipeline',
        MetricData=[datum]
    )
    print('Response:', end=' ')
    print(response)
def notifyTrebuchetSQS(message):
    """Send the release-status payload to Trebuchet's FIFO queue."""
    print('[SQS] Sending message to Trebuchet queue:', end=' ')
    print(message)
    send_args = {
        'QueueUrl': TREBUCHET_QUEUE_URL,
        'MessageBody': json.dumps(message),
        'MessageGroupId': 'CppSdkRelease'
    }
    response = sqsClient.send_message(**send_args)
    print('Response:', end=' ')
    print(response)

File diff suppressed because it is too large Load Diff