update
@@ -1,41 +0,0 @@
#!/bin/sh
#########################################################################
# File Name: autorelease.sh
# Author: pxz
# Created Time: Mon 31 Aug 2020 10:19:47 AM CST
#########################################################################
if [ $# -lt 8 ]; then
    echo "USAGE: ./autorelease.sh [API_V4_URL] [PROJECT_URL] [PROJECT_ID] [TOKEN] [COMMIT_TAG] [JOB] [PROJECT_NAME] [USER_DEFINE]"
    echo "$1; $2; $3; $4; $5; $6; $7; $8"
    exit 1
fi

CI_API_V4_URL=$1
CI_PROJECT_URL=$2
CI_PROJECT_ID=$3
CI_TOKEN=$4
CI_COMMIT_TAG=$5
ARTIFACTS_JOB=$6
CI_PROJECT_NAME=$7
USER_DEFINE=$8

# Ask the GitLab API whether a release already exists for this tag;
# res holds only the HTTP status code.
res=$(curl --header "PRIVATE-TOKEN: $CI_TOKEN" -o /dev/null -s -w "%{http_code}" \
    "$CI_API_V4_URL/projects/$CI_PROJECT_ID/releases/$CI_COMMIT_TAG")

if [ "$res" = "200" ]; then
    # Release exists: attach the job artifacts download URL as an asset link.
    curl --request POST --header "PRIVATE-TOKEN: $CI_TOKEN" \
        --data name="$CI_PROJECT_NAME-$USER_DEFINE-$CI_COMMIT_TAG.zip" \
        --data url="$CI_PROJECT_URL/-/jobs/artifacts/$CI_COMMIT_TAG/download?job=$ARTIFACTS_JOB" \
        "$CI_API_V4_URL/projects/$CI_PROJECT_ID/releases/$CI_COMMIT_TAG/assets/links"
else
    # No release yet: create one and attach the asset link in the same request.
    curl --request POST --header 'Content-Type: application/json' --header "PRIVATE-TOKEN: $CI_TOKEN" \
        --data "{ \"name\": \"$CI_COMMIT_TAG\", \"tag_name\": \"$CI_COMMIT_TAG\", \"description\": \"auto_release\", \"assets\": { \"links\": [{ \"name\": \"$CI_PROJECT_NAME-$USER_DEFINE-$CI_COMMIT_TAG.zip\", \"url\": \"$CI_PROJECT_URL/-/jobs/artifacts/$CI_COMMIT_TAG/download?job=$ARTIFACTS_JOB\" }] } }" \
        "$CI_API_V4_URL/projects/$CI_PROJECT_ID/releases/"
fi
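For reference, a minimal sketch of how this script would be invoked from a GitLab CI job, using GitLab's predefined CI variables; the token variable RELEASE_TOKEN, the artifacts job name "build", and the USER_DEFINE value "x86_64" are placeholders, not part of the original commit:

    ./autorelease.sh "$CI_API_V4_URL" "$CI_PROJECT_URL" "$CI_PROJECT_ID" "$RELEASE_TOKEN" \
        "$CI_COMMIT_TAG" build "$CI_PROJECT_NAME" x86_64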
1273 autorevision.sh
File diff suppressed because it is too large
@@ -1,48 +0,0 @@
#!/usr/bin/env bash
# Copyright 2017 Google Inc.
# All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# This file is typically sourced by another script.

# if possible, ask for the precise number of processors,
# otherwise take 2 processors as reasonable default; see
# https://docs.travis-ci.com/user/speeding-up-the-build/#Makefile-optimization
if [ -x /usr/bin/getconf ]; then
    NPROCESSORS=$(/usr/bin/getconf _NPROCESSORS_ONLN)
else
    NPROCESSORS=2
fi

# as of 2017-09-04 Travis CI reports 32 processors, but GCC build
# crashes if parallelized too much (maybe memory consumption problem),
# so limit to 4 processors for the time being.
if [ $NPROCESSORS -gt 4 ] ; then
    echo "$0:Note: Limiting processors to use by make from $NPROCESSORS to 4."
    NPROCESSORS=4
fi
@@ -1,3 +0,0 @@
#!/usr/bin/env sh
set -evx
echo "machine ${PULP3_SERVER_URL}\nlogin ${PULP3_SERVER_LOGIN}\npassword ${PULP3_SERVER_PASSWORD}\n" > ~/.netrc
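One caveat about the echo above: a plain POSIX sh does not guarantee that echo interprets \n escapes, so the generated ~/.netrc can end up on a single line. A printf-based sketch of the same write (same variables, not part of the original file) avoids that:

    printf 'machine %s\nlogin %s\npassword %s\n' \
        "${PULP3_SERVER_URL}" "${PULP3_SERVER_LOGIN}" "${PULP3_SERVER_PASSWORD}" > ~/.netrc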
64 ci/travis.sh
@@ -1,64 +0,0 @@
#!/usr/bin/env sh
set -evx

chmod +x ci/get-nprocessors.sh
. ci/get-nprocessors.sh

# if possible, ask for the precise number of processors,
# otherwise take 2 processors as reasonable default; see
# https://docs.travis-ci.com/user/speeding-up-the-build/#Makefile-optimization
if [ -x /usr/bin/getconf ]; then
    NPROCESSORS=$(/usr/bin/getconf _NPROCESSORS_ONLN)
else
    NPROCESSORS=2
fi

# as of 2017-09-04 Travis CI reports 32 processors, but GCC build
# crashes if parallelized too much (maybe memory consumption problem),
# so limit to 4 processors for the time being.
if [ $NPROCESSORS -gt 4 ] ; then
    echo "$0:Note: Limiting processors to use by make from $NPROCESSORS to 4."
    NPROCESSORS=4
fi

# Tell make to use the processors. No preceding '-' required.
MAKEFLAGS="j${NPROCESSORS}"
export MAKEFLAGS

env | sort

# Set default values to OFF for these variables if not specified.
: "${NO_EXCEPTION:=OFF}"
: "${NO_RTTI:=OFF}"
: "${COMPILER_IS_GNUCXX:=OFF}"

# Install dependencies from YUM
if [ -n "${INSTALL_DEPENDENCY_LIBRARY}" ]; then
    yum install -y $INSTALL_DEPENDENCY_LIBRARY
    source /etc/profile.d/framework.sh
fi
mkdir build || true
cd build

cmake3 -DCMAKE_CXX_FLAGS=$CXX_FLAGS \
    -DCMAKE_BUILD_TYPE=$BUILD_TYPE \
    -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX \
    -DENABLE_DEVEL=$ENABLE_DEVEL_SWITCH \
    ..

make

if [ -n "${PACKAGE}" ]; then
    make package
fi

if [ -n "${UPLOAD}" ]; then
    cp ~/rpm_upload_tools.py ./
    python3 rpm_upload_tools.py ${PULP3_REPO_NAME} ${PULP3_DIST_NAME} *.rpm
fi

#if [ -n "${UPLOAD_SYMBOL_FILES}" ]; then
#    rpm -i tfe*debuginfo*.rpm
#    cp /usr/lib/debug/opt/tsg/tfe/bin/tfe.debug /tmp/tfe.debuginfo.${CI_COMMIT_SHORT_SHA}
#    sentry-cli upload-dif -t elf /tmp/tfe.debuginfo.${CI_COMMIT_SHORT_SHA}
#fi
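A note on the cmake3 invocation in this script: the unquoted $CXX_FLAGS (and the other variables) are subject to word splitting, so a value holding several compiler flags is passed to cmake3 as separate arguments and the -D assignment only receives the first one. A quoted sketch of the same call, with no other changes, would be:

    cmake3 -DCMAKE_CXX_FLAGS="${CXX_FLAGS}" \
           -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
           -DCMAKE_INSTALL_PREFIX="${INSTALL_PREFIX}" \
           -DENABLE_DEVEL="${ENABLE_DEVEL_SWITCH}" \
           ..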
@@ -1,6 +0,0 @@
find_path(TCMALLOC_INCLUDE_DIR tcmalloc.h ${PROJECT_SOURCE_DIR}/3rd/tcmalloc /usr/include /usr/include/gperftools /usr/local/include /usr/local/include/gperftools)
find_library(TCMALLOC_LIBRARY NAMES tcmalloc PATHS ${PROJECT_SOURCE_DIR}/3rd/tcmalloc /usr/lib /usr/lib/gperftools /usr/local/lib /usr/local/lib/gperftools)

if (TCMALLOC_INCLUDE_DIR AND TCMALLOC_LIBRARY)
    set(tcmalloc_FOUND TRUE)
endif (TCMALLOC_INCLUDE_DIR AND TCMALLOC_LIBRARY)
@@ -1,53 +0,0 @@
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
    set(MY_RPM_NAME_PREFIX "${lib_name}-debug")
else()
    set(MY_RPM_NAME_PREFIX "${lib_name}")
endif()

message(STATUS "Package: ${MY_RPM_NAME_PREFIX}")

set(CPACK_PACKAGE_VENDOR "MESA")
set(CPACK_PACKAGE_VERSION_MAJOR "${VERSION_MAJOR}")
set(CPACK_PACKAGE_VERSION_MINOR "${VERSION_MINOR}")
set(CPACK_PACKAGE_VERSION_PATCH "${VERSION_PATCH}.${VERSION_BUILD}")
set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
set(CPACK_PACKAGE_VERSION "${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_PATCH}.${VERSION_BUILD}")
execute_process(COMMAND bash -c "echo -ne \"`uname -r | awk -F'.' '{print $5\".\"$6\".\"$7}'`\"" OUTPUT_VARIABLE SYSTEM_VERSION)

execute_process(COMMAND sh changelog.sh WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/cmake)
set(CPACK_RPM_CHANGELOG_FILE ${PROJECT_SOURCE_DIR}/cmake/changelog.txt)

# RPM Build
set(CPACK_GENERATOR "RPM")
set(CPACK_RPM_PACKAGE_VENDOR "MESA")
set(CPACK_RPM_PACKAGE_AUTOREQPROV "yes")
set(CPACK_RPM_PACKAGE_RELEASE_LIBRARY "on")
set(CPACK_RPM_DEBUGINFO_PACKAGE "on")
set(CPACK_RPM_PACKAGE_DEBUG 1)

set(CPACK_RPM_COMPONENT_INSTALL ON)
set(CPACK_COMPONENTS_IGNORE_GROUPS 1)
set(CPACK_COMPONENTS_GROUPING ONE_PER_GROUP)
set(CPACK_COMPONENT_HEADER_DISPLAY_NAME "develop")

set(CPACK_COMPONENT_LIBRARY_REQUIRED TRUE)
set(CPACK_RPM_LIBRARY_PACKAGE_NAME ${MY_RPM_NAME_PREFIX})
set(CPACK_RPM_LIBRARY_FILE_NAME "${CPACK_RPM_LIBRARY_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-${SYSTEM_VERSION}.rpm")
set(CPACK_RPM_LIBRARY_DEBUGINFO_FILE_NAME "${CPACK_RPM_LIBRARY_PACKAGE_NAME}-debuginfo-${CPACK_PACKAGE_VERSION}-${SYSTEM_VERSION}.rpm")

set(CPACK_COMPONENT_LIBRARY_GROUP "library")
set(CPACK_COMPONENT_PROFILE_GROUP "library")

set(CPACK_RPM_HEADER_PACKAGE_CONFLICTS ${CPACK_RPM_HEADER_PACKAGE_NAME})

set(CPACK_COMPONENTS_ALL LIBRARY PROFILE)

set(CPACK_BUILD_SOURCE_DIRS "${CMAKE_SOURCE_DIR}")

# The debug package must be uninstalled before installing the release package.
set(CPACK_RPM_PACKAGE_CONFLICTS ${MY_RPM_NAME_PREFIX})

# set(CPACK_STRIP_FILES TRUE)
include(CPack)
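In practice, the prefix selection at the top of this file means the same tree produces differently named RPMs depending on the build type. A sketch of the two packaging runs, assuming the out-of-source build directory used by ci/travis.sh above:

    # Debug build   -> <lib_name>-debug-<version>-<system suffix>.rpm
    cmake3 .. -DCMAKE_BUILD_TYPE=Debug && make package
    # Release build -> <lib_name>-<version>-<system suffix>.rpm
    cmake3 .. -DCMAKE_BUILD_TYPE=Release && make package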
@@ -1,53 +0,0 @@
# Use autorevision.sh to generate version information

set(__SOURCE_AUTOREVISION ${CMAKE_SOURCE_DIR}/autorevision.sh)
set(__AUTOREVISION ${CMAKE_BINARY_DIR}/autorevision.sh)
set(__VERSION_CACHE ${CMAKE_SOURCE_DIR}/version.txt)
set(__VERSION_CONFIG ${CMAKE_BINARY_DIR}/version.cmake)

file(COPY ${__SOURCE_AUTOREVISION} DESTINATION ${CMAKE_BINARY_DIR}
    FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE
    WORLD_READ WORLD_EXECUTE)

# Execute autorevision.sh to generate the version information
execute_process(COMMAND ${__AUTOREVISION} -t cmake -o ${__VERSION_CACHE}
    OUTPUT_FILE ${__VERSION_CONFIG} ERROR_QUIET)
include(${__VERSION_CONFIG})

# Extract the major, minor, and patch version from the git tag
string(REGEX REPLACE "^v([0-9]+)\\..*" "\\1" VERSION_MAJOR "${VCS_TAG}")
string(REGEX REPLACE "^v[0-9]+\\.([0-9]+).*" "\\1" VERSION_MINOR "${VCS_TAG}")
string(REGEX REPLACE "^v[0-9]+\\.[0-9]+\\.([0-9]+).*" "\\1" VERSION_PATCH "${VCS_TAG}")

if(NOT VERSION_MAJOR)
    set(VERSION_MAJOR 1)
endif()

if(NOT VERSION_MINOR)
    set(VERSION_MINOR 0)
endif()

if(NOT VERSION_PATCH)
    set(VERSION_PATCH 0)
endif()

set(VERSION "${VERSION_MAJOR}_${VERSION_MINOR}_${VERSION_PATCH}")
set(VERSION_BUILD "${VCS_SHORT_HASH}")

# Print the version information
message(STATUS "Version: ${VERSION}-${VERSION_BUILD}")

if(NOT DEFINE_GIT_VERSION)
    option(DEFINE_GIT_VERSION "Set DEFINE_GIT_VERSION to OFF" OFF)

    set(GIT_VERSION
        "${VERSION}-${CMAKE_BUILD_TYPE}-${VERSION_BUILD}-${VCS_BRANCH}-${VCS_TAG}-${VCS_DATE}")
    string(REGEX REPLACE "[-:+/\\.]" "_" GIT_VERSION ${GIT_VERSION})

    if(DEFINE_GIT_VERSION)
        add_definitions(-DGIT_VERSION=${GIT_VERSION})
        option(DEFINE_GIT_VERSION "Set DEFINE_GIT_VERSION to OFF" ON)
    endif()

endif()
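For context, a sketch of what the generated version.cmake consumed above typically contains when autorevision.sh runs in cmake mode; the exact field list depends on the autorevision version, and the values shown are purely illustrative:

    sh autorevision.sh -t cmake -o version.txt
    # stdout, captured into version.cmake via OUTPUT_FILE, holds lines such as:
    #   set(VCS_TAG "v1.0.0")
    #   set(VCS_BRANCH "master")
    #   set(VCS_SHORT_HASH "caa5fe4")
    #   set(VCS_DATE "2020-09-24T10:06:56")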
@@ -1,9 +0,0 @@
#!/bin/bash
#########################################################################
# File Name: changelog.sh
# Author: pxz
# Created Time: Mon 31 Aug 2020 10:06:56 AM CST
#########################################################################
# Detect the current branch, then dump its history in an RPM-changelog-like
# format, reflowing hyphens and stripping times so rpmbuild accepts it.
branch=`git status | grep branch | awk '{print $NF}'`
git log --branches=$branch --no-merges --date=local --show-signature --pretty="* %ad %an %ae %nhash: %H%ncommit:%n%B" | awk -F"-" '{print "- "$0}' | sed 's/- \*/\*/g' | sed 's/- $//g' | sed 's/-/ -/g' | sed 's/[0-9]\{2\}:[0-9]\{2\}:[0-9]\{2\}//g' > changelog.txt
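The branch detection above parses the human-readable output of git status, which changes with locale and git version and yields an empty value on a detached HEAD. An equivalent one-line sketch using a plumbing command (not part of the original script) would be:

    branch=$(git rev-parse --abbrev-ref HEAD)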
@@ -1,37 +0,0 @@
* Thu Sep 24 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: caa5fe48df0896638ee1afc4c8252053d8ba03fb
- commit:
- Rename the example

* Wed Sep 23 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: f439b8a9ab926d87f3a3ae03d5061d3962a35ca5
- commit:
- Add thread pool support

* Tue Sep 22 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: 97bd7ffb2ef28a36089a5e8ba4ab5559dd661004
- commit:
- Update the cmakelist

* Tue Sep 22 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: 812b8a0eb9f4e7cbadaaa1ccfa98309a14a72db5
- commit:
- Fix data input in buffer mode

* Mon Sep 21 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: eb41917cb28d1ec887e3502ed30324f88a183bcc
- commit:
- Modify and add new interfaces

* Mon Sep 14 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: a00d892928a9ca350e4388e09323fa799bd8fded
- commit:
- 1. Fix the SSL connect error issue
- 2. Fix the "bucket already exists" error raised by hos_create_bucket
- 3. Fix the unresolvable remote address issue during upload

* Fri Sep 11 2020 pengxuanzheng pengxuanzheng@geedgenetworks.com
- hash: cd0649bfbcadaeca8d7e37b31d3e9a191160ce04
- commit:
- init
@@ -106,13 +106,18 @@ int main(int argc, char *argv[])
 }
 debuginfo("hos_verify_bucket success ... \n");

-#if 0
-fd = hos_open_fd(handle, bucket, object, callback, (void *)&data, 0, mode);
+#if 1
+mode = FILE_MODE;
+for (i = 0; i < test_times; i++)
+{
+    fd[i] = hos_open_fd(handle, bucket, object, callback, (void *)&data, 0, mode);
+}

 debuginfo("hos_upload_file start ...\n");
 clock_gettime(CLOCK_MONOTONIC, &start);
 for (i = 0; i < test_times; i++)
 {
-    hos_write(fd, object, 0, 0);
+    hos_write(fd[i], object, 0, 0, 0);
 }
 clock_gettime(CLOCK_MONOTONIC, &end);
 time = calc_time(start, end);
@@ -121,8 +126,8 @@ int main(int argc, char *argv[])
 debuginfo("hos_upload_file end ...\n");
 #else

-mode = BUFF_MODE;
-for (i = 0; i < 10000; i++)
+mode = BUFF_MODE | APPEND_MODE;
+for (i = 0; i < test_times; i++)
 {
     fd[i] = hos_open_fd(handle, bucket, object, callback, (void *)&data, 0, mode);
 }
@@ -130,7 +135,7 @@ int main(int argc, char *argv[])
 clock_gettime(CLOCK_MONOTONIC, &start);
 for (i = 0; i < test_times; i++)
 {
-    hos_write(fd[i], buf, buf_size, 0);
+    hos_write(fd[i], buf, buf_size, 0, i);
 }
 clock_gettime(CLOCK_MONOTONIC, &end);
 time = calc_time(start, end);
@@ -87,14 +87,11 @@ hos_client_handle hos_client_create(const char *endpoint, const char *accesskeyi
     Aws::Client::ClientConfiguration config;
     Aws::Auth::AWSCredentials credentials(accesskeyid, secretkey);

+    // Initialization
     config.endpointOverride = endpoint;
     config.verifySSL = false;
     config.enableEndpointDiscovery = true;
-    //std::shared_ptr<Aws::Utils::Threading::Executor> pooled_thread = Aws::MakeShared<Aws::Utils::Threading::PooledThreadExecutor>("ClientConfigration");
-    //std::shared_ptr<Aws::Utils::Threading::Executor> test = std::make_shared<Aws::Utils::Threading::PooledThreadExecutor>(1000);
-    //config.executor(Aws::MakeShared<Aws::Utils::Threading::PooledThreadExecutor>("ClientConfiguration"));
-    //config.executor = std::dynamic_pointer_cast<Aws::Utils::Threading::PooledThreadExecutor>(config.executor);
-    config.executor = std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor>(std::make_shared<Aws::Utils::Threading::PooledThreadExecutor>(100));
+    config.executor = std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor>(std::make_shared<Aws::Utils::Threading::PooledThreadExecutor>(100)); // Thread pool support

     handle->S3Client = new Aws::S3::S3Client(credentials, config, Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, false);
     handle->append_size = 30 * 1024 * 1024;
@@ -250,11 +247,12 @@ int hos_open_fd(hos_client_handle handle, const char *bucket, const char *object
     return fd;
 }

-int hos_write(size_t fd, const char *stream, size_t stream_len, size_t thread_id)
+int hos_write(size_t fd, const char *stream, size_t stream_len, size_t thread_id, size_t position)
 {
     struct stat buffer;
     hos_info_t *hos_info = NULL;
     hos_client_handle handle = NULL;
+    char num[128];
     char buf[128];
     if ((fd == 0) || (stream == NULL) || (thread_id > MAX_THREAD_NUM))
     {
@@ -276,12 +274,24 @@ int hos_write(size_t fd, const char *stream, size_t stream_len, size_t thread_id
     request.SetKey(hos_info->object);

     //TODO APPEND MODE
+    snprintf(num, 128, "%lu", position);
+    Aws::Map<Aws::String, Aws::String> headers;
+    if (hos_info->mode & APPEND_MODE)
+    {
+        headers["x-hos-upload-type"] = "append";
+        headers["x-hos_position"] = num;
+        request.SetMetadata(headers);
+#if 0
+        request.AddMetadata("x-hos-upload-type", "append");
+        request.AddMetadata("x-hos-position", num);
+#endif
+    }

     // Set the upload data type
     if (hos_info->mode & BUFF_MODE)
     {
         //BUFF_MODE
-#if 0
+#if 1
         const std::shared_ptr<Aws::IOStream> input_data =
             Aws::MakeShared<Aws::StringStream>(stream, stream + stream_len);
         Aws::String buffer (stream, stream_len);
@@ -134,7 +134,7 @@ int hos_open_fd(hos_client_handle handle, const char *bucket, const char *object
  * size_t thread_id      Thread ID
  * Return value: int     0 on success, an hoserros error code on failure
  *************************************************************************************/
-int hos_write(size_t fd, const char *stream, size_t stream_len, size_t thread_id);
+int hos_write(size_t fd, const char *stream, size_t stream_len, size_t thread_id, size_t position);
 /*************************************************************************************
  * Function name: hos_close_fd
  * Parameters: size_t fd      fd
@@ -3,7 +3,7 @@ include(ExternalProject)
 set(AWSS3_ROOT ${CMAKE_CURRENT_BINARY_DIR})
 set(AWSS3_URL ${CMAKE_CURRENT_SOURCE_DIR}/aws-sdk-cpp-master.zip)
 set(AWSS3_URL_MD5 a94cce4fe5003acf55fe1eac8c49ad4f)
-set(AWSS3_CONFIGURE cd ${AWSS3_ROOT}/aws-sdk-cpp-master/src/aws-sdk-cpp-master && cmake . -DBUILD_ONLY=s3)
+set(AWSS3_CONFIGURE cd ${AWSS3_ROOT}/aws-sdk-cpp-master/src/aws-sdk-cpp-master && cmake . -DBUILD_ONLY=s3 -DCMAKE_BUILD_TYPE=Debug)
 set(AWSS3_MAKE cd ${AWSS3_ROOT}/aws-sdk-cpp-master/src/aws-sdk-cpp-master && make)
 set(AWSS3_INSTALL cd ${AWSS3_ROOT}/aws-sdk-cpp-master/src/aws-sdk-cpp-master && make install PREFIX=${SUPPORT_INSTALL_PREFIX})