[postgis-tickets] r16442 - [github][codecov] Fix coverage for files with the same name in different paths
Darafei
komzpa at gmail.com
Sat Mar 3 01:11:47 PST 2018
Author: komzpa
Date: 2018-03-03 01:11:46 -0800 (Sat, 03 Mar 2018)
New Revision: 16442
Added:
trunk/.github/codecov.bash
Modified:
trunk/.travis.yml
Log:
[github][codecov] Fix coverage for files with the same name in different paths
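gcov, by default, names its output files after the source basename alone, so two files called measures.c in different directories would both map to measures.c.gcov. The vendored script below passes -p (preserve paths) to gcov, which folds the directory components into the generated file name. A minimal illustration of that flag; the source paths here are only for illustration:

    # Without -p both commands would write ./measures.c.gcov, the second
    # run clobbering the first.  With -p the path is encoded using '#':
    gcov -p liblwgeom/measures.c   # -> liblwgeom#measures.c.gcov
    gcov -p raster/measures.c      # -> raster#measures.c.gcov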
Added: trunk/.github/codecov.bash
===================================================================
--- trunk/.github/codecov.bash (rev 0)
+++ trunk/.github/codecov.bash 2018-03-03 09:11:46 UTC (rev 16442)
@@ -0,0 +1,1550 @@
+#!/usr/bin/env bash
+
+# Apache License Version 2.0, January 2004
+# https://github.com/codecov/codecov-bash/blob/master/LICENSE
+
+
+set -e +o pipefail
+
+VERSION="8b76995"
+
+url="https://codecov.io"
+env="$CODECOV_ENV"
+service=""
+token=""
+search_in=""
+flags=""
+exit_with=0
+curlargs=""
+curlawsargs=""
+dump="0"
+clean="0"
+curl_s="-s"
+name="$CODECOV_NAME"
+include_cov=""
+exclude_cov=""
+ddp="$(echo ~)/Library/Developer/Xcode/DerivedData"
+xp=""
+files=""
+cacert="$CODECOV_CA_BUNDLE"
+gcov_ignore="-not -path './bower_components/**' -not -path './node_modules/**' -not -path './vendor/**'"
+gcov_include=""
+
+ft_gcov="1"
+ft_coveragepy="1"
+ft_fix="1"
+ft_search="1"
+ft_s3="1"
+ft_network="1"
+ft_xcodellvm="0"
+ft_xcodeplist="1"
+
+_git_root=$(git rev-parse --show-toplevel 2>/dev/null || hg root 2>/dev/null || echo $PWD)
+git_root="$_git_root"
+codecov_yml=""
+remote_addr=""
+if [ "$git_root" = "$PWD" ];
+then
+ git_root="."
+fi
+
+url_o=""
+pr_o=""
+build_o=""
+commit_o=""
+search_in_o=""
+tag_o=""
+branch_o=""
+slug_o=""
+prefix_o=""
+
+commit="$VCS_COMMIT_ID"
+branch="$VCS_BRANCH_NAME"
+pr="$VCS_PULL_REQUEST"
+slug="$VCS_SLUG"
+tag="$VCS_TAG"
+build_url="$CI_BUILD_URL"
+build="$CI_BUILD_ID"
+job="$CI_JOB_ID"
+
+beta_xcode_partials=""
+
+proj_root="$git_root"
+gcov_exe="gcov"
+gcov_arg=""
+
+b="\033[0;36m"
+g="\033[0;32m"
+r="\033[0;31m"
+e="\033[0;90m"
+x="\033[0m"
+
+show_help() {
+cat << EOF
+
+ Codecov Bash $VERSION
+
+ Global report uploading tool for Codecov
+ Documentation at https://docs.codecov.io/docs
+ Contribute at https://github.com/codecov/codecov-bash
+
+
+ -h Display this help and exit
+ -f FILE Target file(s) to upload
+
+ -f "path/to/file" only upload this file
+ skips searching unless provided patterns below
+
+ -f '!*.bar' ignore all files at pattern *.bar
+ -f '*.foo' include all files at pattern *.foo
+ Must use single quotes.
+ This is non-exclusive, use -s "*.foo" to match specific paths.
+
+ -s DIR Directory to search for coverage reports.
+ Already searches project root and artifact folders.
+ -t TOKEN Set the private repository token
+ (option) set environment variable CODECOV_TOKEN=:uuid
+
+ -t @/path/to/token_file
+ -t uuid
+
+ -n NAME Custom defined name of the upload. Visible in Codecov UI
+
+ -e ENV Specify environment variables to be included with this build
+ Also accepting environment variables: CODECOV_ENV=VAR,VAR2
+
+ -e VAR,VAR2
+
+ -X feature Toggle functionalities
+
+ -X gcov Disable gcov
+ -X coveragepy Disable python coverage
+ -X fix Disable report fixing
+ -X search Disable searching for reports
+ -X xcode Disable xcode processing
+ -X network Disable uploading the file network
+
+ -R root dir Used when not in git/hg project to identify project root directory
+ -y conf file Used to specify the location of the .codecov.yml config file
+ -F flag Flag the upload to group coverage metrics
+
+ -F unittests This upload is only unittests
+ -F integration This upload is only integration tests
+ -F ui,chrome This upload is Chrome - UI tests
+
+ -c Move discovered coverage reports to the trash
+ -Z Exit with 1 if not successful. Default will Exit with 0
+
+ -- xcode --
+ -D Custom Derived Data Path for Coverage.profdata and gcov processing
+ Default '~/Library/Developer/Xcode/DerivedData'
+ -J Specify packages to build coverage.
+                 This can significantly reduce the time to build coverage reports.
+
+ -J 'MyAppName' Will match "MyAppName" and "MyAppNameTests"
+ -J '^ExampleApp$' Will match only "ExampleApp" not "ExampleAppTests"
+
+ -- gcov --
+ -g GLOB Paths to ignore during gcov gathering
+ -G GLOB Paths to include during gcov gathering
+ -p dir Project root directory
+ Also used when preparing gcov
+ -k prefix Prefix filepaths to help resolve path fixing: https://github.com/codecov/support/issues/472
+ -x gcovexe gcov executable to run. Defaults to 'gcov'
+ -a gcovargs extra arguments to pass to gcov
+
+ -- Override CI Environment Variables --
+ These variables are automatically detected by popular CI providers
+
+ -B branch Specify the branch name
+ -C sha Specify the commit sha
+ -P pr Specify the pull request number
+ -b build Specify the build number
+ -T tag Specify the git tag
+
+ -- Enterprise --
+ -u URL Set the target url for Enterprise customers
+ Not required when retrieving the bash uploader from your CCE
+ (option) Set environment variable CODECOV_URL=https://my-hosted-codecov.com
+ -r SLUG owner/repo slug used instead of the private repo token in Enterprise
+ (option) set environment variable CODECOV_SLUG=:owner/:repo
+ (option) set in your codecov.yml "codecov.slug"
+ -S PATH File path to your cacert.pem file used to verify ssl with Codecov Enterprise (optional)
+ (option) Set environment variable: CODECOV_CA_BUNDLE="/path/to/ca.pem"
+ -U curlargs Extra curl arguments to communicate with Codecov. e.g., -U "--proxy http://http-proxy"
+ -A curlargs Extra curl arguments to communicate with AWS.
+
+ -- Debugging --
+ -d Don't upload, but dump upload file to stdout
+ -K Remove color from the output
+ -v Verbose mode
+
+EOF
+}
+
+
+say() {
+ echo -e "$1"
+}
+
+
+urlencode() {
+ echo "$1" | curl -Gso /dev/null -w %{url_effective} --data-urlencode @- "" | cut -c 3- | sed -e 's/%0A//'
+}
+
+
+swiftcov() {
+ _dir=$(dirname "$1" | sed 's/\(Build\).*/\1/g')
+ for _type in app framework xctest
+ do
+ find "$_dir" -name "*.$_type" | while read f
+ do
+ _proj=${f##*/}
+ _proj=${_proj%."$_type"}
+ if [ "$2" = "" ] || [ "$(echo "$_proj" | grep -i "$2")" != "" ];
+ then
+ say " $g+$x Building reports for $_proj $_type"
+ dest=$([ -f "$f/$_proj" ] && echo "$f/$_proj" || echo "$f/Contents/MacOS/$_proj")
+ _proj_name=$(echo "$_proj" | sed -e 's/[[:space:]]//g')
+ xcrun llvm-cov show $beta_xcode_partials -instr-profile "$1" "$dest" > "$_proj_name.$_type.coverage.txt" \
+ || say " ${r}x>${x} llvm-cov failed to produce results for $dest"
+ fi
+ done
+ done
+}
+
+
+# Credits to: https://gist.github.com/pkuczynski/8665367
+parse_yaml() {
+ local prefix=$2
+ local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034')
+ sed -ne "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \
+ -e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $1 |
+ awk -F$fs '{
+ indent = length($1)/2;
+ vname[indent] = $2;
+ for (i in vname) {if (i > indent) {delete vname[i]}}
+ if (length($3) > 0) {
+ vn=""; if (indent > 0) {vn=(vn)(vname[0])("_")}
+ printf("%s%s%s=\"%s\"\n", "'$prefix'",vn, $2, $3);
+ }
+ }'
+}
+
+
+if [ $# != 0 ];
+then
+ while getopts "a:A:b:B:cC:dD:e:f:F:g:G:hJ:k:Kn:p:P:r:R:y:s:S:t:T:u:U:vx:X:Z" o
+ do
+ case "$o" in
+ "a")
+ gcov_arg=$OPTARG
+ ;;
+ "A")
+ curlawsargs="$OPTARG"
+ ;;
+ "b")
+ build_o="$OPTARG"
+ ;;
+ "B")
+ branch_o="$OPTARG"
+ ;;
+ "c")
+ clean="1"
+ ;;
+ "C")
+ commit_o="$OPTARG"
+ ;;
+ "d")
+ dump="1"
+ ;;
+ "D")
+ ddp="$OPTARG"
+ ;;
+ "e")
+ env="$env,$OPTARG"
+ ;;
+ "f")
+ if [ "${OPTARG::1}" = "!" ];
+ then
+ exclude_cov="$exclude_cov -not -path '${OPTARG:1}'"
+
+ elif [[ "$OPTARG" = *"*"* ]];
+ then
+ include_cov="$include_cov -or -name '$OPTARG'"
+
+ else
+ ft_search=0
+ if [ "$files" = "" ];
+ then
+ files="$OPTARG"
+ else
+ files="$files
+$OPTARG"
+ fi
+ fi
+ ;;
+ "F")
+ if [ "$flags" = "" ];
+ then
+ flags="$OPTARG"
+ else
+ flags="$flags,$OPTARG"
+ fi
+ ;;
+ "g")
+ gcov_ignore="$gcov_ignore -not -path '$OPTARG'"
+ ;;
+ "G")
+ gcov_include="$gcov_include -path '$OPTARG'"
+ ;;
+ "h")
+ show_help
+ exit 0;
+ ;;
+ "J")
+ ft_xcodellvm="1"
+ ft_xcodeplist="0"
+ if [ "$xp" = "" ];
+ then
+ xp="$OPTARG"
+ else
+ xp="$xp\|$OPTARG"
+ fi
+ ;;
+ "k")
+ prefix_o=$(echo "$OPTARG" | sed -e 's:^/*::' -e 's:/*$::')
+ ;;
+ "K")
+ b=""
+ g=""
+ r=""
+ e=""
+ x=""
+ ;;
+ "n")
+ name="$OPTARG"
+ ;;
+ "p")
+ proj_root="$OPTARG"
+ ;;
+ "P")
+ pr_o="$OPTARG"
+ ;;
+ "r")
+ slug_o="$OPTARG"
+ ;;
+ "R")
+ git_root="$OPTARG"
+ ;;
+ "s")
+ if [ "$search_in_o" = "" ];
+ then
+ search_in_o="$OPTARG"
+ else
+ search_in_o="$search_in_o $OPTARG"
+ fi
+ ;;
+ "S")
+ cacert="--cacert \"$OPTARG\""
+ ;;
+ "t")
+ if [ "${OPTARG::1}" = "@" ];
+ then
+ token=$(cat "${OPTARG:1}" | tr -d ' \n')
+ else
+ token="$OPTARG"
+ fi
+ ;;
+ "T")
+ tag_o="$OPTARG"
+ ;;
+ "u")
+ url_o=$(echo "$OPTARG" | sed -e 's/\/$//')
+ ;;
+ "U")
+ curlargs="$OPTARG"
+ ;;
+ "v")
+ set -x
+ curl_s=""
+ ;;
+ "x")
+ gcov_exe=$OPTARG
+ ;;
+ "X")
+ if [ "$OPTARG" = "gcov" ];
+ then
+ ft_gcov="0"
+ elif [ "$OPTARG" = "coveragepy" ] || [ "$OPTARG" = "py" ];
+ then
+ ft_coveragepy="0"
+ elif [ "$OPTARG" = "xcodellvm" ];
+ then
+ ft_xcodellvm="1"
+ ft_xcodeplist="0"
+ elif [ "$OPTARG" = "fix" ] || [ "$OPTARG" = "fixes" ];
+ then
+ ft_fix="0"
+ elif [ "$OPTARG" = "xcode" ];
+ then
+ ft_xcodellvm="0"
+ ft_xcodeplist="0"
+ elif [ "$OPTARG" = "search" ];
+ then
+ ft_search="0"
+ elif [ "$OPTARG" = "xcodepartials" ];
+ then
+ beta_xcode_partials="-use-color"
+ elif [ "$OPTARG" = "network" ];
+ then
+ ft_network="0"
+ elif [ "$OPTARG" = "s3" ];
+ then
+ ft_s3="0"
+ fi
+ ;;
+ "y")
+ codecov_yml="$OPTARG"
+ ;;
+ "Z")
+ exit_with=1
+ ;;
+ esac
+ done
+fi
+
+say "
+ _____ _
+ / ____| | |
+| | ___ __| | ___ ___ _____ __
+| | / _ \\ / _\` |/ _ \\/ __/ _ \\ \\ / /
+| |___| (_) | (_| | __/ (_| (_) \\ V /
+ \\_____\\___/ \\__,_|\\___|\\___\\___/ \\_/
+ Bash-$VERSION
+
+"
+
+search_in="$proj_root"
+
+if [ "$JENKINS_URL" != "" ];
+then
+ say "$e==>$x Jenkins CI detected."
+ # https://wiki.jenkins-ci.org/display/JENKINS/Building+a+software+project
+ # https://wiki.jenkins-ci.org/display/JENKINS/GitHub+pull+request+builder+plugin#GitHubpullrequestbuilderplugin-EnvironmentVariables
+ service="jenkins"
+
+ if [ "$ghprbSourceBranch" != "" ];
+ then
+ branch="$ghprbSourceBranch"
+ elif [ "$GIT_BRANCH" != "" ];
+ then
+ branch="$GIT_BRANCH"
+ elif [ "$BRANCH_NAME" != "" ];
+ then
+ branch="$BRANCH_NAME"
+ fi
+
+ if [ "$ghprbActualCommit" != "" ];
+ then
+ commit="$ghprbActualCommit"
+ elif [ "$GIT_COMMIT" != "" ];
+ then
+ commit="$GIT_COMMIT"
+ fi
+
+ if [ "$ghprbPullId" != "" ];
+ then
+ pr="$ghprbPullId"
+ elif [ "$CHANGE_ID" != "" ];
+ then
+ pr="$CHANGE_ID"
+ fi
+
+ build="$BUILD_NUMBER"
+ build_url=$(urlencode "$BUILD_URL")
+
+elif [ "$CI" = "true" ] && [ "$TRAVIS" = "true" ] && [ "$SHIPPABLE" != "true" ];
+then
+ say "$e==>$x Travis CI detected."
+ # https://docs.travis-ci.com/user/environment-variables/
+ service="travis"
+ commit="${TRAVIS_PULL_REQUEST_SHA:-$TRAVIS_COMMIT}"
+ build="$TRAVIS_JOB_NUMBER"
+ pr="$TRAVIS_PULL_REQUEST"
+ job="$TRAVIS_JOB_ID"
+ slug="$TRAVIS_REPO_SLUG"
+ env="$env,TRAVIS_OS_NAME"
+ tag="$TRAVIS_TAG"
+ if [ "$TRAVIS_BRANCH" != "$TRAVIS_TAG" ];
+ then
+ branch="$TRAVIS_BRANCH"
+ fi
+
+ language=$(printenv | grep "TRAVIS_.*_VERSION" | head -1)
+ if [ "$language" != "" ];
+ then
+ env="$env,${language%=*}"
+ fi
+
+elif [ "$DOCKER_REPO" != "" ];
+then
+ say "$e==>$x Docker detected."
+ # https://docs.docker.com/docker-cloud/builds/advanced/
+ service="docker"
+ branch="$SOURCE_BRANCH"
+ commit="$SOURCE_COMMIT"
+ slug="$DOCKER_REPO"
+ tag="$CACHE_TAG"
+ env="$env,IMAGE_NAME"
+
+elif [ "$CI" = "true" ] && [ "$CI_NAME" = "codeship" ];
+then
+ say "$e==>$x Codeship CI detected."
+ # https://www.codeship.io/documentation/continuous-integration/set-environment-variables/
+ service="codeship"
+ branch="$CI_BRANCH"
+ build="$CI_BUILD_NUMBER"
+ build_url=$(urlencode "$CI_BUILD_URL")
+ commit="$CI_COMMIT_ID"
+
+elif [ ! -z "$CF_BUILD_URL" ] && [ ! -z "$CF_BUILD_ID" ];
+then
+ say "$e==>$x Codefresh CI detected."
+ # https://docs.codefresh.io/v1.0/docs/variables
+ service="codefresh"
+ branch="$CF_BRANCH"
+ build="$CF_BUILD_ID"
+ build_url=$(urlencode "$CF_BUILD_URL")
+ commit="$CF_REVISION"
+
+elif [ "$TEAMCITY_VERSION" != "" ];
+then
+ say "$e==>$x TeamCity CI detected."
+ # https://confluence.jetbrains.com/display/TCD8/Predefined+Build+Parameters
+ # https://confluence.jetbrains.com/plugins/servlet/mobile#content/view/74847298
+ if [ "$TEAMCITY_BUILD_BRANCH" = '' ];
+ then
+ echo " Teamcity does not automatically make build parameters available as environment variables."
+ echo " Add the following environment parameters to the build configuration"
+ echo " env.TEAMCITY_BUILD_BRANCH = %teamcity.build.branch%"
+ echo " env.TEAMCITY_BUILD_ID = %teamcity.build.id%"
+ echo " env.TEAMCITY_BUILD_URL = %teamcity.serverUrl%/viewLog.html?buildId=%teamcity.build.id%"
+ echo " env.TEAMCITY_BUILD_COMMIT = %system.build.vcs.number%"
+ echo " env.TEAMCITY_BUILD_REPOSITORY = %vcsroot.<YOUR TEAMCITY VCS NAME>.url%"
+ fi
+ service="teamcity"
+ branch="$TEAMCITY_BUILD_BRANCH"
+ build="$TEAMCITY_BUILD_ID"
+ build_url=$(urlencode "$TEAMCITY_BUILD_URL")
+ if [ "$TEAMCITY_BUILD_COMMIT" != "" ];
+ then
+ commit="$TEAMCITY_BUILD_COMMIT"
+ else
+ commit="$BUILD_VCS_NUMBER"
+ fi
+ remote_addr="$TEAMCITY_BUILD_REPOSITORY"
+
+elif [ "$CI" = "true" ] && [ "$CIRCLECI" = "true" ];
+then
+ say "$e==>$x Circle CI detected."
+ # https://circleci.com/docs/environment-variables
+ service="circleci"
+ branch="$CIRCLE_BRANCH"
+ build="$CIRCLE_BUILD_NUM"
+ job="$CIRCLE_NODE_INDEX"
+ if [ "$CIRCLE_PROJECT_REPONAME" != "" ];
+ then
+ slug="$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME"
+ else
+ # git at github.com:owner/repo.git
+ slug="${CIRCLE_REPOSITORY_URL##*:}"
+ # owner/repo.git
+ slug="${slug%%.git}"
+ fi
+ pr="$CIRCLE_PR_NUMBER"
+ commit="$CIRCLE_SHA1"
+ search_in="$search_in $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS"
+
+elif [ "$BUDDYBUILD_BRANCH" != "" ];
+then
+ say "$e==>$x buddybuild detected"
+ # http://docs.buddybuild.com/v6/docs/custom-prebuild-and-postbuild-steps
+ service="buddybuild"
+ branch="$BUDDYBUILD_BRANCH"
+ build="$BUDDYBUILD_BUILD_NUMBER"
+ build_url="https://dashboard.buddybuild.com/public/apps/$BUDDYBUILD_APP_ID/build/$BUDDYBUILD_BUILD_ID"
+ # BUDDYBUILD_TRIGGERED_BY
+ if [ "$ddp" = "$(echo ~)/Library/Developer/Xcode/DerivedData" ];
+ then
+ ddp="/private/tmp/sandbox/${BUDDYBUILD_APP_ID}/bbtest"
+ fi
+
+elif [ "${bamboo_planRepository_revision}" != "" ];
+then
+ say "$e==>$x Bamboo detected"
+ # https://confluence.atlassian.com/bamboo/bamboo-variables-289277087.html#Bamboovariables-Build-specificvariables
+ service="bamboo"
+ commit="${bamboo_planRepository_revision}"
+ branch="${bamboo_planRepository_branch}"
+ build="${bamboo_buildNumber}"
+ build_url="${bamboo_buildResultsUrl}"
+ remote_addr="${bamboo_planRepository_repositoryUrl}"
+
+elif [ "$CI" = "true" ] && [ "$BITRISE_IO" = "true" ];
+then
+ # http://devcenter.bitrise.io/faq/available-environment-variables/
+ say "$e==>$x Bitrise CI detected."
+ service="bitrise"
+ branch="$BITRISE_GIT_BRANCH"
+ build="$BITRISE_BUILD_NUMBER"
+ build_url=$(urlencode "$BITRISE_BUILD_URL")
+ pr="$BITRISE_PULL_REQUEST"
+ if [ "$GIT_CLONE_COMMIT_HASH" != "" ];
+ then
+ commit="$GIT_CLONE_COMMIT_HASH"
+ fi
+
+elif [ "$CI" = "true" ] && [ "$SEMAPHORE" = "true" ];
+then
+ say "$e==>$x Semaphore CI detected."
+ # https://semaphoreapp.com/docs/available-environment-variables.html
+ service="semaphore"
+ branch="$BRANCH_NAME"
+ build="$SEMAPHORE_BUILD_NUMBER"
+ job="$SEMAPHORE_CURRENT_THREAD"
+ pr="$PULL_REQUEST_NUMBER"
+ slug="$SEMAPHORE_REPO_SLUG"
+ commit="$REVISION"
+ env="$env,SEMAPHORE_TRIGGER_SOURCE"
+
+elif [ "$CI" = "true" ] && [ "$BUILDKITE" = "true" ];
+then
+ say "$e==>$x Buildkite CI detected."
+ # https://buildkite.com/docs/guides/environment-variables
+ service="buildkite"
+ branch="$BUILDKITE_BRANCH"
+ build="$BUILDKITE_BUILD_NUMBER"
+ job="$BUILDKITE_JOB_ID"
+ build_url=$(urlencode "$BUILDKITE_BUILD_URL")
+ slug="$BUILDKITE_PROJECT_SLUG"
+ commit="$BUILDKITE_COMMIT"
+ if [[ "$BUILDKITE_PULL_REQUEST" != "false" ]]; then
+ pr="$BUILDKITE_PULL_REQUEST"
+ fi
+ tag="$BUILDKITE_TAG"
+
+elif [ "$CI" = "drone" ];
+then
+ say "$e==>$x Drone CI detected."
+ # http://docs.drone.io/env.html
+ # drone commits are not full shas
+ service="drone.io"
+ branch="$DRONE_BRANCH"
+ build="$DRONE_BUILD_NUMBER"
+ build_url=$(urlencode "${DRONE_BUILD_LINK}")
+ pr="$DRONE_PULL_REQUEST"
+ job="$DRONE_JOB_NUMBER"
+ tag="$DRONE_TAG"
+
+elif [ "$HEROKU_TEST_RUN_BRANCH" != "" ];
+then
+ say "$e==>$x Heroku CI detected."
+ # https://devcenter.heroku.com/articles/heroku-ci#environment-variables
+ service="heroku"
+ branch="$HEROKU_TEST_RUN_BRANCH"
+ build="$HEROKU_TEST_RUN_ID"
+
+elif [ "$CI" = "True" ] && [ "$APPVEYOR" = "True" ];
+then
+ say "$e==>$x Appveyor CI detected."
+ # http://www.appveyor.com/docs/environment-variables
+ service="appveyor"
+ branch="$APPVEYOR_REPO_BRANCH"
+ build=$(urlencode "$APPVEYOR_JOB_ID")
+ pr="$APPVEYOR_PULL_REQUEST_NUMBER"
+ job="$APPVEYOR_ACCOUNT_NAME%2F$APPVEYOR_PROJECT_SLUG%2F$APPVEYOR_BUILD_VERSION"
+ slug="$APPVEYOR_REPO_NAME"
+ commit="$APPVEYOR_REPO_COMMIT"
+
+elif [ "$CI" = "true" ] && [ "$WERCKER_GIT_BRANCH" != "" ];
+then
+ say "$e==>$x Wercker CI detected."
+ # http://devcenter.wercker.com/articles/steps/variables.html
+ service="wercker"
+ branch="$WERCKER_GIT_BRANCH"
+ build="$WERCKER_MAIN_PIPELINE_STARTED"
+ slug="$WERCKER_GIT_OWNER/$WERCKER_GIT_REPOSITORY"
+ commit="$WERCKER_GIT_COMMIT"
+
+elif [ "$CI" = "true" ] && [ "$MAGNUM" = "true" ];
+then
+ say "$e==>$x Magnum CI detected."
+ # https://magnum-ci.com/docs/environment
+ service="magnum"
+ branch="$CI_BRANCH"
+ build="$CI_BUILD_NUMBER"
+ commit="$CI_COMMIT"
+
+elif [ "$SHIPPABLE" = "true" ];
+then
+ say "$e==>$x Shippable CI detected."
+ # http://docs.shippable.com/ci_configure/
+ service="shippable"
+ branch=$([ "$HEAD_BRANCH" != "" ] && echo "$HEAD_BRANCH" || echo "$BRANCH")
+ build="$BUILD_NUMBER"
+ build_url=$(urlencode "$BUILD_URL")
+ pr="$PULL_REQUEST"
+ slug="$REPO_FULL_NAME"
+ commit="$COMMIT"
+
+elif [ "$TDDIUM" = "true" ];
+then
+ say "Solano CI detected."
+ # http://docs.solanolabs.com/Setup/tddium-set-environment-variables/
+ service="solano"
+ commit="$TDDIUM_CURRENT_COMMIT"
+ branch="$TDDIUM_CURRENT_BRANCH"
+ build="$TDDIUM_TID"
+ pr="$TDDIUM_PR_ID"
+
+elif [ "$GREENHOUSE" = "true" ];
+then
+ say "$e==>$x Greenhouse CI detected."
+ # http://docs.greenhouseci.com/docs/environment-variables-files
+ service="greenhouse"
+ branch="$GREENHOUSE_BRANCH"
+ build="$GREENHOUSE_BUILD_NUMBER"
+ build_url=$(urlencode "$GREENHOUSE_BUILD_URL")
+ pr="$GREENHOUSE_PULL_REQUEST"
+ commit="$GREENHOUSE_COMMIT"
+ search_in="$search_in $GREENHOUSE_EXPORT_DIR"
+
+elif [ "$GITLAB_CI" != "" ];
+then
+ say "$e==>$x GitLab CI detected."
+ # http://doc.gitlab.com/ce/ci/variables/README.html
+ service="gitlab"
+ branch="${CI_BUILD_REF_NAME:-$CI_COMMIT_REF_NAME}"
+ build="${CI_BUILD_ID:-$CI_JOB_ID}"
+ remote_addr="${CI_BUILD_REPO:-$CI_REPOSITORY_URL}"
+ commit="${CI_BUILD_REF:-$CI_COMMIT_SHA}"
+
+else
+ say "${r}x>${x} No CI provider detected."
+ say " Testing inside Docker? ${b}http://docs.codecov.io/docs/testing-with-docker${x}"
+ say " Testing with Tox? ${b}https://docs.codecov.io/docs/python#section-testing-with-tox${x}"
+
+fi
+
+say " ${e}project root:${x} $git_root"
+
+# find branch, commit, repo from git command
+if [ "$GIT_BRANCH" != "" ];
+then
+ branch="$GIT_BRANCH"
+
+elif [ "$branch" = "" ];
+then
+ branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || hg branch 2>/dev/null || echo "")
+ if [ "$branch" = "HEAD" ];
+ then
+ branch=""
+ fi
+fi
+
+if [ "$commit_o" = "" ];
+then
+ # merge commit -> actual commit
+ mc=
+ if [ -n "$pr" ] && [ "$pr" != false ];
+ then
+ mc=$(git show --no-patch --format="%P" 2>/dev/null || echo "")
+ fi
+ if [[ "$mc" =~ ^[a-z0-9]{40}[[:space:]][a-z0-9]{40}$ ]];
+ then
+ say " Fixing merge commit SHA"
+ commit=$(echo "$mc" | cut -d' ' -f2)
+ elif [ "$GIT_COMMIT" != "" ];
+ then
+ commit="$GIT_COMMIT"
+ elif [ "$commit" = "" ];
+ then
+ commit=$(git log -1 --format="%H" 2>/dev/null || hg id -i --debug 2>/dev/null | tr -d '+' || echo "")
+ fi
+else
+ commit="$commit_o"
+fi
+
+if [ "$CODECOV_TOKEN" != "" ] && [ "$token" = "" ];
+then
+ say "${e}-->${x} token set from env"
+ token="$CODECOV_TOKEN"
+fi
+
+if [ "$CODECOV_URL" != "" ] && [ "$url_o" = "" ];
+then
+ say "${e}-->${x} url set from env"
+ url_o=$(echo "$CODECOV_URL" | sed -e 's/\/$//')
+fi
+
+if [ "$CODECOV_SLUG" != "" ];
+then
+ say "${e}-->${x} slug set from env"
+ slug_o="$CODECOV_SLUG"
+
+elif [ "$slug" = "" ];
+then
+ if [ "$remote_addr" = "" ];
+ then
+ remote_addr=$(git config --get remote.origin.url || hg paths default || echo '')
+ fi
+ if [ "$remote_addr" != "" ];
+ then
+ if echo "$remote_addr" | grep -q "//"; then
+ # https
+ slug=$(echo "$remote_addr" | cut -d / -f 4,5 | sed -e 's/\.git$//')
+ else
+ # ssh
+ slug=$(echo "$remote_addr" | cut -d : -f 2 | sed -e 's/\.git$//')
+ fi
+ fi
+ if [ "$slug" = "/" ];
+ then
+ slug=""
+ fi
+fi
+
+yaml=$(test -n "$codecov_yml" && echo "$codecov_yml" \
+ || cd "$git_root" && \
+ git ls-files "*codecov.yml" "*codecov.yaml" 2>/dev/null \
+ || hg locate "*codecov.yml" "*codecov.yaml" 2>/dev/null \
+ || cd $proj_root && find . -type f -name '*codecov.y*ml' -depth 1 2>/dev/null \
+ || echo '')
+yaml=$(echo "$yaml" | head -1)
+
+if [ "$yaml" != "" ];
+then
+ say " ${e}Yaml found at:${x} $yaml"
+ config=$(parse_yaml "$git_root/$yaml" || echo '')
+
+ # TODO validate the yaml here
+
+ if [ "$(echo "$config" | grep 'codecov_token="')" != "" ] && [ "$token" = "" ];
+ then
+ say "${e}-->${x} token set from yaml"
+ token="$(echo "$config" | grep 'codecov_token="' | sed -e 's/codecov_token="//' | sed -e 's/"\.*//')"
+ fi
+
+ if [ "$(echo "$config" | grep 'codecov_url="')" != "" ] && [ "$url_o" = "" ];
+ then
+ say "${e}-->${x} url set from yaml"
+ url_o="$(echo "$config" | grep 'codecov_url="' | sed -e 's/codecov_url="//' | sed -e 's/"\.*//')"
+ fi
+
+ if [ "$(echo "$config" | grep 'codecov_slug="')" != "" ] && [ "$slug_o" = "" ];
+ then
+ say "${e}-->${x} slug set from yaml"
+ slug_o="$(echo "$config" | grep 'codecov_slug="' | sed -e 's/codecov_slug="//' | sed -e 's/"\.*//')"
+ fi
+else
+ say " ${g}Yaml not found, that's ok! Learn more at${x} ${b}http://docs.codecov.io/docs/codecov-yaml${x}"
+
+fi
+
+if [ "$branch_o" != "" ];
+then
+ branch=$(urlencode "$branch_o")
+else
+ branch=$(urlencode "$branch")
+fi
+
+query="branch=$branch\
+ &commit=$commit\
+ &build=$([ "$build_o" = "" ] && echo "$build" || echo "$build_o")\
+ &build_url=$build_url\
+ &name=$(urlencode "$name")\
+ &tag=$([ "$tag_o" = "" ] && echo "$tag" || echo "$tag_o")\
+ &slug=$([ "$slug_o" = "" ] && urlencode "$slug" || urlencode "$slug_o")\
+ &service=$service\
+ &flags=$flags\
+ &pr=$([ "$pr_o" = "" ] && echo "${pr##\#}" || echo "${pr_o##\#}")\
+ &job=$job"
+
+if [ "$ft_search" = "1" ];
+then
+  # detect bower components location
+ bower_components="bower_components"
+ bower_rc=$(cd "$git_root" && cat .bowerrc 2>/dev/null || echo "")
+ if [ "$bower_rc" != "" ];
+ then
+ bower_components=$(echo "$bower_rc" | tr -d '\n' | grep '"directory"' | cut -d'"' -f4 | sed -e 's/\/$//')
+ if [ "$bower_components" = "" ];
+ then
+ bower_components="bower_components"
+ fi
+ fi
+
+ # Swift Coverage
+ if [ "$ft_xcodellvm" = "1" ] && [ -d "$ddp" ];
+ then
+ say "${e}==>${x} Processing Xcode reports via llvm-cov"
+ say " DerivedData folder: $ddp"
+ profdata_files=$(find "$ddp" -name '*.profdata' 2>/dev/null || echo '')
+ if [ "$profdata_files" != "" ];
+ then
+ # xcode via profdata
+ if [ "$xp" = "" ];
+ then
+ # xp=$(xcodebuild -showBuildSettings 2>/dev/null | grep -i "^\s*PRODUCT_NAME" | sed -e 's/.*= \(.*\)/\1/')
+ # say " ${e}->${x} Speed up Xcode processing by adding ${e}-J '$xp'${x}"
+        say "    ${g}hint${x} Speed up Swift processing by using ${g}-J 'AppName'${x} (regexp accepted)"
+ say " ${g}hint${x} This will remove Pods/ from your report. Also ${b}https://docs.codecov.io/docs/ignoring-paths${x}"
+ fi
+ while read -r profdata;
+ do
+ if [ "$profdata" != "" ];
+ then
+ swiftcov "$profdata" "$xp"
+ fi
+ done <<< "$profdata_files"
+ else
+ say " ${e}->${x} No Swift coverage found"
+ fi
+
+ # Obj-C Gcov Coverage
+ if [ "$ft_gcov" = "1" ];
+ then
+ say " ${e}->${x} Running $gcov_exe for Obj-C"
+ bash -c "find $ddp -type f -name '*.gcda' $gcov_include $gcov_ignore -exec $gcov_exe -p $gcov_arg {} +" || true
+ fi
+ fi
+
+ if [ "$ft_xcodeplist" = "1" ] && [ -d "$ddp" ];
+ then
+ say "${e}==>${x} Processing Xcode plists"
+ plists_files=$(find "$ddp" -name '*.xccoverage' 2>/dev/null || echo '')
+ if [ "$plists_files" != "" ];
+ then
+ while read -r plist;
+ do
+ if [ "$plist" != "" ];
+ then
+ say " ${g}Found${x} plist file at $plist"
+ plutil -convert xml1 -o "$(basename "$plist").plist" -- $plist
+ fi
+ done <<< "$plists_files"
+ fi
+ fi
+
+ # Gcov Coverage
+ if [ "$ft_gcov" = "1" ];
+ then
+ say "${e}==>${x} Running gcov in $proj_root ${e}(disable via -X gcov)${x}"
+ bash -c "find $proj_root -type f -name '*.gcno' $gcov_include $gcov_ignore -execdir $gcov_exe -pb $gcov_arg {} +" || true
+ else
+ say "${e}==>${x} gcov disabled"
+ fi
+
+ # Python Coverage
+ if [ "$ft_coveragepy" = "1" ];
+ then
+ if [ ! -f coverage.xml ];
+ then
+ if which coverage >/dev/null 2>&1;
+ then
+ say "${e}==>${x} Python coveragepy exists ${e}disable via -X coveragepy${x}"
+
+ dotcoverage=$(find "$git_root" -name '.coverage' -or -name '.coverage.*' | head -1 || echo '')
+ if [ "$dotcoverage" != "" ];
+ then
+ cd "$(dirname "$dotcoverage")"
+ if [ ! -f .coverage ];
+ then
+ say " ${e}->${x} Running coverage combine"
+ coverage combine -a
+ fi
+ say " ${e}->${x} Running coverage xml"
+ if [ "$(coverage xml -i)" != "No data to report." ];
+ then
+ files="$files
+$PWD/coverage.xml"
+ else
+ say " ${r}No data to report.${x}"
+ fi
+ cd "$proj_root"
+ else
+ say " ${r}No .coverage file found.${x}"
+ fi
+ else
+ say "${e}==>${x} Python coveragepy not found"
+ fi
+ fi
+ else
+ say "${e}==>${x} Python coveragepy disabled"
+ fi
+
+ if [ "$search_in_o" != "" ];
+ then
+ # location override
+ search_in="$search_in_o"
+ fi
+
+ say "$e==>$x Searching for coverage reports in:"
+ for _path in $search_in
+ do
+ say " ${g}+${x} $_path"
+ done
+
+ patterns="find $search_in \( \
+ -name vendor \
+ -or -name htmlcov \
+ -or -name virtualenv \
+ -or -name js/generated/coverage \
+ -or -name .virtualenv \
+ -or -name virtualenvs \
+ -or -name .virtualenvs \
+ -or -name .env \
+ -or -name .envs \
+ -or -name env \
+ -or -name .yarn-cache \
+ -or -name envs \
+ -or -name .venv \
+ -or -name .venvs \
+ -or -name venv \
+ -or -name venvs \
+ -or -name .git \
+ -or -name .hg \
+ -or -name .tox \
+ -or -name __pycache__ \
+ -or -name '.egg-info*' \
+ -or -name '$bower_components' \
+ -or -name node_modules \
+ -or -name 'conftest_*.c.gcov' \
+ \) -prune -or \
+ -type f \( -name '*coverage*.*' \
+ -or -name 'nosetests.xml' \
+ -or -name 'jacoco*.xml' \
+ -or -name 'clover.xml' \
+ -or -name 'report.xml' \
+ -or -name '*.codecov.*' \
+ -or -name 'codecov.*' \
+ -or -name 'cobertura.xml' \
+ -or -name 'excoveralls.json' \
+ -or -name 'luacov.report.out' \
+ -or -name 'coverage-final.json' \
+ -or -name 'naxsi.info' \
+ -or -name 'lcov.info' \
+ -or -name 'lcov.dat' \
+ -or -name '*.lcov' \
+ -or -name '*.clover' \
+ -or -name 'cover.out' \
+ -or -name 'gcov.info' \
+ -or -name '*.gcov' \
+ -or -name '*.lst' \
+ $include_cov \) \
+ $exclude_cov \
+ -not -name '*.profdata' \
+ -not -name 'coverage-summary.json' \
+ -not -name 'phpunit-code-coverage.xml' \
+ -not -name '*/classycle/report.xml' \
+ -not -name 'remapInstanbul.coverage*.json' \
+ -not -name 'phpunit-coverage.xml' \
+ -not -name '*codecov.yml' \
+ -not -name '*.serialized' \
+ -not -name '.coverage*' \
+ -not -name '.*coveragerc' \
+ -not -name '*.sh' \
+ -not -name '*.bat' \
+ -not -name '*.ps1' \
+ -not -name '*.env' \
+ -not -name '*.cmake' \
+ -not -name '*.dox' \
+ -not -name '*.ec' \
+ -not -name '*.rst' \
+ -not -name '*.h' \
+ -not -name '*.scss' \
+ -not -name '*.o' \
+ -not -name '*.proto' \
+ -not -name '*.sbt' \
+ -not -name '*.xcoverage.*' \
+ -not -name '*.gz' \
+ -not -name '*.conf' \
+ -not -name '*.p12' \
+ -not -name '*.csv' \
+ -not -name '*.rsp' \
+ -not -name '*.m4' \
+ -not -name '*.pem' \
+ -not -name '*~' \
+ -not -name '*.exe' \
+ -not -name '*.am' \
+ -not -name '*.template' \
+ -not -name '*.cp' \
+ -not -name '*.bw' \
+ -not -name '*.crt' \
+ -not -name '*.log' \
+ -not -name '*.cmake' \
+ -not -name '*.pth' \
+ -not -name '*.in' \
+ -not -name '*.jar*' \
+ -not -name '*.pom*' \
+ -not -name '*.png' \
+ -not -name '*.jpg' \
+ -not -name '*.sql' \
+ -not -name '*.jpeg' \
+ -not -name '*.svg' \
+ -not -name '*.gif' \
+ -not -name '*.csv' \
+ -not -name '*.snapshot' \
+ -not -name '*.mak*' \
+ -not -name '*.bash' \
+ -not -name '*.data' \
+ -not -name '*.py' \
+ -not -name '*.class' \
+ -not -name '*.xcconfig' \
+ -not -name '*.ec' \
+ -not -name '*.coverage' \
+ -not -name '*.pyc' \
+ -not -name '*.cfg' \
+ -not -name '*.egg' \
+ -not -name '*.ru' \
+ -not -name '*.css' \
+ -not -name '*.less' \
+ -not -name '*.pyo' \
+ -not -name '*.whl' \
+ -not -name '*.html' \
+ -not -name '*.ftl' \
+ -not -name '*.erb' \
+ -not -name '*.rb' \
+ -not -name '*.js' \
+ -not -name '*.jade' \
+ -not -name '*.db' \
+ -not -name '*.md' \
+ -not -name '*.cpp' \
+ -not -name '*.gradle' \
+ -not -name '*.tar.tz' \
+ -not -name '*.scss' \
+ -not -name 'include.lst' \
+ -not -name 'fullLocaleNames.lst' \
+ -not -name 'inputFiles.lst' \
+ -not -name 'createdFiles.lst' \
+ -not -name 'scoverage.measurements.*' \
+ -not -name 'test_*_coverage.txt' \
+ -not -name 'testrunner-coverage*' \
+ -print 2>/dev/null"
+ files=$(eval "$patterns" || echo '')
+
+elif [ "$include_cov" != "" ];
+then
+ files=$(eval "find $search_in -type f \( ${include_cov:5} \)$exclude_cov 2>/dev/null" || echo '')
+fi
+
+num_of_files=$(echo "$files" | wc -l | tr -d ' ')
+if [ "$num_of_files" != '' ] && [ "$files" != '' ];
+then
+ say " ${e}->${x} Found $num_of_files reports"
+fi
+
+# no files found
+if [ "$files" = "" ];
+then
+ say "${r}-->${x} No coverage report found."
+ say " Please visit ${b}http://docs.codecov.io/docs/supported-languages${x}"
+ exit ${exit_with};
+fi
+
+if [ "$ft_network" == "1" ];
+then
+ say "${e}==>${x} Detecting git/mercurial file structure"
+ network=$(cd "$git_root" && git ls-files 2>/dev/null || hg locate 2>/dev/null || echo "")
+ if [ "$network" = "" ];
+ then
+ network=$(find "$git_root" \( \
+ -name virtualenv \
+ -name .virtualenv \
+ -name virtualenvs \
+ -name .virtualenvs \
+ -name '*.png' \
+ -name '*.gif' \
+ -name '*.jpg' \
+ -name '*.jpeg' \
+ -name '*.md' \
+ -name .env \
+ -name .envs \
+ -name env \
+ -name envs \
+ -name .venv \
+ -name .venvs \
+ -name venv \
+ -name venvs \
+ -name .git \
+ -name .egg-info \
+ -name shunit2-2.1.6 \
+ -name vendor \
+ -name __pycache__ \
+ -name node_modules \
+ -path '*/$bower_components/*' \
+ -path '*/target/delombok/*' \
+ -path '*/build/lib/*' \
+ -path '*/js/generated/coverage/*' \
+ \) -prune -or \
+ -type f -print 2>/dev/null || echo '')
+ fi
+
+ if [ "$prefix_o" != "" ];
+ then
+ network=$(echo "$network" | awk "{print \"$prefix_o/\"\$0}")
+ fi
+fi
+
+upload_file=`mktemp /tmp/codecov.XXXXXX`
+adjustments_file=`mktemp /tmp/codecov.adjustments.XXXXXX`
+
+cleanup() {
+ rm -f $upload_file $adjustments_file $upload_file.gz
+}
+
+trap cleanup INT ABRT TERM
+
+if [ "$env" != "" ];
+then
+ inc_env=""
+ say "${e}==>${x} Appending build variables"
+ for varname in $(echo "$env" | tr ',' ' ')
+ do
+ if [ "$varname" != "" ];
+ then
+ say " ${g}+${x} $varname"
+ inc_env="${inc_env}${varname}=$(eval echo "\$${varname}")
+"
+ fi
+ done
+
+echo "$inc_env<<<<<< ENV" >> $upload_file
+fi
+
+# Append git file list
+# write discovered yaml location
+echo "$yaml" >> $upload_file
+if [ "$ft_network" == "1" ];
+then
+ i="woff|eot|otf" # fonts
+ i="$i|gif|png|jpg|jpeg|psd" # images
+ i="$i|ptt|pptx|numbers|pages|md|txt|xlsx|docx|doc|pdf|html|csv" # docs
+ i="$i|yml|yaml|.gitignore" # supporting docs
+ echo "$network" | grep -vwE "($i)$" >> $upload_file
+fi
+echo "<<<<<< network" >> $upload_file
+
+fr=0
+say "${e}==>${x} Reading reports"
+while IFS='' read -r file;
+do
+ # read the coverage file
+ if [ "$(echo "$file" | tr -d ' ')" != '' ];
+ then
+ if [ -f "$file" ];
+ then
+ report_len=$(wc -c < "$file")
+ if [ "$report_len" -ne 0 ];
+ then
+ say " ${g}+${x} $file ${e}bytes=$(echo "$report_len" | tr -d ' ')${x}"
+        # append to the upload file
+ _filename=$(basename "$file")
+ if [ "${_filename##*.}" = 'gcov' ];
+ then
+ echo "# path=$(echo "$file.reduced" | sed "s|^$git_root/||")" >> $upload_file
+ # get file name
+ head -1 $file >> $upload_file
+ # 1. remove source code
+ # 2. remove ending bracket lines
+ # 3. remove whitespace
+ # 4. remove contextual lines
+ # 5. remove function names
+ awk -F': *' '{print $1":"$2":"}' $file \
+ | sed '\/: *} *$/d' \
+ | sed 's/^ *//' \
+ | sed '/^-/d' \
+ | sed 's/^function.*/func/' >> $upload_file
+ else
+ echo "# path=$(echo "$file" | sed "s|^$git_root/||")" >> $upload_file
+ cat "$file" >> $upload_file
+ fi
+ echo "<<<<<< EOF" >> $upload_file
+ fr=1
+ if [ "$clean" = "1" ];
+ then
+ rm "$file"
+ fi
+ else
+ say " ${r}-${x} Skipping empty file $file"
+ fi
+ else
+ say " ${r}-${x} file not found at $file"
+ fi
+ fi
+done <<< "$(echo -e "$files")"
+
+if [ "$fr" = "0" ];
+then
+ say "${r}-->${x} No coverage data found."
+ say " Please visit ${b}http://docs.codecov.io/docs/supported-languages${x}"
+  say "    search for your project's language to learn how to collect reports."
+ exit ${exit_with};
+fi
+
+if [ "$ft_fix" = "1" ];
+then
+ say "${e}==>${x} Appending adjustments"
+ say " ${b}http://docs.codecov.io/docs/fixing-reports${x}"
+
+ empty_line='^[[:space:]]*$'
+ # //
+ syntax_comment='^[[:space:]]*//.*'
+ # /* or */
+ syntax_comment_block='^[[:space:]]*(\/\*|\*\/)[[:space:]]*$'
+ # { or }
+ syntax_bracket='^[[:space:]]*[\{\}][[:space:]]*(//.*)?$'
+ # [ or ]
+ syntax_list='^[[:space:]]*[][][[:space:]]*(//.*)?$'
+
+ skip_dirs="-not -path '*/$bower_components/*' \
+ -not -path '*/node_modules/*'"
+
+ cut_and_join() {
+ awk 'BEGIN { FS=":" }
+ $3 ~ /\/\*/ || $3 ~ /\*\// { print $0 ; next }
+ $1!=key { if (key!="") print out ; key=$1 ; out=$1":"$2 ; next }
+ { out=out","$2 }
+ END { print out }' 2>/dev/null
+ }
+
+ if echo "$network" | grep -m1 '.kt$' 1>/dev/null;
+ then
+ # skip brackets and comments
+ find "$git_root" -type f \
+ -name '*.kt' \
+ -exec \
+ grep -nIHE -e $syntax_bracket \
+ -e $syntax_comment_block {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+
+ # last line in file
+ find "$git_root" -type f \
+ -name '*.kt' -exec \
+ wc -l {} \; \
+ | while read l; do echo "EOF: $l"; done \
+ 2>/dev/null \
+ >> $adjustments_file \
+ || echo ''
+
+ fi
+
+ if echo "$network" | grep -m1 '.go$' 1>/dev/null;
+ then
+ # skip empty lines, comments, and brackets
+ find "$git_root" -not -path '*/vendor/*' \
+ -type f \
+ -name '*.go' \
+ -exec \
+ grep -nIHE \
+ -e $empty_line \
+ -e $syntax_comment \
+ -e $syntax_comment_block \
+ -e $syntax_bracket \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+ fi
+
+ if echo "$network" | grep -m1 '.dart$' 1>/dev/null;
+ then
+ # skip brackets
+ find "$git_root" -type f \
+ -name '*.dart' \
+ -exec \
+ grep -nIHE \
+ -e $syntax_bracket \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+ fi
+
+ if echo "$network" | grep -m1 '.php$' 1>/dev/null;
+ then
+ # skip empty lines, comments, and brackets
+ find "$git_root" -not -path "*/vendor/*" \
+ -type f \
+ -name '*.php' \
+ -exec \
+ grep -nIHE \
+ -e $syntax_list \
+ -e $syntax_bracket \
+ -e '^[[:space:]]*\);[[:space:]]*(//.*)?$' \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+ fi
+
+ if echo "$network" | grep -m1 '\(.cpp\|.h\|.cxx\|.c\|.hpp\|.m\)$' 1>/dev/null;
+ then
+ # skip brackets
+ find "$git_root" -type f \
+ $skip_dirs \
+ \( \
+ -name '*.h' \
+ -or -name '*.cpp' \
+ -or -name '*.cxx' \
+ -or -name '*.m' \
+ -or -name '*.c' \
+ -or -name '*.hpp' \
+ \) -exec \
+ grep -nIHE \
+ -e $empty_line \
+ -e $syntax_bracket \
+ -e '// LCOV_EXCL' \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+
+ # skip brackets
+ find "$git_root" -type f \
+ $skip_dirs \
+ \( \
+ -name '*.h' \
+ -or -name '*.cpp' \
+ -or -name '*.cxx' \
+ -or -name '*.m' \
+ -or -name '*.c' \
+ -or -name '*.hpp' \
+ \) -exec \
+ grep -nIH '// LCOV_EXCL' \
+ {} \; \
+ >> $adjustments_file \
+ || echo ''
+
+ fi
+
+ found=$(cat $adjustments_file | tr -d ' ')
+
+ if [ "$found" != "" ];
+ then
+ say " ${g}+${x} Found adjustments"
+ echo "# path=fixes" >> $upload_file
+ cat $adjustments_file >> $upload_file
+ echo "<<<<<< EOF" >> $upload_file
+ rm -rf $adjustments_file
+ else
+ say " ${e}->${x} No adjustments found"
+ fi
+fi
+
+if [ "$url_o" != "" ];
+then
+ url="$url_o"
+fi
+
+if [ "$dump" != "0" ];
+then
+ # trim whitespace from query
+ say " ${e}->${x} Dumping upload file (no upload)"
+ echo "$url/upload/v4?$(echo "package=bash-$VERSION&token=$token&$query" | tr -d ' ')"
+ cat $upload_file
+else
+
+ say "${e}==>${x} Gzipping contents"
+ gzip -nf9 $upload_file
+
+ query=$(echo "${query}" | tr -d ' ')
+ say "${e}==>${x} Uploading reports"
+ say " ${e}url:${x} $url"
+ say " ${e}query:${x} $query"
+
+ # now add token to query
+ query=$(echo "package=bash-$VERSION&token=$token&$query" | tr -d ' ')
+
+ if [ "$ft_s3" = "1" ];
+ then
+ i="0"
+ while [ $i -lt 4 ]
+ do
+ i=$[$i+1]
+ say " ${e}->${x} Pinging Codecov"
+ res=$(curl $curl_s -X POST $curlargs $cacert \
+ -H 'X-Reduced-Redundancy: false' \
+ -H 'X-Content-Type: application/x-gzip' \
+ "$url/upload/v4?$query" || true)
+      # a good reply is "https://codecov.io" + "\n" + "https://codecov.s3.amazonaws.com/..."
+ status=$(echo "$res" | head -1 | grep 'HTTP ' | cut -d' ' -f2)
+ if [ "$status" = "" ];
+ then
+ s3target=$(echo "$res" | sed -n 2p)
+ say " ${e}->${x} Uploading"
+ s3=$(curl $curl_s -fiX PUT $curlawsargs \
+ --data-binary @$upload_file.gz \
+ -H 'Content-Type: application/x-gzip' \
+ -H 'Content-Encoding: gzip' \
+ -H 'x-amz-acl: public-read' \
+ "$s3target" || true)
+ if [ "$s3" != "" ];
+ then
+ say " ${g}->${x} View reports at ${b}$(echo "$res" | sed -n 1p)${x}"
+ exit 0
+ else
+ say " ${r}X>${x} Failed to upload"
+ fi
+ elif [ "$status" = "400" ];
+ then
+ # 400 Error
+ say "${g}${res}${x}"
+ exit ${exit_with}
+ fi
+ say " ${e}->${x} Sleeping for 30s and trying again..."
+ sleep 30
+ done
+ fi
+
+ say " ${e}->${x} Uploading to Codecov"
+ i="0"
+ while [ $i -lt 4 ]
+ do
+ i=$[$i+1]
+
+ res=$(curl $curl_s -X POST $curlargs $cacert \
+ --data-binary @$upload_file.gz \
+ -H 'Content-Type: text/plain' \
+ -H 'Content-Encoding: gzip' \
+ -H 'X-Content-Encoding: gzip' \
+ -H 'Accept: text/plain' \
+ "$url/upload/v2?$query" || echo 'HTTP 500')
+ # HTTP 200
+ # http://....
+ status=$(echo "$res" | head -1 | cut -d' ' -f2)
+ if [ "$status" = "" ];
+ then
+ say " View reports at ${b}$(echo "$res" | head -2 | tail -1)${x}"
+ exit 0
+
+ elif [ "${status:0:1}" = "5" ];
+ then
+ say " ${e}->${x} Sleeping for 30s and trying again..."
+ sleep 30
+
+ else
+ say " ${g}${res}${x}"
+ exit 0
+ exit ${exit_with}
+ fi
+
+ done
+
+ say " ${r}X> Failed to upload coverage reports${x}"
+fi
+
+exit ${exit_with}
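For local experimentation the vendored uploader can be driven directly; the flags come from its own help text above (-d dumps the payload instead of uploading it, -f skips searching), and the report path is only a placeholder:

    # Dump the assembled upload payload to stdout without uploading:
    bash .github/codecov.bash -d
    # Target one explicitly named report instead of searching for reports:
    bash .github/codecov.bash -d -f 'build/coverage/lcov.info'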
Modified: trunk/.travis.yml
===================================================================
--- trunk/.travis.yml 2018-03-03 03:31:51 UTC (rev 16441)
+++ trunk/.travis.yml 2018-03-03 09:11:46 UTC (rev 16442)
@@ -40,7 +40,7 @@
- sudo head -n1000 /var/log/postgresql/postgresql-9.6-main.log
after_success:
- - bash <(curl -s https://codecov.io/bash)
+ - bash .github/codecov.bash
language: c
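Because the script is now vendored instead of being fetched at build time, it needs an occasional manual refresh. One possible workflow, reusing the upstream URL the old after_success step pointed at and reviewing the result before committing:

    # Re-fetch the upstream uploader and inspect what changed:
    curl -fsSL https://codecov.io/bash > .github/codecov.bash
    svn diff .github/codecov.bash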