aboutsummaryrefslogtreecommitdiff
path: root/dev/create-release/create-release.sh
blob: 30190dcd41ec5f43c488926ecad2769577b5ca38 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Quick-and-dirty automation of making maven and binary releases. Not robust at all.
# Publishes releases to Maven and packages/copies binary release artifacts.
# Expects to be run in a totally empty directory.
#
# Options:
#  --skip-create-release	Assume the desired release tag already exists
#  --skip-publish 		Do not publish to Maven central
#  --skip-package		Do not package and upload binary artifacts
# Would be nice to add:
#  - Send output to stderr and have useful logging in stdout

# Release coordinates and credentials. Every value can be overridden from the
# environment; the fallbacks below are placeholders (the ASF_PASSWORD /
# GPG_PASSPHRASE defaults are deliberately not real secrets).
ASF_USERNAME="${ASF_USERNAME:-pwendell}"
ASF_PASSWORD="${ASF_PASSWORD:-XXX}"
GPG_PASSPHRASE="${GPG_PASSPHRASE:-XXX}"
GIT_BRANCH="${GIT_BRANCH:-branch-1.0}"
RELEASE_VERSION="${RELEASE_VERSION:-1.2.0}"
# The identifier used when publishing may differ from the version baked into
# the release sources (e.g. an rc-X suffix); it defaults to RELEASE_VERSION.
PUBLISH_VERSION="${PUBLISH_VERSION:-$RELEASE_VERSION}"
NEXT_VERSION="${NEXT_VERSION:-1.2.1}"
RC_NAME="${RC_NAME:-rc2}"

# Local Maven repository layout and the Nexus staging endpoint.
M2_REPO=~/.m2/repository
SPARK_REPO="$M2_REPO/org/apache/spark"
NEXUS_ROOT=https://repository.apache.org/service/local/staging
NEXUS_PROFILE=d63f592e7eac0 # Profile for Spark staging uploads

# Fail fast if no JDK is configured; every build step below requires one.
if [ -z "$JAVA_HOME" ]; then
  echo "Error: JAVA_HOME is not set, cannot proceed."
  # Exit statuses are unsigned 8-bit values; "exit -1" is non-portable and
  # wraps to 255 in bash. Use a plain 1 to signal failure.
  exit 1
fi
# Docs are built with Java 7 when available; fall back to the default JDK.
JAVA_7_HOME=${JAVA_7_HOME:-$JAVA_HOME}

# Abort on the first failing command from here on.
set -e

# Tag name for this release candidate, e.g. v1.2.0-rc2.
GIT_TAG=v$RELEASE_VERSION-$RC_NAME

# Step 1: create the release commit + tag and push them, unless the caller
# passed --skip-create-release (matched as a substring of all arguments).
if [[ ! "$@" =~ --skip-create-release ]]; then
  echo "Creating release commit and publishing to Apache repository"
  # Artifact publishing
  # Clone over HTTPS with credentials embedded in the URL so the pushes
  # below do not prompt. NOTE: the password ends up in shell history/ps.
  git clone https://$ASF_USERNAME:$ASF_PASSWORD@git-wip-us.apache.org/repos/asf/spark.git \
    -b $GIT_BRANCH
  pushd spark
  export MAVEN_OPTS="-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"

  # Create release commits and push them to github
  # NOTE: This is done "eagerly", i.e. we don't check whether we can successfully
  # build before we coin the release commit. This helps avoid races where
  # other people add commits to this branch while we are in the middle of building.
  cur_ver="${RELEASE_VERSION}-SNAPSHOT"
  rel_ver="${RELEASE_VERSION}"
  next_ver="${NEXT_VERSION}-SNAPSHOT"

  # BRE sed pattern: match a <version> line indented by 2-4 spaces and keep
  # the indentation via the \1 backreference. Rewrites SNAPSHOT -> release
  # version in every pom.xml / package.scala outside dev/.
  old="^\( \{2,4\}\)<version>${cur_ver}<\/version>$"
  new="\1<version>${rel_ver}<\/version>"
  find . -name pom.xml | grep -v dev | xargs -I {} sed -i \
    -e "s/${old}/${new}/" {}
  find . -name package.scala | grep -v dev | xargs -I {} sed -i \
    -e "s/${old}/${new}/" {}

  git commit -a -m "Preparing Spark release $GIT_TAG"
  echo "Creating tag $GIT_TAG at the head of $GIT_BRANCH"
  git tag $GIT_TAG

  # Second rewrite: bump the branch from the release version to the next
  # development SNAPSHOT, committed on top of the tagged release commit.
  old="^\( \{2,4\}\)<version>${rel_ver}<\/version>$"
  new="\1<version>${next_ver}<\/version>"
  find . -name pom.xml | grep -v dev | xargs -I {} sed -i \
    -e "s/$old/$new/" {}
  find . -name package.scala | grep -v dev | xargs -I {} sed -i \
    -e "s/${old}/${new}/" {}
  git commit -a -m "Preparing development version $next_ver"
  git push origin $GIT_TAG
  git push origin HEAD:$GIT_BRANCH
  popd
  # Discard the working clone; later steps re-clone fresh.
  rm -rf spark
fi

# Step 2: build the maven artifacts from the release tag and push them to a
# Nexus staging repository, unless --skip-publish was passed.
if [[ ! "$@" =~ --skip-publish ]]; then
  git clone https://$ASF_USERNAME:$ASF_PASSWORD@git-wip-us.apache.org/repos/asf/spark.git
  pushd spark
  git checkout --force $GIT_TAG

  # Substitute in case published version is different than released
  # (same indentation-preserving BRE pattern as the release-commit step).
  old="^\( \{2,4\}\)<version>${RELEASE_VERSION}<\/version>$"
  new="\1<version>${PUBLISH_VERSION}<\/version>"
  find . -name pom.xml | grep -v dev | xargs -I {} sed -i \
    -e "s/${old}/${new}/" {}

  # Using Nexus API documented here:
  # https://support.sonatype.com/entries/39720203-Uploading-to-a-Staging-Repository-via-REST-API
  echo "Creating Nexus staging repository"
  repo_request="<promoteRequest><data><description>Apache Spark $GIT_TAG (published as $PUBLISH_VERSION)</description></data></promoteRequest>"
  out=$(curl -X POST -d "$repo_request" -u $ASF_USERNAME:$ASF_PASSWORD \
    -H "Content-Type:application/xml" -v \
    $NEXUS_ROOT/profiles/$NEXUS_PROFILE/start)
  # Scrape the generated repository id (orgapachespark-NNNN) from the response.
  staged_repo_id=$(echo $out | sed -e "s/.*\(orgapachespark-[0-9]\{4\}\).*/\1/")
  echo "Created Nexus staging repository: $staged_repo_id"

  # Start from an empty local repo so only this build's artifacts get uploaded.
  rm -rf $SPARK_REPO

  build/mvn -DskipTests -Pyarn -Phive -Prelease \
    -Phive-thriftserver -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
    clean install

  # Second pass: rebuild against Scala 2.11, then restore 2.10 in the tree.
  ./dev/change-version-to-2.11.sh

  build/mvn -DskipTests -Pyarn -Phive -Prelease \
    -Dscala-2.11 -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
    clean install

  ./dev/change-version-to-2.10.sh

  pushd $SPARK_REPO

  # Remove any extra files generated during install
  find . -type f |grep -v \.jar |grep -v \.pom | xargs rm

  echo "Creating hash and signature files"
  # The file list is captured once, before any .asc/.md5/.sha1 are created,
  # so the generated files are not themselves signed.
  for file in $(find . -type f)
  do
    # gpg reads the passphrase from stdin via --passphrase-fd 0.
    echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --output $file.asc --detach-sig --armour $file;
    if [ $(command -v md5) ]; then
      # Available on OS X; -q to keep only hash
      md5 -q $file > $file.md5
    else
      # Available on Linux; cut to keep only hash
      md5sum $file | cut -f1 -d' ' > $file.md5
    fi
    shasum -a 1 $file | cut -f1 -d' ' > $file.sha1
  done

  nexus_upload=$NEXUS_ROOT/deployByRepositoryId/$staged_repo_id
  # (fixed typo: previously logged "Uplading")
  echo "Uploading files to $nexus_upload"
  for file in $(find . -type f)
  do
    # strip leading ./
    file_short=$(echo $file | sed -e "s/\.\///")
    dest_url="$nexus_upload/org/apache/spark/$file_short"
    echo "  Uploading $file_short"
    curl -u $ASF_USERNAME:$ASF_PASSWORD --upload-file $file_short $dest_url
  done

  echo "Closing nexus staging repository"
  repo_request="<promoteRequest><data><stagedRepositoryId>$staged_repo_id</stagedRepositoryId><description>Apache Spark $GIT_TAG (published as $PUBLISH_VERSION)</description></data></promoteRequest>"
  out=$(curl -X POST -d "$repo_request" -u $ASF_USERNAME:$ASF_PASSWORD \
    -H "Content-Type:application/xml" -v \
    $NEXUS_ROOT/profiles/$NEXUS_PROFILE/finish)
  echo "Closed Nexus staging repository: $staged_repo_id"

  popd
  popd
  rm -rf spark
fi

# Step 3: build the source tarball and all binary distributions, then copy
# artifacts and docs to people.apache.org, unless --skip-package was passed.
if [[ ! "$@" =~ --skip-package ]]; then
  # Source and binary tarballs
  echo "Packaging release tarballs"
  git clone https://git-wip-us.apache.org/repos/asf/spark.git
  cd spark
  git checkout --force $GIT_TAG
  # Record the exact commit the release was cut from (reported in the summary).
  release_hash=`git rev-parse HEAD`

  # Strip git metadata so none of it ends up in the tarballs.
  rm .gitignore
  rm -rf .git
  cd ..

  # Stage a versioned copy, tar it, then sign and checksum the tarball.
  # gpg reads the passphrase from stdin via --passphrase-fd 0.
  cp -r spark spark-$RELEASE_VERSION
  tar cvzf spark-$RELEASE_VERSION.tgz spark-$RELEASE_VERSION
  echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --armour --output spark-$RELEASE_VERSION.tgz.asc \
    --detach-sig spark-$RELEASE_VERSION.tgz
  echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md MD5 spark-$RELEASE_VERSION.tgz > \
    spark-$RELEASE_VERSION.tgz.md5
  echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md SHA512 spark-$RELEASE_VERSION.tgz > \
    spark-$RELEASE_VERSION.tgz.sha
  rm -rf spark-$RELEASE_VERSION

  # Updated for each binary build
  # Build one binary distribution, then sign and checksum the resulting tarball.
  # Runs from the directory containing the pristine "spark" checkout; intended
  # to be launched in the background (each invocation works in its own copy).
  # Arguments:
  #   $1 - NAME:      artifact suffix, e.g. "hadoop2.4"
  #   $2 - FLAGS:     maven profile/property flags passed to make-distribution.sh
  #   $3 - ZINC_PORT: dedicated Zinc port so parallel builds don't share a server
  make_binary_release() {
    NAME=$1
    FLAGS=$2
    ZINC_PORT=$3
    cp -r spark spark-$RELEASE_VERSION-bin-$NAME

    cd spark-$RELEASE_VERSION-bin-$NAME

    # TODO There should probably be a flag to make-distribution to allow 2.11 support
    if [[ $FLAGS == *scala-2.11* ]]; then
      ./dev/change-version-to-2.11.sh
    fi

    export ZINC_PORT=$ZINC_PORT
    echo "Creating distribution: $NAME ($FLAGS)"
    # Capture stdout AND stderr in the log. The redirection order matters:
    # "> file 2>&1" sends both streams to the log, whereas the previous
    # "2>&1 > file" pointed stderr at the terminal and lost build errors.
    ./make-distribution.sh --name $NAME --tgz $FLAGS -DzincPort=$ZINC_PORT \
      > ../binary-release-$NAME.log 2>&1
    cd ..
    cp spark-$RELEASE_VERSION-bin-$NAME/spark-$RELEASE_VERSION-bin-$NAME.tgz .

    # Sign and checksum the tarball; gpg reads the passphrase from stdin.
    echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --armour \
      --output spark-$RELEASE_VERSION-bin-$NAME.tgz.asc \
      --detach-sig spark-$RELEASE_VERSION-bin-$NAME.tgz
    echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md \
      MD5 spark-$RELEASE_VERSION-bin-$NAME.tgz > \
      spark-$RELEASE_VERSION-bin-$NAME.tgz.md5
    echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md \
      SHA512 spark-$RELEASE_VERSION-bin-$NAME.tgz > \
      spark-$RELEASE_VERSION-bin-$NAME.tgz.sha
  }

  # We increment the Zinc port each time to avoid OOM's and other craziness if multiple builds
  # share the same Zinc server. All eight builds run in parallel; note that a
  # failing background job does not abort the script under set -e, so check
  # the per-build binary-release-*.log files if artifacts are missing.
  make_binary_release "hadoop1" "-Psparkr -Phadoop-1 -Phive -Phive-thriftserver" "3030" &
  make_binary_release "hadoop1-scala2.11" "-Psparkr -Phadoop-1 -Phive -Dscala-2.11" "3031" &
  make_binary_release "cdh4" "-Psparkr -Phadoop-1 -Phive -Phive-thriftserver -Dhadoop.version=2.0.0-mr1-cdh4.2.0" "3032" &
  make_binary_release "hadoop2.3" "-Psparkr -Phadoop-2.3 -Phive -Phive-thriftserver -Pyarn" "3033" &
  make_binary_release "hadoop2.4" "-Psparkr -Phadoop-2.4 -Phive -Phive-thriftserver -Pyarn" "3034" &
  make_binary_release "mapr3" "-Pmapr3 -Psparkr -Phive -Phive-thriftserver" "3035" &
  make_binary_release "mapr4" "-Pmapr4 -Psparkr -Pyarn -Phive -Phive-thriftserver" "3036" &
  make_binary_release "hadoop2.4-without-hive" "-Psparkr -Phadoop-2.4 -Pyarn" "3037" &
  wait
  # Remove the per-build working trees; the .tgz files were copied up a level.
  rm -rf spark-$RELEASE_VERSION-bin-*/

  # Copy data
  echo "Copying release tarballs"
  rc_folder=spark-$RELEASE_VERSION-$RC_NAME
  ssh $ASF_USERNAME@people.apache.org \
    mkdir /home/$ASF_USERNAME/public_html/$rc_folder
  scp spark-* \
    $ASF_USERNAME@people.apache.org:/home/$ASF_USERNAME/public_html/$rc_folder/

  # Docs
  cd spark
  sbt/sbt clean
  cd docs
  # Compile docs with Java 7 to use nicer format
  JAVA_HOME="$JAVA_7_HOME" PRODUCTION=1 RELEASE_VERSION="$RELEASE_VERSION" jekyll build
  echo "Copying release documentation"
  rc_docs_folder=${rc_folder}-docs
  ssh $ASF_USERNAME@people.apache.org \
    mkdir /home/$ASF_USERNAME/public_html/$rc_docs_folder
  rsync -r _site/* $ASF_USERNAME@people.apache.org:/home/$ASF_USERNAME/public_html/$rc_docs_folder

  # Final summary. printf is used because plain bash echo does not interpret
  # "\t"; the previous echo calls printed a literal backslash-t.
  echo "Release $RELEASE_VERSION completed:"
  printf 'Git tag:\t %s\n' "$GIT_TAG"
  printf 'Release commit:\t %s\n' "$release_hash"
  printf 'Binary location:\t http://people.apache.org/~%s/%s\n' "$ASF_USERNAME" "$rc_folder"
  printf 'Doc location:\t http://people.apache.org/~%s/%s\n' "$ASF_USERNAME" "$rc_docs_folder"
fi