Update deploy scripts for gha-webhook-listener compatibility (#23002)

* Soft fail Sentry uploads from Webpack

* Delete duplicated script

* Delint script

* Move symlink support from redeploy to deploy
pull/23038/head
Michael Telatynski 2022-08-09 13:23:41 +01:00 committed by GitHub
parent 9df3774886
commit 4b6d9a067b
4 changed files with 66 additions and 232 deletions
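
The commit splits the old single-step Deployer.deploy() into fetch() (download, optional GPG verification, and unpacking) and deploy() (config symlinks, bundle relocation, and the new "latest" symlink), so the gha-webhook-listener can hand the script either a tarball/URL or an already-extracted directory. Below is a minimal sketch, not part of the commit, of how a caller might drive the two steps; the import path and all filesystem paths are illustrative assumptions.

    from deploy import Deployer  # hypothetical import path

    deployer = Deployer()
    deployer.bundles_path = "./bundles"        # shared bundles directory
    deployer.symlink_latest = "./latest"       # 'latest' link is now written by deploy()
    deployer.symlink_paths = {"config.json": "../config.json"}

    # Step 1: download, verify, and unpack the tarball
    # (skipped when the listener already has an extracted directory)
    extracted_dir = deployer.fetch("https://example.org/element-web.tar.gz", "./deploys")

    # Step 2: apply config symlinks, move bundles, and repoint the 'latest' symlink
    deployer.deploy(extracted_dir)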

View File

@@ -1,191 +0,0 @@
#!/usr/bin/env python
#
# download and unpack a element-web tarball.
#
# Allows `bundles` to be extracted to a common directory, and a link to
# config.json to be added.

from __future__ import print_function

import argparse
import os
import os.path
import subprocess
import sys
import tarfile
import shutil
import glob

try:
    # python3
    from urllib.request import urlretrieve
except ImportError:
    # python2
    from urllib import urlretrieve


class DeployException(Exception):
    pass


def create_relative_symlink(linkname, target):
    relpath = os.path.relpath(target, os.path.dirname(linkname))
    print ("Symlink %s -> %s" % (linkname, relpath))
    os.symlink(relpath, linkname)


def move_bundles(source, dest):
    """Move the contents of the 'bundles' directory to a common dir

    We check that we will not be overwriting anything before we proceed.

    Args:
        source (str): path to 'bundles' within the extracted tarball
        dest (str): target common directory
    """
    if not os.path.isdir(dest):
        os.mkdir(dest)

    # build a map from source to destination, checking for non-existence as we go.
    renames = {}
    for f in os.listdir(source):
        dst = os.path.join(dest, f)
        if os.path.exists(dst):
            print (
                "Skipping bundle. The bundle includes '%s' which we have previously deployed."
                % f
            )
        else:
            renames[os.path.join(source, f)] = dst

    for (src, dst) in renames.iteritems():
        print ("Move %s -> %s" % (src, dst))
        os.rename(src, dst)


class Deployer:
    def __init__(self):
        self.packages_path = "."
        self.bundles_path = None
        self.should_clean = False
        # filename -> symlink path e.g 'config.localhost.json' => '../localhost/config.json'
        self.symlink_paths = {}
        self.verify_signature = True

    def deploy(self, tarball, extract_path):
        """Download a tarball if necessary, and unpack it

        Returns:
            (str) the path to the unpacked deployment
        """
        print("Deploying %s to %s" % (tarball, extract_path))

        name_str = os.path.basename(tarball).replace(".tar.gz", "")
        extracted_dir = os.path.join(extract_path, name_str)
        if os.path.exists(extracted_dir):
            raise DeployException('Cannot unpack %s: %s already exists' % (
                tarball, extracted_dir))

        downloaded = False
        if tarball.startswith("http://") or tarball.startswith("https://"):
            tarball = self.download_and_verify(tarball)
            print("Downloaded file: %s" % tarball)
            downloaded = True

        try:
            with tarfile.open(tarball) as tar:
                tar.extractall(extract_path)
        finally:
            if self.should_clean and downloaded:
                os.remove(tarball)

        print ("Extracted into: %s" % extracted_dir)

        if self.symlink_paths:
            for link_path, file_path in self.symlink_paths.iteritems():
                create_relative_symlink(
                    target=file_path,
                    linkname=os.path.join(extracted_dir, link_path)
                )

        if self.bundles_path:
            extracted_bundles = os.path.join(extracted_dir, 'bundles')
            move_bundles(source=extracted_bundles, dest=self.bundles_path)

            # replace the extracted_bundles dir (which may not be empty if some
            # bundles were skipped) with a symlink to the common dir.
            shutil.rmtree(extracted_bundles)
            create_relative_symlink(
                target=self.bundles_path,
                linkname=extracted_bundles,
            )

        return extracted_dir

    def download_and_verify(self, url):
        tarball = self.download_file(url)

        if self.verify_signature:
            sigfile = self.download_file(url + ".asc")
            subprocess.check_call(["gpg", "--verify", sigfile, tarball])

        return tarball

    def download_file(self, url):
        if not os.path.isdir(self.packages_path):
            os.mkdir(self.packages_path)
        local_filename = os.path.join(self.packages_path,
                                      url.split('/')[-1])
        sys.stdout.write("Downloading %s -> %s..." % (url, local_filename))
        sys.stdout.flush()
        urlretrieve(url, local_filename)
        print ("Done")
        return local_filename


if __name__ == "__main__":
    parser = argparse.ArgumentParser("Deploy a Riot build on a web server.")
    parser.add_argument(
        "-p", "--packages-dir", default="./packages", help=(
            "The directory to download the tarball into. (Default: '%(default)s')"
        )
    )
    parser.add_argument(
        "-e", "--extract-path", default="./deploys", help=(
            "The location to extract .tar.gz files to. (Default: '%(default)s')"
        )
    )
    parser.add_argument(
        "-b", "--bundles-dir", nargs='?', default="./bundles", help=(
            "A directory to move the contents of the 'bundles' directory to. A \
            symlink to the bundles directory will also be written inside the \
            extracted tarball. Example: './bundles'. \
            (Default: '%(default)s')"
        )
    )
    parser.add_argument(
        "-c", "--clean", action="store_true", default=False, help=(
            "Remove .tar.gz files after they have been downloaded and extracted. \
            (Default: %(default)s)"
        )
    )
    parser.add_argument(
        "--include", nargs='*', default=['./config*.json'], help=(
            "Symlink these files into the root of the deployed tarball. \
            Useful for config files and home pages. Supports glob syntax. \
            (Default: '%(default)s')"
        )
    )
    parser.add_argument(
        "tarball", help=(
            "filename of tarball, or URL to download."
        ),
    )

    args = parser.parse_args()

    deployer = Deployer()
    deployer.packages_path = args.packages_dir
    deployer.bundles_path = args.bundles_dir
    deployer.should_clean = args.clean
    for include in args.include:
        deployer.symlink_paths.update({ os.path.basename(pth): pth for pth in glob.iglob(include) })

    deployer.deploy(args.tarball, args.extract_path)

View File

@@ -1,13 +1,12 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # download and unpack a element-web tarball.
 #
 # Allows `bundles` to be extracted to a common directory, and a link to
 # config.json to be added.
-from __future__ import print_function
 import argparse
+import errno
 import os
 import os.path
 import subprocess
@@ -15,21 +14,26 @@ import sys
 import tarfile
 import shutil
 import glob
+from urllib.request import urlretrieve
-try:
-    # python3
-    from urllib.request import urlretrieve
-except ImportError:
-    # python2
-    from urllib import urlretrieve
 class DeployException(Exception):
     pass
 def create_relative_symlink(linkname, target):
     relpath = os.path.relpath(target, os.path.dirname(linkname))
-    print ("Symlink %s -> %s" % (linkname, relpath))
+    print("Symlink %s -> %s" % (linkname, relpath))
-    os.symlink(relpath, linkname)
+    try:
+        os.symlink(relpath, linkname)
+    except OSError as e:
+        if e.errno == errno.EEXIST:
+            # atomic modification
+            os.symlink(relpath, linkname + ".tmp")
+            os.rename(linkname + ".tmp", linkname)
+        else:
+            raise e
 def move_bundles(source, dest):
@@ -50,33 +54,35 @@ def move_bundles(source, dest):
     for f in os.listdir(source):
         dst = os.path.join(dest, f)
         if os.path.exists(dst):
-            print (
+            print(
                 "Skipping bundle. The bundle includes '%s' which we have previously deployed."
                 % f
             )
         else:
             renames[os.path.join(source, f)] = dst
-    for (src, dst) in renames.iteritems():
+    for (src, dst) in renames.items():
-        print ("Move %s -> %s" % (src, dst))
+        print("Move %s -> %s" % (src, dst))
         os.rename(src, dst)
 class Deployer:
     def __init__(self):
         self.packages_path = "."
         self.bundles_path = None
         self.should_clean = False
+        self.symlink_latest = None
         # filename -> symlink path e.g 'config.localhost.json' => '../localhost/config.json'
         self.symlink_paths = {}
         self.verify_signature = True
-    def deploy(self, tarball, extract_path):
+    def fetch(self, tarball, extract_path):
-        """Download a tarball if necessary, and unpack it
+        """Download a tarball, verifies it if needed, and unpacks it
         Returns:
-            (str) the path to the unpacked deployment
+            (str) the path to the unpacked directory
         """
-        print("Deploying %s to %s" % (tarball, extract_path))
+        print("Fetching %s to %s" % (tarball, extract_path))
         name_str = os.path.basename(tarball).replace(".tar.gz", "")
         extracted_dir = os.path.join(extract_path, name_str)
@@ -97,10 +103,15 @@ class Deployer:
             if self.should_clean and downloaded:
                 os.remove(tarball)
-        print ("Extracted into: %s" % extracted_dir)
+        print("Extracted into: %s" % extracted_dir)
+        return extracted_dir
+    def deploy(self, extracted_dir):
+        """Applies symlinks and handles the bundles directory on an extracted tarball"""
+        print("Deploying %s" % extracted_dir)
         if self.symlink_paths:
-            for link_path, file_path in self.symlink_paths.iteritems():
+            for link_path, file_path in self.symlink_paths.items():
                 create_relative_symlink(
                     target=file_path,
                     linkname=os.path.join(extracted_dir, link_path)
@@ -117,7 +128,12 @@ class Deployer:
                 target=self.bundles_path,
                 linkname=extracted_bundles,
             )
-        return extracted_dir
+        if self.symlink_latest:
+            create_relative_symlink(
+                target=extracted_dir,
+                linkname=self.symlink_latest,
+            )
     def download_and_verify(self, url):
         tarball = self.download_file(url)
@@ -139,6 +155,7 @@ class Deployer:
         print ("Done")
         return local_filename
 if __name__ == "__main__":
     parser = argparse.ArgumentParser("Deploy a Riot build on a web server.")
     parser.add_argument(
@@ -173,8 +190,15 @@ if __name__ == "__main__":
         )
     )
     parser.add_argument(
-        "tarball", help=(
+        "-s", "--symlink", dest="symlink", default="./latest", help=(
-            "filename of tarball, or URL to download."
+            "Write a symlink to this location pointing to the extracted tarball. \
+            New builds will keep overwriting this symlink. The symlink will point \
+            to the webapp directory INSIDE the tarball."
+        )
+    )
+    parser.add_argument(
+        "target", help=(
+            "filename of extracted directory, tarball, or URL to download."
         ),
     )
@@ -184,8 +208,18 @@ if __name__ == "__main__":
     deployer.packages_path = args.packages_dir
     deployer.bundles_path = args.bundles_dir
     deployer.should_clean = args.clean
+    deployer.symlink_latest = args.symlink
     for include in args.include:
         deployer.symlink_paths.update({ os.path.basename(pth): pth for pth in glob.iglob(include) })
-    deployer.deploy(args.tarball, args.extract_path)
+    if os.path.isdir(args.target):
+        # If the given directory contains a single directory then use that instead, the ci package wraps in an extra dir
+        files = os.listdir(args.target)
+        if len(files) == 1 and os.path.isdir(os.path.join(args.target, files[0])):
+            extracted_dir = os.path.join(args.target, files[0])
+        else:
+            extracted_dir = args.target
+    else:
+        extracted_dir = deployer.fetch(args.target, args.extract_path)
+    deployer.deploy(extracted_dir)
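
The reworked create_relative_symlink above now tolerates an existing link: when os.symlink fails with EEXIST it writes a temporary ".tmp" symlink and renames it over the old one, and rename() replaces its destination atomically on POSIX filesystems, so the "latest" link never disappears mid-deploy. A standalone sketch of the same pattern, with illustrative names only:

    import errno
    import os

    def replace_symlink(target, linkname):
        try:
            os.symlink(target, linkname)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
            # An old link exists: create a temporary one and rename it into
            # place, swapping the link atomically instead of leaving a gap.
            os.symlink(target, linkname + ".tmp")
            os.rename(linkname + ".tmp", linkname)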

View File

@@ -12,10 +12,9 @@
 # - flask
 #
 from __future__ import print_function
-import json, requests, tarfile, argparse, os, errno
+import requests, argparse, os, errno
 import time
 import traceback
-from urlparse import urljoin
 import glob
 import re
 import shutil
@@ -30,22 +29,11 @@ app = Flask(__name__)
 deployer = None
 arg_extract_path = None
-arg_symlink = None
 arg_webhook_token = None
 arg_api_token = None
 workQueue = Queue()
-def create_symlink(source, linkname):
-    try:
-        os.symlink(source, linkname)
-    except OSError, e:
-        if e.errno == errno.EEXIST:
-            # atomic modification
-            os.symlink(source, linkname + ".tmp")
-            os.rename(linkname + ".tmp", linkname)
-        else:
-            raise e
 def req_headers():
     return {
@@ -128,7 +116,7 @@ def on_receive_buildkite_poke():
     artifacts_resp = requests.get(artifacts_url, headers=req_headers())
     artifacts_resp.raise_for_status()
     artifacts_array = artifacts_resp.json()
     artifact_to_deploy = None
     for artifact in artifacts_array:
         if re.match(r"dist/.*.tar.gz", artifact['path']):
@@ -173,7 +161,6 @@ def deploy_buildkite_artifact(artifact, pipeline_name, build_num):
         traceback.print_exc()
         abort(400, e.message)
-    create_symlink(source=extracted_dir, linkname=arg_symlink)
 def deploy_tarball(artifact, build_dir):
     """Download a tarball from jenkins and unpack it
@@ -274,7 +261,6 @@ if __name__ == "__main__":
     args = parser.parse_args()
     arg_extract_path = args.extract
-    arg_symlink = args.symlink
     arg_webbook_token = args.webhook_token
     arg_api_token = args.api_token
     arg_buildkite_org = args.buildkite_org
@@ -285,6 +271,7 @@ if __name__ == "__main__":
     deployer = Deployer()
     deployer.bundles_path = args.bundles_dir
     deployer.should_clean = args.clean
+    deployer.symlink_latest = args.symlink
     for include in args.include:
         deployer.symlink_paths.update({ os.path.basename(pth): pth for pth in glob.iglob(include) })
@@ -298,7 +285,7 @@ if __name__ == "__main__":
         (args.port,
          arg_extract_path,
          " (clean after)" if deployer.should_clean else "",
-         arg_symlink,
+         args.symlink,
          deployer.symlink_paths,
         )
    )

View File

@@ -631,6 +631,10 @@ module.exports = (env, argv) => {
             new SentryCliPlugin({
                 release: process.env.VERSION,
                 include: "./webapp/bundles",
+                errorHandler: (err, invokeErr, compilation) => {
+                    compilation.warnings.push('Sentry CLI Plugin: ' + err.message);
+                    console.log(`::warning title=Sentry error::${err.message}`);
+                },
             }),
             new webpack.EnvironmentPlugin(['VERSION']),
         ].filter(Boolean),
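
The errorHandler added above turns Sentry sourcemap-upload failures into soft failures: the error is recorded as a Webpack compilation warning and echoed as a GitHub Actions "::warning" annotation instead of aborting the build. The "::warning title=...::message" string is a standard GitHub Actions workflow command printed to stdout; a minimal sketch of emitting the same annotation from a Python step (the helper name is illustrative, not part of the commit):

    def gha_warning(title, message):
        # GitHub Actions picks this line up from stdout and shows it as a
        # non-fatal warning annotation on the workflow run.
        print(f"::warning title={title}::{message}")

    gha_warning("Sentry error", "sourcemap upload failed")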