From 7e6ffe5dd73dcaf38fbf38bdaa7698f66d441713 Mon Sep 17 00:00:00 2001
From: Teemu Suo-Anttila
Date: Fri, 24 Aug 2018 12:02:38 +0300
Subject: [PATCH] Fix build report generation scripts (#11140)

---
 scripts/BuildDemos.py                        | 109 +++++++++-----
 scripts/BuildHelpers.py                      | 141 ++++++++++---------
 scripts/DeployHelpers.py                     |  12 +-
 scripts/GenerateBuildTestAndStagingReport.py |  64 +++------
 scripts/GeneratePostPublishReport.py         |  59 ++++----
 scripts/GeneratePublishReportPart1.py        |  45 +++---
 6 files changed, 235 insertions(+), 195 deletions(-)

diff --git a/scripts/BuildDemos.py b/scripts/BuildDemos.py
index a23bb96a4a..a899de1505 100644
--- a/scripts/BuildDemos.py
+++ b/scripts/BuildDemos.py
@@ -16,14 +16,21 @@ from xml.etree.ElementTree import ElementTree
 
 # Validated demos. name -> git url
 demos = {
-    "dashboard" : ("https://github.com/vaadin/dashboard-demo.git","7.7"),
+    "dashboard" : ("https://github.com/vaadin/dashboard-demo.git", "7.7"),
     "parking" : ("https://github.com/vaadin/parking-demo.git", "7.7"),
     "addressbook" : ("https://github.com/vaadin/addressbook.git", "7.7"),
-    "grid-gwt" : ("https://github.com/vaadin/grid-gwt.git", "7.7"),
     "sampler" : ("demos/sampler", "7.7")
 #    "my-demo" : ("my_demo_url_or_path", "my-demo-dev-branch")
 }
 
+# List of built archetypes
+archetypes = [
+    "vaadin-archetype-widget",
+    "vaadin-archetype-application",
+    "vaadin-archetype-application-example",
+    "vaadin-archetype-application-multimodule"
+]
+
 status_dump = {"messages": []}
 
 def dump_status(error_occurred):
@@ -33,6 +40,7 @@ def dump_status(error_occurred):
 def log_status(log_string):
     status_dump["messages"].append(log_string)
     print(log_string)
+    sys.stdout.flush()
 
 def checkout(folder, url, repoBranch = "master"):
     Repo.clone_from(url, join(resultPath, folder), branch = repoBranch)
@@ -45,51 +53,76 @@ if __name__ == "__main__":
         log_status("BuildDemos depends on gitpython. Install it with `pip install gitpython`")
         dump_status(True)
         sys.exit(1)
-    from BuildHelpers import updateRepositories, mavenValidate, copyWarFiles, getLogFile, removeDir, getArgs, mavenInstall, resultPath, readPomFile, parser
+    from BuildHelpers import mavenValidate, copyWarFiles, getLogFile, removeDir, getArgs, resultPath, parser, dockerWrap, generateArchetype
     from DeployHelpers import deployWar
     # Add command line agrument for ignoring failing demos
    parser.add_argument("--ignore", type=str, help="Ignored demos", default="")
+
+    # Control to skip demos and archetypes
+    parser.add_argument("--skipDemos", action="store_true", help="Skip building demos")
+    parser.add_argument("--skipArchetypes", action="store_true", help="Skip building archetypes")
+
     args = getArgs()
     demosFailed = False
     ignoredDemos = args.ignore.split(",")
 
     wars = []
-    for demo in demos:
-        print("Validating demo %s" % (demo))
-        try:
-            repo = demos[demo]
-            if (isinstance(repo, tuple)):
-                checkout(demo, repo[0], repo[1])
-            else:
-                checkout(demo, repo)
-            if hasattr(args, "fwRepo") and args.fwRepo is not None:
-                updateRepositories(join(resultPath, demo), args.fwRepo)
-            if hasattr(args, "pluginRepo") and args.pluginRepo is not None:
-                updateRepositories(join(resultPath, demo), args.pluginRepo, postfix="plugin")
-            mavenValidate(demo, logFile=getLogFile(demo))
-            wars.extend(copyWarFiles(demo))
-            log_status("%s demo validation succeeded!"
-                % (demo))
-        except Exception as e:
-            log_status("%s demo validation failed: %s" % (demo, e))
-            if demo not in ignoredDemos:
-                demosFailed = True
-        except EnvironmentError as e:
-            log_status("%s demo validation failed: %s" % (demo, e))
-            if demo not in ignoredDemos:
+    if not args.skipDemos:
+        for demo in demos:
+            print("Validating demo %s" % (demo))
+            try:
+                repo = demos[demo]
+                if (isinstance(repo, tuple)):
+                    checkout(demo, repo[0], repo[1])
+                else:
+                    checkout(demo, repo)
+                mavenValidate(demo, logFile=getLogFile(demo))
+                wars.extend(copyWarFiles(demo))
+                log_status("%s demo validation succeeded!" % (demo))
+            except Exception as e:
+                log_status("%s demo validation failed: %s" % (demo, e))
+                if demo not in ignoredDemos:
+                    demosFailed = True
+            except EnvironmentError as e:
+                log_status("%s demo validation failed: %s" % (demo, e))
+                if demo not in ignoredDemos:
+                    demosFailed = True
+            try:
+                removeDir(demo)
+            except:
+                pass
+            log_status("")
+
+    if not args.skipArchetypes:
+        for archetype in archetypes:
+            artifactId = "test-%s-%s" % (archetype, args.version.replace(".", "-"))
+            try:
+                log = getLogFile(archetype)
+                generateArchetype(archetype, artifactId, args.pluginRepo, log)
+                mavenValidate(artifactId, logFile=log)
+                wars.extend(copyWarFiles(artifactId, name=archetype))
+                log_status("%s validation succeeded!" % (archetype))
+            except Exception as e:
+                print("Archetype %s build failed:" % (archetype), e)
+                if archetype not in ignoredDemos:
+                    demosFailed = True
+
+            try:
+                removeDir(artifactId)
+            except:
+                pass
+            log_status("")
+
+    if args.deploy_mode:
+        for war in wars:
+            try:
+                deployWar(war)
+            except Exception as e:
+                log_status("War %s failed to deploy: %s" % (war, e))
                 demosFailed = True
-        try:
-            removeDir(demo)
-        except:
-            pass
-        print("")
-
-    for war in wars:
-        try:
-            deployWar(war)
-        except Exception as e:
-            log_status("War %s failed to deploy: %s" % (war, e))
-            demosFailed = True
+    else:
+        dockerWrap(args.version)
 
     if demosFailed:
         dump_status(True)
diff --git a/scripts/BuildHelpers.py b/scripts/BuildHelpers.py
index 02fddc7af3..216f8e1646 100644
--- a/scripts/BuildHelpers.py
+++ b/scripts/BuildHelpers.py
@@ -10,7 +10,6 @@ from shutil import copy, rmtree
 from glob import glob
 
 # Directory where the resulting war files are stored
-# TODO: deploy results
 resultPath = join("result", "demos")
 
 if not exists(resultPath):
@@ -39,26 +38,27 @@ def parseArgs():
     args = parser.parse_args()
     return args
 
-# Function for determining the path for maven executable
-def getMavenCommand():
+# Function for determining the path for an executable
+def getCommand(command):
     # This method uses .split("\n")[0] which basically chooses the first result where/which returns.
     # Fixes the case with multiple maven installations available on PATH
     if platform.system() == "Windows":
         try:
-            return subprocess.check_output(["where", "mvn.cmd"], universal_newlines=True).split("\n")[0]
+            return subprocess.check_output(["where", "%s.cmd" % (command)], universal_newlines=True).split("\n")[0]
         except:
             try:
-                return subprocess.check_output(["where", "mvn.bat"], universal_newlines=True).split("\n")[0]
+                return subprocess.check_output(["where", "%s.bat" % (command)], universal_newlines=True).split("\n")[0]
             except:
-                print("Unable to locate mvn with where. Is the maven executable in your PATH?")
+                print("Unable to locate command %s with where. Is it in your PATH?" % (command))
     else:
         try:
-            return subprocess.check_output(["which", "mvn"], universal_newlines=True).split("\n")[0]
+            return subprocess.check_output(["which", command], universal_newlines=True).split("\n")[0]
         except:
-            print("Unable to locate maven executable with which. Is the maven executable in your PATH?")
+            print("Unable to locate command %s with which. Is it in your PATH?" % (command))
     return None
 
-mavenCmd = getMavenCommand()
+mavenCmd = getCommand("mvn")
+dockerCmd = getCommand("docker")
 
 # Get command line arguments. Parses arguments if needed.
 def getArgs():
@@ -102,55 +102,26 @@ def copyWarFiles(artifactId, resultDir = resultPath, name = None):
         copiedWars.append(join(resultDir, deployName))
     return copiedWars
 
-def readPomFile(pomFile):
-    # pom.xml namespace workaround
-    root = ElementTree.parse(pomFile).getroot()
-    nameSpace = root.tag[1:root.tag.index('}')]
-    ElementTree.register_namespace('', nameSpace)
-
-    # Read the pom.xml correctly
-    return ElementTree.parse(pomFile), nameSpace
-
-# Recursive pom.xml update script
-def updateRepositories(path, repoUrl = None, version = None, postfix = "staging"):
-    # If versions are not supplied, parse arguments
-    if version is None:
-        version = getArgs().version
-
-    # Read pom.xml
-    pomXml = join(path, "pom.xml")
-    if isfile(pomXml):
-        # Read the pom.xml correctly
-        tree, nameSpace = readPomFile(pomXml)
-
-        # NameSpace needed for finding the repositories node
-        repoNode = tree.getroot().find("{%s}repositories" % (nameSpace))
-    else:
-        return
-
-    if repoNode is not None:
-        print("Add staging repositories to " + pomXml)
-
-        # Add framework staging repository
-        addRepo(repoNode, "repository", "vaadin-%s-%s" % (version, postfix), repoUrl)
-
-        # Find the correct pluginRepositories node
-        pluginRepo = tree.getroot().find("{%s}pluginRepositories" % (nameSpace))
-        if pluginRepo is None:
-            # Add pluginRepositories node if needed
-            pluginRepo = ElementTree.SubElement(tree.getroot(), "pluginRepositories")
-
-        # Add plugin staging repository
-        addRepo(pluginRepo, "pluginRepository", "vaadin-%s-%s" % (version, postfix), repoUrl)
-
-        # Overwrite the modified pom.xml
-        tree.write(pomXml, encoding='UTF-8')
-
-    # Recursive pom.xml search.
-    for i in listdir(path):
-        file = join(path, i)
-        if isdir(file):
-            updateRepositories(join(path, i), repoUrl, version, postfix)
+# Generates and modifies a maven pom file
+def generateArchetype(archetype, artifactId, repo, logFile, group="testpkg", archetypeGroup="com.vaadin"):
+    # Generate the required command line for archetype generation
+    args = getArgs()
+    cmd = [mavenCmd, "archetype:generate"]
+    cmd.append("-DarchetypeGroupId=%s" % (archetypeGroup))
+    cmd.append("-DarchetypeArtifactId=%s" % (archetype))
+    cmd.append("-DarchetypeVersion=%s" % (args.version))
+    if repo is not None:
+        cmd.append("-DarchetypeRepository=%s" % repo)
+    cmd.append("-DgroupId=%s" % (group))
+    cmd.append("-DartifactId=%s" % (artifactId))
+    cmd.append("-Dversion=1.0-SNAPSHOT")
+    cmd.append("-DinteractiveMode=false")
+    if hasattr(args, "maven") and args.maven is not None:
+        cmd.extend(args.maven.strip('"').split(" "))
+
+    # Generate pom.xml
+    print("Generating archetype %s" % (archetype))
+    subprocess.check_call(cmd, cwd=resultPath, stdout=logFile)
 
 # Add a repository of repoType to given repoNode with id and URL
 def addRepo(repoNode, repoType, id, url):
@@ -170,9 +141,51 @@ def removeDir(subdir):
         return
     rmtree(join(resultPath, subdir))
 
-def mavenInstall(pomFile, jarFile = None, mvnCmd = mavenCmd, logFile = sys.stdout):
-    cmd = [mvnCmd, "install:install-file"]
-    cmd.append("-Dfile=%s" % (jarFile if jarFile is not None else pomFile))
-    cmd.append("-DpomFile=%s" % (pomFile))
-    print("executing: %s" % (" ".join(cmd)))
-    subprocess.check_call(cmd, stdout=logFile)
+def dockerWrap(imageVersion, imageName = "demo-validation"):
+    dockerFileContent = """FROM jtomass/alpine-jre-bash:latest
+LABEL maintainer="FrameworkTeam"
+
+COPY ./*.war /var/lib/jetty/webapps/
+USER root
+RUN mkdir /opt
+RUN chown -R jetty:jetty /opt
+COPY ./index-generate.sh /opt/
+RUN chmod +x /opt/index-generate.sh
+
+USER jetty
+RUN /opt/index-generate.sh
+
+RUN mkdir -p /var/lib/jetty/webapps/root && \
+    cp /opt/index.html /var/lib/jetty/webapps/root && \
+    chmod 644 /var/lib/jetty/webapps/root/index.html
+
+EXPOSE 8080
+"""
+    indexGenerateScript = """#!/bin/ash
+
+wars="/var/lib/jetty/webapps"
+OUTPUT="/opt/index.html"
+
+echo "" >> $OUTPUT
+"""
+    with open(join(resultPath, "Dockerfile"), "w") as dockerFile:
+        dockerFile.write(dockerFileContent)
+    with open(join(resultPath, "index-generate.sh"), "w") as indexScript:
+        indexScript.write(indexGenerateScript)
+    # build image
+    cmd = [dockerCmd, "build", "-t", "%s:%s" % (imageName, imageVersion), resultPath]
+    subprocess.check_call(cmd)
+    # save to tgz
+    cmd = [dockerCmd, "save", imageName]
+    dockerSave = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+    subprocess.check_call(["gzip"], stdin=dockerSave.stdout, stdout=open(join(resultPath, "%s-%s.tgz" % (imageName, imageVersion)), "w"))
+    dockerSave.wait()
+    # delete from docker
+    cmd = [dockerCmd, "rmi", "%s:%s" % (imageName, imageVersion)]
+    subprocess.check_call(cmd)
diff --git a/scripts/DeployHelpers.py b/scripts/DeployHelpers.py
index 038e187b8b..e8079c1df1 100644
--- a/scripts/DeployHelpers.py
+++ b/scripts/DeployHelpers.py
@@ -14,9 +14,13 @@ from os.path import join, expanduser, basename
 from BuildHelpers import parser, getArgs
 from time import sleep
 
-parser.add_argument("--deployUrl", help="Wildfly management URL")
-parser.add_argument("--deployUser", help="Deployment user", default=None)
-parser.add_argument("--deployPass", help="Deployment password", default=None)
+group = parser.add_mutually_exclusive_group(required=True)
+group.add_argument("--deploy", dest="deploy_mode", help="Deploy to a remote Wildfly instance", action="store_true")
+group.add_argument("--docker", dest="deploy_mode", help="Wrap results into a Docker image", action="store_false")
+
+parser.add_argument("--deployUrl", help="Wildfly management URL to use with --deploy")
+parser.add_argument("--deployUser", help="Deployment user to use with --deploy", default=None)
+parser.add_argument("--deployPass", help="Deployment password to use with --deploy", default=None)
 
 serverUp = None
 
@@ -120,4 +124,4 @@ def getAuth():
 # Read the deploy url file and return the url
 def getUrl():
     return getArgs().deployUrl
- 
+
diff --git a/scripts/GenerateBuildTestAndStagingReport.py b/scripts/GenerateBuildTestAndStagingReport.py
index 25a61ed601..e7052e2095 100644
--- a/scripts/GenerateBuildTestAndStagingReport.py
+++ b/scripts/GenerateBuildTestAndStagingReport.py
@@ -1,10 +1,8 @@
 from BuildDemos import demos
-from BuildArchetypes import archetypes, getDeploymentContext
 import argparse, requests, json, subprocess, re, pickle
 
 parser = argparse.ArgumentParser()
 parser.add_argument("version", type=str, help="Vaadin version that was just built")
-parser.add_argument("deployUrl", type=str, help="Base url of the deployment server")
 parser.add_argument("teamcityUser", type=str, help="Teamcity username to use")
 parser.add_argument("teamcityPassword", type=str, help="Password for given teamcity username")
 
@@ -13,9 +11,7 @@ parser.add_argument("teamcityUrl", type=str, help="Address to the teamcity serve
 parser.add_argument("buildTypeId", type=str, help="The ID of this build step")
 parser.add_argument("buildId", type=str, help="ID of the build to generate this report for")
 
-parser.add_argument("frameworkRepoUrl", type=str, help="URL to the framework staging repository")
-parser.add_argument("archetypeRepoUrl", type=str, help="URL to the archetype staging repository")
-parser.add_argument("pluginRepoUrl", type=str, help="URL to the plugin staging repository")
+parser.add_argument("stagingRepoUrl", type=str, help="URL to the staging repository")
 
 args = parser.parse_args()
 
 buildResultUrl = "http://{}/viewLog.html?buildId={}&tab=buildResultsDiv&buildTypeId={}".format(args.teamcityUrl, args.buildId, args.buildTypeId)
@@ -56,22 +52,14 @@ def getTestStatusHtml():
     else:
         return createTableRow(traffic_light.format(color="red"), "Test status: there are " + str(test_failures_json["count"]) + " failing tests, check the build report".format(buildResultUrl))
 
-def getDemoValidationStatusHtml():
-    status = pickle.load(open("result/demo_validation_status.pickle", "rb"))
-    if status["error"]:
-        return createTableRow(traffic_light.format(color="red"), getHtmlList(status["messages"]))
-    else:
-        return createTableRow(traffic_light.format(color="green"), getHtmlList(status["messages"]))
-
-def getDemoLinksHtml():
-    demos_html = "Try demos"
-    link_list = list(map(lambda demo: "{demoName}".format(url=args.deployUrl, demoName=demo, version=args.version), demos))
-    return demos_html + getHtmlList(link_list)
-
-def getArchetypeLinksHtml():
-    archetypes_html = "Try archetypes"
-    link_list = list(map(lambda archetype: "{archetypeName}".format(url=args.deployUrl, archetypeName=archetype, context=getDeploymentContext(archetype, args.version)), archetypes))
-    return archetypes_html + getHtmlList(link_list)
+def getApiDiffHtml():
+    apidiff_html = "Check API diff"
+    modules = [
+        "client", "client-compiler",
+        "server", "shared", "widgets"
+    ]
+    link_list = list(map(lambda module: "{}".format(args.teamcityUrl, args.buildTypeId, args.buildId, module, module), modules))
+    return apidiff_html + getHtmlList(link_list)
 
 def getDirs(url):
     page = requests.get(url)
@@ -104,11 +92,11 @@ def checkStagingContents(url, allowedArtifacts):
     allowedDirs = getAllowedArtifactPaths(allowedArtifacts)
     return set(dirs) == set(allowedDirs)
 
-def getStagingContentsHtml(repoUrl, allowedArtifacts, name):
+def getStagingContentsHtml(repoUrl, allowedArtifacts):
     if checkStagingContents(repoUrl, allowedArtifacts):
-        return createTableRow(traffic_light.format(color="green"), "No extra artifacts found in the {} staging repository. Link to the repository.".format(name, repoUrl))
+        return createTableRow(traffic_light.format(color="green"), "Expected artifacts found in the staging repository. Link to the repository.".format(repoUrl))
     else:
-        return createTableRow(traffic_light.format(color="red"), "Extra artifacts found in the {} staging repository. Link to the repository.".format(name, repoUrl))
+        return createTableRow(traffic_light.format(color="red"), "Extraneous or missing artifacts in the staging repository. Link to the repository.".format(repoUrl))
 
 def completeArtifactName(artifactId, version):
     return 'com/vaadin/' + artifactId + '/' + version
@@ -117,9 +105,7 @@ def completeArtifactNames(artifactIds, version):
     return list(map(lambda x: completeArtifactName(x, version), artifactIds))
 
-allowedPluginArtifacts = completeArtifactNames([ 'vaadin-maven-plugin' ], args.version)
-allowedArchetypeArtifacts = completeArtifactNames([ 'vaadin-archetypes', 'vaadin-archetype-application', 'vaadin-archetype-application-multimodule', 'vaadin-archetype-application-example', 'vaadin-archetype-widget', 'vaadin-archetype-liferay-portlet' ], args.version)
-allowedFrameworkArtifacts = completeArtifactNames([ 'vaadin-root', 'vaadin-bom', 'vaadin-shared', 'vaadin-server', 'vaadin-client', 'vaadin-client-compiler', 'vaadin-client-compiled', 'vaadin-push', 'vaadin-themes', 'vaadin-widgets' ], args.version)
+allowedArtifacts = completeArtifactNames([ 'vaadin-maven-plugin', 'vaadin-archetypes', 'vaadin-archetype-application', 'vaadin-archetype-application-multimodule', 'vaadin-archetype-application-example', 'vaadin-archetype-widget', 'vaadin-archetype-liferay-portlet', 'vaadin-root', 'vaadin-shared', 'vaadin-server', 'vaadin-client', 'vaadin-client-compiler', 'vaadin-client-compiled', 'vaadin-push', 'vaadin-themes', 'vaadin-widgets', 'vaadin-testbench-api', 'vaadin-bom' ], args.version)
 
 content = ""
 traffic_light = ""
@@ -144,30 +130,24 @@ except subprocess.CalledProcessError as e:
     raise e
 
 # check staging repositories don't contain extra artifacts
-content += getStagingContentsHtml(args.frameworkRepoUrl, allowedFrameworkArtifacts, "framework")
-content += getStagingContentsHtml(args.archetypeRepoUrl, allowedArchetypeArtifacts, "archetype")
-content += getStagingContentsHtml(args.pluginRepoUrl, allowedPluginArtifacts, "plugin")
+content += getStagingContentsHtml(args.stagingRepoUrl, allowedArtifacts)
 
 content += createTableRow("", "Manual checks before publishing")
 
-# try demos
-content += createTableRow("", getDemoLinksHtml())
-content += createTableRow("", getArchetypeLinksHtml())
+
+content += createTableRow("", "If changing between branches or phases (stable, maintenance, alpha, beta, rc), check the phase change checklist")
 
 # link to release notes
 content += createTableRow("", "Check release notes".format(args.teamcityUrl, args.buildTypeId, args.buildId))
+
 # link to api diff
-content += createTableRow("", "API Diff".format(args.teamcityUrl, args.buildTypeId, args.buildId))
+content += createTableRow("", getApiDiffHtml())
+
+# check that GitHub issues are in the correct status
+content += createTableRow("", "Check that closed GitHub issues have correct milestone")
 
-# check that trac tickets are in the correct status
-content += createTableRow("", "Check that trac tickets have correct status")
-# pending release tickets without milestone
-content += createTableRow("", "Pending-release tickets without milestone")
+content += createTableRow("", "Check demos from docker image: zcat < demo-validation-{version}.tgz |docker load && docker run --rm -p 8080:8080 demo-validation:{version} || docker rmi demo-validation:{version}".format(version=args.version))
 
 content += createTableRow("", "Preparations before publishing")
 
-# close trac milestone
-content += createTableRow("", "Close Trac Milestone (deselect \"retarget tickets\")".format(version=args.version))
-# verify pending release tickets still have milestone
-content += createTableRow("", "Verify pending release tickets still have milestone {version}".format(version=args.version))
 
 # link to build dependencies tab to initiate publish step
 content += createTableRow("", "Start Publish Release from dependencies tab".format(args.teamcityUrl, args.buildId, args.buildTypeId))
" + column + "
" + column + "
" +def getTrafficLight(b): + return traffic_light.format(color="green") if b else traffic_light.format(color="red") -# Batch update tickets in trac -content += createTableRow("", "Batch update tickets in Trac") +def checkArchetypeMetaData(archetypeMetadataUrl, version): + archetype_metadata_request = requests.get(archetypeMetadataUrl) + if archetype_metadata_request.status_code != 200: + return createTableRow(traffic_light.format(color="black"), "Check archetype metadata: unable to retrieve metadata from {url}".format(url=archetypeMetadataUrl)) + else: + if "version=\"{version}\"".format(version=version) in archetype_metadata_request.content: + return createTableRow(traffic_light.format(color="green"), "Check archetype metadata: metadata is correct for {url}".format(url=archetypeMetadataUrl)) + else: + return createTableRow(traffic_light.format(color="red"), "Check archetype metadata: metadata seems to be incorrect for {url}".format(url=archetypeMetadataUrl)) -# Create milestone for next release -content += createTableRow("", "Create milestone for next release") +content = "
" + +tagOk = checkUrlStatus("https://github.com/vaadin/framework/releases/tag/{ver}".format(ver=args.version)) +content += createTableRow(getTrafficLight(tagOk), "Tag ok on github.com") # Tag and pin build content += createTableRow("", "Tag and pin build".format(url=buildResultUrl)) # Traffic light for archetype metadata -archetypeMetadataUrl = "" -if not prerelease: - archetypeMetadataUrl = "http://vaadin.com/download/maven-archetypes.xml" -else: - archetypeMetadataUrl ="http://vaadin.com/download/maven-archetypes-prerelease.xml" - -archetype_metadata_request = requests.get(archetypeMetadataUrl) -if archetype_metadata_request.status_code != 200: - content += createTableRow(traffic_light.format(color="black"), "Check archetype metadata: unable to retrieve metadata".format(url=archetypeMetadataUrl)) -else: - if "version=\"{version}\"".format(version=args.version) in archetype_metadata_request.content: - content += createTableRow(traffic_light.format(color="green"), "Check archetype metadata: metadata is correct".format(url=archetypeMetadataUrl)) - else: - content += createTableRow(traffic_light.format(color="red"), "Check archetype metadata: metadata is incorrect".format(url=archetypeMetadataUrl)) - -# TODO GitHub milestones +content += checkArchetypeMetaData("http://vaadin.com/download/eclipse-maven-archetypes.xml", args.version) +if prerelease: + content += checkArchetypeMetaData("http://vaadin.com/download/maven-archetypes-prerelease.xml", args.version) +content += createTableRow("", "Optionally check that old Eclipse metadata still refers to Vaadin 7") +content += createTableRow("", "Note that archetype metadata checks do not verify that the relevant sections are not commented out when changing from pre-release to stable and back!") + +content += createTableRow("", "Build and deploy new sampler if necessary") # Inform marketing and PO content += createTableRow("", "Inform marketing and PO about the release") @@ -57,9 +62,9 @@ content += createTableRow("", "Inform marketing and PO about the release") content += createTableRow("", "Update vaadin.version.latest and vaadin.version.next parameters in TeamCity".format(args.teamcityUrl, args.projectId)) # Link to GH release notes -content += createTableRow("", "Write release notes in GH") +content += createTableRow("", "Finish and publish release notes in GH") content += "
" with open("result/report.html", "wb") as f: - f.write(content) + f.write(content) diff --git a/scripts/GeneratePublishReportPart1.py b/scripts/GeneratePublishReportPart1.py index df9405bb51..272cfd8e8d 100644 --- a/scripts/GeneratePublishReportPart1.py +++ b/scripts/GeneratePublishReportPart1.py @@ -10,11 +10,11 @@ from os.path import exists, isdir from os import makedirs metadataChecks = { - 'https://vaadin.com/download/LATEST7': '^7\..*', - 'https://vaadin.com/download/VERSIONS_7': '^7\..*', - 'https://vaadin.com/download/release/7.7/LATEST': '^7\..*', + 'https://vaadin.com/download/LATEST8': '^8\..*', 'https://vaadin.com/download/LATEST': '^6\..*', - 'https://vaadin.com/download/PRERELEASES': '^8\..*' + 'https://vaadin.com/download/LATEST7': '^{ver}', + 'https://vaadin.com/download/VERSIONS_7': '^{ver}', + 'https://vaadin.com/download/release/7.7/LATEST':'^{ver}' } parser = argparse.ArgumentParser(description="Post-publish report generator") @@ -36,10 +36,9 @@ elif not isdir(resultPath): print("Result path is not a directory.") sys.exit(1) +# Latest 7 checks based on current version number. (major, minor, maintenance) = args.version.split(".", 2) -prerelease = "." in maintenance -if prerelease: - maintenance = maintenance.split('.')[0] +prerelease = ',' in maintenance def checkUrlContents(url, regexp): r = requests.get(url) @@ -51,9 +50,11 @@ def checkUrlStatus(url): metadataOk = True for url in metadataChecks: - metadataOk = metadataOk and checkUrlContents(url, metadataChecks[url].format(ver=args.version)) + pattern = metadataChecks[url].format(ver=args.version) + print("Checking: %s with pattern %s" % (url, pattern)) + metadataOk = metadataOk and checkUrlContents(url, pattern) -tagOk = checkUrlStatus("https://github.com/vaadin/vaadin/releases/tag/{ver}".format(ver=args.version)) +tagOk = checkUrlStatus("https://github.com/vaadin/framework/releases/tag/{ver}".format(ver=args.version)) if not prerelease: downloadPageOk = checkUrlStatus("https://vaadin.com/download/release/{maj}.{min}/{ver}/".format(maj=major, min=minor, ver=args.version)) @@ -65,30 +66,34 @@ content = """ - -""".format(metadataOk=getTrafficLight(metadataOk), tagOk=getTrafficLight(tagOk), downloadPageOk=getTrafficLight(downloadPageOk)) +""".format(metadataOk=getTrafficLight(metadataOk), downloadPageOk=getTrafficLight(downloadPageOk)) mavenUrl = "" if not prerelease: - mavenUrl = "http://repo1.maven.org/maven2/com/vaadin/vaadin-server/{ver}".format(ver=args.version) + mavenUrl = "http://repo1.maven.org/maven2/com/vaadin/vaadin-server/" content += "".format(ver=args.version, mvnUrl=mavenUrl) else: - mavenUrl = "http://maven.vaadin.com/vaadin-prereleases/com/vaadin/vaadin-server/{ver}".format(ver=args.version) + mavenUrl = "http://maven.vaadin.com/vaadin-prereleases/com/vaadin/vaadin-server/" content += "".format(ver=args.version, mvnUrl=mavenUrl) -content += "".format(version=args.version) +content += "" -if not prerelease: - content += '' - -content += """ - -""".format(version=args.version) +#content += """ +# +#""".format(version=args.version) if not prerelease: content += '' +content += "" + +# close GitHub milestone +content += "" + +# release notes +content += "" + content += """
{metadataOk}Metadata ok on vaadin.com
{tagOk}Tag ok on github.com
{downloadPageOk}Download folder on vaadin.com contains the version
Check {ver} is published to maven.org (might take a while)
Check {ver} is published as prerelease to maven.vaadin.com
Add version {version} to Trac
Create milestone for next version in GitHub
Set latest version to default
Verify uploaded to test.vaadin.com
Verify uploaded to test.vaadin.com
Verify API version list updated
Run the generated tag_repositories.sh script
Close GitHub Milestone and create one for next version
Prepare release notes in GH

Start Post-Publish Release from dependencies tab

-- 2.39.5