From 8a32f85bbe566480bcc46b1dbd03b88e8d442d6e Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 13:23:25 +0000 Subject: [PATCH 01/22] Removed deployer, updated tempest tests, added job script, rearranged directory tree --- scripts/bin/get-results-html.sh | 10 + scripts/bin/get-tests.sh | 47 ++ scripts/bin/parallel-test-runner.sh | 141 +++++ scripts/bin/run-all-tests.sh | 108 ++++ scripts/bin/run.sh | 60 ++ scripts/bin/subunit2html.py | 727 +++++++++++++++++++++++++ scripts/bin/utils.sh | 105 ++++ scripts/job/.neutron-ovs-config.sh.swp | Bin 0 -> 12288 bytes scripts/job/common-job-script.sh | 177 ++++++ scripts/job/neutron-ovs-config.sh | 15 + scripts/job/ovs-config.sh | 17 + scripts/logs/collect_logs.sh | 27 + scripts/logs/utils.sh | 191 +++++++ scripts/logs/wsmancmd.py | 171 ++++++ templates/bundle.template | 79 +++ tests/neutron-ovs/excluded_tests.txt | 19 + tests/neutron-ovs/included_tests.txt | 40 ++ tests/neutron-ovs/isolated_tests.txt | 9 + tests/ovs/excluded_tests.txt | 0 tests/ovs/included_tests.txt | 11 + tests/ovs/isolated_tests.txt | 0 21 files changed, 1954 insertions(+) create mode 100755 scripts/bin/get-results-html.sh create mode 100755 scripts/bin/get-tests.sh create mode 100755 scripts/bin/parallel-test-runner.sh create mode 100755 scripts/bin/run-all-tests.sh create mode 100755 scripts/bin/run.sh create mode 100755 scripts/bin/subunit2html.py create mode 100755 scripts/bin/utils.sh create mode 100644 scripts/job/.neutron-ovs-config.sh.swp create mode 100755 scripts/job/common-job-script.sh create mode 100644 scripts/job/neutron-ovs-config.sh create mode 100644 scripts/job/ovs-config.sh create mode 100755 scripts/logs/collect_logs.sh create mode 100644 scripts/logs/utils.sh create mode 100755 scripts/logs/wsmancmd.py create mode 100644 templates/bundle.template create mode 100644 tests/neutron-ovs/excluded_tests.txt create mode 100644 tests/neutron-ovs/included_tests.txt create mode 100644 tests/neutron-ovs/isolated_tests.txt create mode 
100644 tests/ovs/excluded_tests.txt create mode 100644 tests/ovs/included_tests.txt create mode 100644 tests/ovs/isolated_tests.txt diff --git a/scripts/bin/get-results-html.sh b/scripts/bin/get-results-html.sh new file mode 100755 index 0000000..784f961 --- /dev/null +++ b/scripts/bin/get-results-html.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +log_file=$1 +results_html_file=$2 + +f=$(tempfile) +cat $log_file | subunit-2to1 > $f +python subunit2html.py $f $results_html_file +rm $f + diff --git a/scripts/bin/get-tests.sh b/scripts/bin/get-tests.sh new file mode 100755 index 0000000..66c09b5 --- /dev/null +++ b/scripts/bin/get-tests.sh @@ -0,0 +1,47 @@ +#!/bin/bash +set -e + +array_to_regex() +{ + local ar=(${@}) + local regex="" + + for s in "${ar[@]}" + do + if [ "$regex" ]; then + regex+="\\|" + fi + regex+="^"$(echo $s | sed -e 's/[]\/$*.^|[]/\\&/g') + done + echo $regex +} + +tests_dir=$1 + +BASEDIR=$(dirname $0) + +include_tests_file=$2 +exclude_tests_file=$3 +isolated_tests_file=$4 + +include_tests=(`awk 'NF && $1!~/^#/' $include_tests_file`) + +if [ -f "$exclude_tests_file" ]; then + exclude_tests=(`awk 'NF && $1!~/^#/' $exclude_tests_file`) +fi + +if [ -f "$isolated_tests_file" ]; then + isolated_tests=(`awk 'NF && $1!~/^#/' $isolated_tests_file`) +fi + +exclude_tests=( ${exclude_tests[@]} ${isolated_tests[@]} ) + +include_regex=$(array_to_regex ${include_tests[@]}) +exclude_regex=$(array_to_regex ${exclude_tests[@]}) + +if [ ! 
"$exclude_regex" ]; then + exclude_regex='^$' +fi + +cd $tests_dir +testr list-tests | grep $include_regex | grep -v $exclude_regex diff --git a/scripts/bin/parallel-test-runner.sh b/scripts/bin/parallel-test-runner.sh new file mode 100755 index 0000000..945b2c8 --- /dev/null +++ b/scripts/bin/parallel-test-runner.sh @@ -0,0 +1,141 @@ +#!/bin/bash + +# Make sure we kill the entire process tree when exiting +trap 'kill 0' SIGINT SIGTERM + +function run_test_retry(){ + local tests_file=$1 + local tmp_log_file=$2 + local i=0 + local exit_code=0 + + pushd . > /dev/null + cd $tests_dir + + while : ; do + > $tmp_log_file + testr run --subunit --load-list=$tests_file > $tmp_log_file 2>&1 + subunit-stats $tmp_log_file > /dev/null + exit_code=$? + ((i++)) + ( [ $exit_code -eq 0 ] || [ $i -ge $max_attempts ] ) && break + echo "Test $tests_file failed. Retrying count: $i" + done + + popd > /dev/null + + echo $exit_code +} + +function get_tests_range() { + local i=$1 + if [ $i -lt ${#tests[@]} ]; then + local test=${tests[$i]} + local test_class=${test%.*} + local j=$i + if [ $run_isolated -eq 0 ]; then + for test in ${tests[@]:$((i+1))}; do + local test_class_match=${test%.*} + if [ "$test_class" == "$test_class_match" ]; then + ((j++)) + else + break + fi + done + fi + + echo $i $j + fi +} + +function get_next_test_idx_range() { + ( + flock -x 200 + local test_idx=$(<$cur_test_idx_file) + local test_idx_range=( $(get_tests_range $test_idx) ) + + if [ ${#test_idx_range[@]} -gt 0 ]; then + test_idx=${test_idx_range[1]} + ((test_idx++)) + echo $test_idx > $cur_test_idx_file + echo ${test_idx_range[@]} + fi + ) 200>$lock_file_1 +} + +function parallel_test_runner() { + local runner_id=$1 + while : ; do + local test_idx_range=( $(get_next_test_idx_range) ) + + if [ ${#test_idx_range[@]} -eq 0 ]; then + break + fi + + local range_start=${test_idx_range[0]} + local range_end=${test_idx_range[1]} + local tmp_tests_file=$(tempfile) + local l=$((range_end-range_start+1)) + + for test 
in ${tests[@]:$range_start:$l}; do + echo $test >> $tmp_tests_file + done + + local tmp_log_file="$tmp_log_file_base"_"$range_start" + + echo "Test runner $runner_id is starting tests from $((range_start+1)) to $((range_end+1)) out of ${#tests[@]}:" + cat $tmp_tests_file + echo + + local test_exit_code=$(run_test_retry $tmp_tests_file $tmp_log_file) + rm $tmp_tests_file + + echo "Test runner $runner_id finished tests from $((range_start+1)) to $((range_end+1)) out of ${#tests[@]} with exit code: $test_exit_code" + done +} + + +tests_file=$1 +tests_dir=$2 +log_file=$3 +max_parallel_tests=${4:-10} +max_attempts=${5:-5} +run_isolated=${6:-0} + +tests=(`awk '{print}' $tests_file`) + +cur_test_idx_file=$(tempfile) +echo 0 > $cur_test_idx_file + +lock_file_1=$(tempfile) +tmp_log_file_base=$(tempfile) + +pids=() +for i in $(seq 1 $max_parallel_tests); do + parallel_test_runner $i & + pids+=("$!") +done + +for pid in ${pids[@]}; do + wait $pid +done + +rm $cur_test_idx_file + +> $log_file +for i in $(seq 0 $((${#tests[@]}-1))); do + tmp_log_file="$tmp_log_file_base"_"$i" + if [ -f "$tmp_log_file" ]; then + cat $tmp_log_file >> $log_file + rm $tmp_log_file + fi +done + +rm $tmp_log_file_base +rm $lock_file_1 + +echo "Test execution completed in $SECONDS seconds." + +subunit-stats $log_file > /dev/null +exit $? 
+ diff --git a/scripts/bin/run-all-tests.sh b/scripts/bin/run-all-tests.sh new file mode 100755 index 0000000..f5edd72 --- /dev/null +++ b/scripts/bin/run-all-tests.sh @@ -0,0 +1,108 @@ +#!/bin/bash + +function help() { + echo "Required parameters:" + echo " --include-file: the tempest test groups to be executed" + echo " --exclude-file: tempest tests that have to be excluded" + echo " --tests-dir: tempest execution folder" + echo "Optional parameters:" + echo " --isolated-file: tempest tests that require to be executed isolated" + echo " --parallel-tests: number of tempest tests to run in parallel (DEFAULT: 4)" + echo " --max-attempts: number of retries if a test fails (DEFAULT: 2)" + echo " --log-file: name of the tempest run log file (including full path)" + echo " --results-html-file: name of the html results file (including full path)" +} + +while [ $# -gt 0 ] +do + case $1 in + --include-file) + INCLUDE_FILE=$2 + shift;; + --exclude-file) + EXCLUDE_FILE=$2 + shift;; + --isolated-file) + ISOLATED_FILE=$2 + shift;; + --tests-dir) + TESTS_DIR=$2 + shift;; + --parallel-tests) + PARALLEL_TESTS=$2 + shift;; + --max-attempts) + MAX_ATTEMPTS=$2 + shift;; + --log-file) + LOG_FILE=$2 + shift;; + --results-html-file) + RESULTS_HTML_FILE=$2 + shift;; + *) + echo "no such option" + help + esac + shift +done + +if [ -z "$INCLUDE_FILE" ]; then echo "tempest include file must be provided"; exit 1; fi +if [ -z "$EXCLUDE_FILE" ]; then echo "tempest exclude file must be provided"; exit 1; fi +if [ -z "$TESTS_DIR" ]; then echo "tempest execution folder must be provided"; exit 1; fi +if [ -z "$PARALLEL_TESTS" ]; then PARALLEL_TESTS=4; fi +if [ -z "$MAX_ATTEMPTS" ]; then MAX_ATTEMPTS=2; fi +if [ -z "$LOG_FILE" ]; then LOG_FILE="/home/ubuntu/tempest/subunit-output.log"; fi +if [ -z "$RESULTS_HTML_FILE" ]; then RESULTS_HTML_FILE="/home/ubuntu/tempest/results.html"; fi + +BASEDIR=$(dirname $0) + +pushd $BASEDIR + +. $BASEDIR/utils.sh + +TESTS_FILE=$(tempfile) + +. 
$TESTS_DIR/.tox/venv/bin/activate + +$BASEDIR/get-tests.sh $TESTS_DIR $INCLUDE_FILE $EXCLUDE_FILE $ISOLATED_FILE > $TESTS_FILE + +echo "Running tests from: $TESTS_FILE" + +if [ ! -d "$TESTS_DIR/.testrepository" ]; then + push_dir + cd $TESTS_DIR + echo "Initializing testr" + testr init + pop_dir +fi + +$BASEDIR/parallel-test-runner.sh $TESTS_FILE $TESTS_DIR $LOG_FILE \ + $PARALLEL_TESTS $MAX_ATTEMPTS || true + +if [ -f "$ISOLATED_FILE" ]; then + echo "Running isolated tests from: $ISOLATED_FILE" + log_tmp=$(tempfile) + $BASEDIR/parallel-test-runner.sh $ISOLATED_FILE $TESTS_DIR $log_tmp \ + $PARALLEL_TESTS $MAX_ATTEMPTS 1 || true + + cat $log_tmp >> $LOG_FILE + rm $log_tmp +fi + +rm $TESTS_FILE + +deactivate + +echo "Generating HTML report..." +$BASEDIR/get-results-html.sh $LOG_FILE $RESULTS_HTML_FILE + +subunit-stats $LOG_FILE > /dev/null +exit_code=$? + +echo "Total execution time: $SECONDS seconds." + +popd + +exit $exit_code + diff --git a/scripts/bin/run.sh b/scripts/bin/run.sh new file mode 100755 index 0000000..4ad8ea4 --- /dev/null +++ b/scripts/bin/run.sh @@ -0,0 +1,60 @@ +#!/bin/bash +set -e + +BASEDIR=$(dirname $0) + +. $BASEDIR/utils.sh + + + + + + +tempest_dir="/opt/stack/tempest" +test_config_dir="$tempest_dir/config" +test_logs_dir="$tempest_dir/logs" +subunit_log_file="subunit.log" +html_results_file="results.html" +max_parallel_tests=4 +max_attempts=3 + +project=$(basename $ZUUL_PROJECT) +filters_location="/home/ubuntu/$project-ci/devstack/tests" +include_file="$filters_location/included_tests.txt" +exclude_file="$filters_location/excluded_tests.txt" +isolated_file="$filters_location/isolated_tests.txt" + +log_dir="/home/ubuntu/tempest" +if [ ! 
-d $log_dir ]; then mkdir -p $log_dir; fi + +$BASEDIR/run-all-tests.sh --tests-dir $tempest_dir \ + --parallel-tests $max_parallel_tests \ + --max-attempts $max_attempts \ + --log-file "$log_dir/$subunit_log_file" \ + --results-html-file "$log_dir/$html_results_file" \ + --include-file "$include_file" \ + --exclude-file "$exclude_file" \ + --isolated-file "$isolated_file" \ + > $test_logs_dir/out.txt 2> $test_logs_dir/err.txt \ + || has_failed_tests=1 + +subunit-stats --no-passthrough "$log_dir/$subunit_log_file" || true + +<< 'TBD' + copy_devstack_config_files "$test_config_dir/devstack" + + for host_name in ${host_names[@]}; + do + exec_with_retry 15 2 get_win_host_config_files $host_name "$test_config_dir/$host_name" + exec_with_retry 5 0 get_win_system_info_log $host_name "$test_logs_dir/$host_name/systeminfo.log" + exec_with_retry 5 0 get_win_hotfixes_log $host_name "$test_logs_dir/$host_name/hotfixes.log" + exec_with_retry 15 2 get_win_host_log_files $host_name "$test_logs_dir/$host_name" + done + + echo "Removing symlinks from logs" + find "$test_logs_dir/" -type l -delete + echo "Compressing log files" + find "$test_logs_dir/" -name "*.log" -exec gzip {} \; +TBD + +exit $has_failed_tests diff --git a/scripts/bin/subunit2html.py b/scripts/bin/subunit2html.py new file mode 100755 index 0000000..96c289f --- /dev/null +++ b/scripts/bin/subunit2html.py @@ -0,0 +1,727 @@ +#!/usr/bin/python +""" +Utility to convert a subunit stream to an html results file. +Code is adapted from the pyunit Html test runner at +http://tungwaiyip.info/software/HTMLTestRunner.html + +Takes two arguments. First argument is path to subunit log file, second +argument is path of desired output file. Second argument is optional, +defaults to 'results.html'. + +Original HTMLTestRunner License: +------------------------------------------------------------------------ +Copyright (c) 2004-2007, Wai Yip Tung +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +* Neither the name Wai Yip Tung nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" + +import collections +import datetime +import io +import sys +import traceback +from xml.sax import saxutils + +import subunit +import testtools + +__version__ = '0.1' + + +class TemplateData(object): + """ + Define a HTML template for report customerization and generation. 
+ + Overall structure of an HTML report + + HTML + +------------------------+ + | | + | | + | | + | STYLESHEET | + | +----------------+ | + | | | | + | +----------------+ | + | | + | | + | | + | | + | | + | HEADING | + | +----------------+ | + | | | | + | +----------------+ | + | | + | REPORT | + | +----------------+ | + | | | | + | +----------------+ | + | | + | ENDING | + | +----------------+ | + | | | | + | +----------------+ | + | | + | | + | | + +------------------------+ + """ + + STATUS = { + 0: 'pass', + 1: 'fail', + 2: 'error', + 3: 'skip', + } + + DEFAULT_TITLE = 'Unit Test Report' + DEFAULT_DESCRIPTION = '' + + # ------------------------------------------------------------------------ + # HTML Template + + HTML_TMPL = r""" + + + + %(title)s + + + %(stylesheet)s + + + + +%(heading)s +%(report)s +%(ending)s + + + +""" + # variables: (title, generator, stylesheet, heading, report, ending) + + # ------------------------------------------------------------------------ + # Stylesheet + # + # alternatively use a for external style sheet, e.g. + # + + STYLESHEET_TMPL = """ + +""" + + # ------------------------------------------------------------------------ + # Heading + # + + HEADING_TMPL = """
+

%(title)s

+%(parameters)s +

%(description)s

+
+ +""" # variables: (title, parameters, description) + + HEADING_ATTRIBUTE_TMPL = """ +

%(name)s: %(value)s

+""" # variables: (name, value) + + # ------------------------------------------------------------------------ + # Report + # + + REPORT_TMPL = """ +

Show +Summary +Failed +All +

+ ++++++++++ + + + + + + + + + + +%(test_list)s + + + + + + + + + + +
Test Group/Test caseCountPassFailErrorSkipView
Total%(count)s%(Pass)s%(fail)s%(error)s%(skip)s  
+""" # variables: (test_list, count, Pass, fail, error) + + REPORT_CLASS_TMPL = r""" + + %(desc)s + %(count)s + %(Pass)s + %(fail)s + %(error)s + %(skip)s + Detail + + +""" # variables: (style, desc, count, Pass, fail, error, cid) + + REPORT_TEST_WITH_OUTPUT_TMPL = r""" + +
%(desc)s
+ + + + + %(status)s + + + + + + +""" # variables: (tid, Class, style, desc, status) + + REPORT_TEST_NO_OUTPUT_TMPL = r""" + +
%(desc)s
+ %(status)s + +""" # variables: (tid, Class, style, desc, status) + + REPORT_TEST_OUTPUT_TMPL = r""" +%(id)s: %(output)s +""" # variables: (id, output) + + # ------------------------------------------------------------------------ + # ENDING + # + + ENDING_TMPL = """
 
""" + +# -------------------- The end of the Template class ------------------- + + +class ClassInfoWrapper(object): + def __init__(self, name, mod): + self.name = name + self.mod = mod + + def __repr__(self): + return "%s" % (self.name) + + +class HtmlOutput(testtools.TestResult): + """Output test results in html.""" + + def __init__(self, html_file='result.html'): + super(HtmlOutput, self).__init__() + self.success_count = 0 + self.failure_count = 0 + self.error_count = 0 + self.skip_count = 0 + self.result = [] + self.html_file = html_file + + def addSuccess(self, test): + self.success_count += 1 + output = test.shortDescription() + if output is None: + output = test.id() + self.result.append((0, test, output, '')) + + def addSkip(self, test, err): + output = test.shortDescription() + if output is None: + output = test.id() + self.skip_count += 1 + self.result.append((3, test, output, '')) + + def addError(self, test, err): + output = test.shortDescription() + if output is None: + output = test.id() + # Skipped tests are handled by SkipTest Exceptions. 
+ #if err[0] == SkipTest: + # self.skip_count += 1 + # self.result.append((3, test, output, '')) + else: + self.error_count += 1 + _exc_str = self.formatErr(err) + self.result.append((2, test, output, _exc_str)) + + def addFailure(self, test, err): + print(test) + self.failure_count += 1 + _exc_str = self.formatErr(err) + output = test.shortDescription() + if output is None: + output = test.id() + self.result.append((1, test, output, _exc_str)) + + def formatErr(self, err): + exctype, value, tb = err + return ''.join(traceback.format_exception(exctype, value, tb)) + + def stopTestRun(self): + super(HtmlOutput, self).stopTestRun() + self.stopTime = datetime.datetime.now() + report_attrs = self._getReportAttributes() + generator = 'subunit2html %s' % __version__ + heading = self._generate_heading(report_attrs) + report = self._generate_report() + ending = self._generate_ending() + output = TemplateData.HTML_TMPL % dict( + title=saxutils.escape(TemplateData.DEFAULT_TITLE), + generator=generator, + stylesheet=TemplateData.STYLESHEET_TMPL, + heading=heading, + report=report, + ending=ending, + ) + if self.html_file: + with open(self.html_file, 'wb') as html_file: + html_file.write(output.encode('utf8')) + + def _getReportAttributes(self): + """Return report attributes as a list of (name, value).""" + status = [] + if self.success_count: + status.append('Pass %s' % self.success_count) + if self.failure_count: + status.append('Failure %s' % self.failure_count) + if self.error_count: + status.append('Error %s' % self.error_count) + if self.skip_count: + status.append('Skip %s' % self.skip_count) + if status: + status = ' '.join(status) + else: + status = 'none' + return [ + ('Status', status), + ] + + def _generate_heading(self, report_attrs): + a_lines = [] + for name, value in report_attrs: + line = TemplateData.HEADING_ATTRIBUTE_TMPL % dict( + name=saxutils.escape(name), + value=saxutils.escape(value), + ) + a_lines.append(line) + heading = TemplateData.HEADING_TMPL % 
dict( + title=saxutils.escape(TemplateData.DEFAULT_TITLE), + parameters=''.join(a_lines), + description=saxutils.escape(TemplateData.DEFAULT_DESCRIPTION), + ) + return heading + + def _generate_report(self): + rows = [] + sortedResult = self._sortResult(self.result) + for cid, (cls, cls_results) in enumerate(sortedResult): + # subtotal for a class + np = nf = ne = ns = 0 + for n, t, o, e in cls_results: + if n == 0: + np += 1 + elif n == 1: + nf += 1 + elif n == 2: + ne += 1 + else: + ns += 1 + + # format class description + if cls.mod == "__main__": + name = cls.name + else: + name = "%s" % (cls.name) + doc = cls.__doc__ and cls.__doc__.split("\n")[0] or "" + desc = doc and '%s: %s' % (name, doc) or name + + row = TemplateData.REPORT_CLASS_TMPL % dict( + style=(ne > 0 and 'errorClass' or nf > 0 + and 'failClass' or 'passClass'), + desc = desc, + count = np + nf + ne + ns, + Pass = np, + fail = nf, + error = ne, + skip = ns, + cid = 'c%s' % (cid + 1), + ) + rows.append(row) + + for tid, (n, t, o, e) in enumerate(cls_results): + self._generate_report_test(rows, cid, tid, n, t, o, e) + + report = TemplateData.REPORT_TMPL % dict( + test_list=''.join(rows), + count=str(self.success_count + self.failure_count + + self.error_count + self.skip_count), + Pass=str(self.success_count), + fail=str(self.failure_count), + error=str(self.error_count), + skip=str(self.skip_count), + ) + return report + + def _sortResult(self, result_list): + # unittest does not seems to run in any particular order. + # Here at least we want to group them together by class. 
+ rmap = {} + classes = [] + for n, t, o, e in result_list: + if hasattr(t, '_tests'): + for inner_test in t._tests: + self._add_cls(rmap, classes, inner_test, + (n, inner_test, o, e)) + else: + self._add_cls(rmap, classes, t, (n, t, o, e)) + classort = lambda s: str(s) + sortedclasses = sorted(classes, key=classort) + r = [(cls, rmap[str(cls)]) for cls in sortedclasses] + return r + + def _add_cls(self, rmap, classes, test, data_tuple): + if hasattr(test, 'test'): + test = test.test + if test.__class__ == subunit.RemotedTestCase: + #print(test._RemotedTestCase__description.rsplit('.', 1)[0]) + cl = test._RemotedTestCase__description.rsplit('.', 1)[0] + mod = cl.rsplit('.', 1)[0] + cls = ClassInfoWrapper(cl, mod) + else: + cls = ClassInfoWrapper(str(test.__class__), str(test.__module__)) + if not str(cls) in rmap: + rmap[str(cls)] = [] + classes.append(cls) + rmap[str(cls)].append(data_tuple) + + def _generate_report_test(self, rows, cid, tid, n, t, o, e): + # e.g. 'pt1.1', 'ft1.1', etc + # ptx.x for passed/skipped tests and ftx.x for failed/errored tests. 
+ has_output = bool(o or e) + tid = ((n == 0 or n == 3) and + 'p' or 'f') + 't%s.%s' % (cid + 1, tid + 1) + name = t.id().split('.')[-1] + # if shortDescription is not the function name, use it + if t.shortDescription().find(name) == -1: + doc = t.shortDescription() + else: + doc = None + desc = doc and ('%s: %s' % (name, doc)) or name + tmpl = (has_output and TemplateData.REPORT_TEST_WITH_OUTPUT_TMPL + or TemplateData.REPORT_TEST_NO_OUTPUT_TMPL) + + script = TemplateData.REPORT_TEST_OUTPUT_TMPL % dict( + id=tid, + output=saxutils.escape(o + e), + ) + + row = tmpl % dict( + tid=tid, + Class=((n == 0 or n == 3) and 'hiddenRow' or 'none'), + style=(n == 2 and 'errorCase' or + (n == 1 and 'failCase' or 'none')), + desc=desc, + script=script, + status=TemplateData.STATUS[n], + ) + rows.append(row) + if not has_output: + return + + def _generate_ending(self): + return TemplateData.ENDING_TMPL + + def startTestRun(self): + super(HtmlOutput, self).startTestRun() + + +class FileAccumulator(testtools.StreamResult): + + def __init__(self): + super(FileAccumulator, self).__init__() + self.route_codes = collections.defaultdict(io.BytesIO) + + def status(self, **kwargs): + if kwargs.get('file_name') != 'stdout': + return + file_bytes = kwargs.get('file_bytes') + if not file_bytes: + return + route_code = kwargs.get('route_code') + stream = self.route_codes[route_code] + stream.write(file_bytes) + + +def main(): + if len(sys.argv) < 2: + print("Need at least one argument: path to subunit log.") + exit(1) + subunit_file = sys.argv[1] + if len(sys.argv) > 2: + html_file = sys.argv[2] + else: + html_file = 'results.html' + + html_result = HtmlOutput(html_file) + stream = open(subunit_file, 'rb') + + # Feed the subunit stream through both a V1 and V2 parser. + # Depends on having the v2 capable libraries installed. + # First V2. + # Non-v2 content and captured non-test output will be presented as file + # segments called stdout. 
+ suite = subunit.ByteStreamToStreamResult(stream, non_subunit_name='stdout') + # The HTML output code is in legacy mode. + result = testtools.StreamToExtendedDecorator(html_result) + # Divert non-test output + accumulator = FileAccumulator() + result = testtools.StreamResultRouter(result) + result.add_rule(accumulator, 'test_id', test_id=None) + result.startTestRun() + suite.run(result) + # Now reprocess any found stdout content as V1 subunit + for bytes_io in accumulator.route_codes.values(): + bytes_io.seek(0) + suite = subunit.ProtocolTestCase(bytes_io) + suite.run(html_result) + result.stopTestRun() + + +if __name__ == '__main__': + main() diff --git a/scripts/bin/utils.sh b/scripts/bin/utils.sh new file mode 100755 index 0000000..0cedacd --- /dev/null +++ b/scripts/bin/utils.sh @@ -0,0 +1,105 @@ +#!/bin/bash +set -e + +BASEDIR=$(dirname $0) + +function push_dir() { + pushd . > /dev/null +} + +function pop_dir() { + popd > /dev/null +} + +function exec_with_retry () { + local max_retries=$1 + local interval=${2} + local cmd=${@:3} + + local counter=0 + while [ $counter -lt $max_retries ]; do + local exit_code=0 + eval $cmd || exit_code=$? 
+ if [ $exit_code -eq 0 ]; then + return 0 + fi + let counter=counter+1 + + if [ -n "$interval" ]; then + sleep $interval + fi + done + return $exit_code +} + + +function copy_devstack_config_files() { + local dest_dir=$1 + + mkdir -p $dest_dir + + cp -r /etc/ceilometer $dest_dir + cp -r /etc/cinder $dest_dir + cp -r /etc/glance $dest_dir + cp -r /etc/heat $dest_dir + cp -r /etc/keystone $dest_dir + cp -r /etc/nova $dest_dir + cp -r /etc/neutron $dest_dir + cp -r /etc/swift $dest_dir + + mkdir $dest_dir/tempest + check_copy_dir $tempest_dir/etc $dest_dir/tempest +} + +function copy_devstack_log_files() { + local dest_dir=$1 + + mkdir -p $dest_dir + + cp -r /opt/stack/logs $dest_dir + cp -r /etc/cinder $dest_dir + cp -r /etc/glance $dest_dir + cp -r /etc/heat $dest_dir + cp -r /etc/keystone $dest_dir + cp -r /etc/nova $dest_dir + cp -r /etc/neutron $dest_dir + cp -r /etc/swift $dest_dir + + mkdir $dest_dir/tempest + check_copy_dir $tempest_dir/etc $dest_dir/tempest +} + +function copy_devstack_config_files() { + local dest_dir=$1 + + mkdir -p $dest_dir + + cp -r /etc/ceilometer $dest_dir + cp -r /etc/cinder $dest_dir + cp -r /etc/glance $dest_dir + cp -r /etc/heat $dest_dir + cp -r /etc/keystone $dest_dir + cp -r /etc/nova $dest_dir + cp -r /etc/neutron $dest_dir + cp -r /etc/swift $dest_dir + + mkdir $dest_dir/tempest + check_copy_dir $tempest_dir/etc $dest_dir/tempest +} + +function mount_windows_share() { + local host=$1 + local user=$2 + local pass=$3 + local domain=$4 + + mkdir -p /mnt/$host + sudo mount -t cifs //$host/C$ /mnt/$host -o username=$user,password=$pass,domain=$domain +} + +function umount_windows_share(){ + local host=$1 + + sudo umount /mnt/$host +} + diff --git a/scripts/job/.neutron-ovs-config.sh.swp b/scripts/job/.neutron-ovs-config.sh.swp new file mode 100644 index 0000000000000000000000000000000000000000..0f64e250ece69aa6bf0f17197c5cf92269993b6e GIT binary patch literal 12288 zcmeI2&yUUCtVJETj@ozocYds&(EKHN6~7%48MQ;&GFn%30(IG 
zdHwS5AKtuwhy3+|knM6S&DMVL|JNCp=R)e4uGx;yF1Zw}4x?$#Pkq5)Elg0hP1DL* zDFR(8(HR|_waY;Atx+JiJ3T8~xe{w%Z>EJ%K>|qN1_bVsB#uIx_|SWB{OKppZcqbL zkN^@u0!RP}AOR$R1dsp{xUB?CcSL@M=HEMP;N!!5G@1_|aY6z}00|%gB!C2v01`j~ zNB{{S0VIF~ZXp3y5%T*TLO%Zhnuq`Y|M&O*KXCI0j8}IF`RyY@eu1%qaRlSZhlI>w z+=uZerbZ6TWV!YhSRt)A7)aBckErn>Zqy@hWfvDm75;cblRXb?o9ABTSAM=8z1EDYY2 zy5a1msc>|%fWw%E)iqbwF4}fn?)uHmHdh^2yIJg?`q8Z9%7~TVmEUOLkqaS_6Kh~F;K>Y`$X6_OhudA{(h2_irC!yryP(kb2*oodf` z2?qO4ZNXps61FN?U39GiF$`%qr)ff?gywTvhR2R_N|&y}PH8w)Qd-5750!+jBAQN= z%vF{NxYSu|c|s#s;f&5*1uCDqO1`A4p_0-lc6YH?rgcsxSx!Iutb7=gP!_st0kKhs zkd=*h!A3M7W<`@>Wl)wbv3(sU2B@avXOU-a);?xTBcr2T0K<5+I)k3!CuW`WZdYW^ zlghyru3}v=RC-qomEIK-C36+)ije}Ya24wcSFx^e73+$j0zAPL6J=WGWD?`_v#*m6 zV|rJ(igm?M*+0$EF03m?G2n`k0> $BUNDLE_LOCATION + +cat $BUNDLE_LOCATION + +$DEPLOYER_PATH/deployer.py --clouds-and-credentials $DEPLOYER_PATH/$CI_CREDS deploy --template $BUNDLE_LOCATION --max-unit-retries 10 --timeout 7200 --search-string $UUID +build_exit_code=$? + +source $WORKSPACE/nodes + +exec_with_retry 5 2 ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ + "git clone https://github.com/cloudbase/common-ci.git /home/ubuntu/common-ci" +clone_exit_code=$? + +exec_with_retry 5 2 ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ + "git -C /home/ubuntu/common-ci checkout charms" +checkout_exit_code=$? 
+ + +if [[ $build_exit_code -eq 0 ]]; then + #run tempest + + exec_with_retry 5 2 ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ + "mkdir -p /home/ubuntu/tempest" + ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ + "/home/ubuntu/common-ci/scripts/bin/run-all-tests.sh --include-file /home/ubuntu/common-ci/tests/$project/included_tests.txt \ + --exclude-file /home/ubuntu/common-ci/tests/$project/excluded_tests.txt --isolated-file /home/ubuntu/common-ci/tests/$project/isolated_tests.txt \ + --tests-dir /opt/stack/tempest --parallel-tests 10 --max-attempts 2" + tests_exit_code=$? +fi + +######################### Collect logs ######################### +LOG_DIR="logs/${UUID}" +if [ $LOG_DIR ]; then + rm -rf $LOG_DIR +fi +mkdir -p "$LOG_DIR" + +ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ + "sudo /home/ubuntu/common-ci/scripts/logs/collect_logs.sh" + +scp -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY \ +ubuntu@$DEVSTACK:/home/ubuntu/aggregate.tar.gz $LOG_DIR/aggregate.tar.gz + +tar -zxf $LOG_DIR/aggregate.tar.gz -C $LOG_DIR/ +rm $LOG_DIR/aggregate.tar.gz + +source $WORKSPACE/common-ci/scripts/logs/utils.sh + +for hv in $(echo $HYPERV | tr "," "\n"); do + HV_LOGS=$LOG_DIR/hyperv-logs/$hv + HV_CONFS=$LOG_DIR/hyperv-config/$hv + mkdir -p $HV_LOGS + mkdir -p $HV_CONFS + + get_win_files $hv "\openstack\log" $HV_LOGS + get_win_files $hv "\openstack\etc" $HV_CONFS + get_win_files $hv "\juju\log" $HV_LOGS + + run_wsman_cmd $hv 'systeminfo' > $HV_LOGS/systeminfo.log + run_wsman_cmd $hv 'wmic qfe list' > $HV_LOGS/windows-hotfixes.log + run_wsman_cmd $hv 'c:\python27\scripts\pip freeze' > $HV_LOGS/pip-freeze.log + run_wsman_cmd $hv 'ipconfig 
/all' > $HV_LOGS/ipconfig.log + run_wsman_cmd $hv 'sc qc nova-compute' > $HV_LOGS/nova-compute-service.log + run_wsman_cmd $hv 'sc qc neutron-openvswitch-agent' > $HV_LOGS/neutron-openvswitch-agent-service.log + + run_wsman_ps $hv 'get-netadapter ^| Select-object *' > $HV_LOGS/get-netadapter.log + run_wsman_ps $hv 'get-vmswitch ^| Select-object *' > $HV_LOGS/get-vmswitch.log + run_wsman_ps $hv 'get-WmiObject win32_logicaldisk ^| Select-object *' > $HV_LOGS/disk-free.log + run_wsman_ps $hv 'get-netfirewallprofile ^| Select-Object *' > $HV_LOGS/firewall.log + + run_wsman_ps $hv 'get-process ^| Select-Object *' > $HV_LOGS/get-process.log + run_wsman_ps $hv 'get-service ^| Select-Object *' > $HV_LOGS/get-service.log +done + +wget http://10.20.1.3:8080/job/$JOB_NAME/$BUILD_ID/consoleText -O $LOG_DIR/console.log + +find $LOG_DIR -name "*.log" -exec gzip {} \; + +tar -zcf $LOG_DIR/aggregate.tar.gz $LOG_DIR + +if [ $project == "ovs" ]; then + if [ ! $UUID ]; then + exit 1 + fi + REMOTE_LOG_PATH="/srv/logs/ovs/tempest-run/$UUID" +elif [ $network_type == "ovs" ]; then + if [ ! $project ] || [ ! $ZUUL_CHANGE ] || [ ! $ZUUL_PATCHSET ]; then + exit 1 + fi + REMOTE_LOG_PATH="/srv/logs/${project}-ovs/$ZUUL_CHANGE/$ZUUL_PATCHSET" +else + if [ ! $project ] || [ ! $ZUUL_CHANGE ] || [ ! $ZUUL_PATCHSET ]; then + exit 1 + fi + REMOTE_LOG_PATH="/srv/logs/$project/$ZUUL_CHANGE/$ZUUL_PATCHSET" +fi + +# Copy logs to remote log server +echo "Creating logs destination folder" +ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $LOGS_SSH_KEY logs@$LOGS_SERVER \ + "rm -r $REMOTE_LOG_PATH" +ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $LOGS_SSH_KEY logs@$LOGS_SERVER \ + "mkdir -p $REMOTE_LOG_PATH" + #"if [ ! 
-d $REMOTE_LOG_PATH ]; then mkdir -p $REMOTE_LOG_PATH; else rm -r $REMOTE_LOG_PATH/*; fi" + +echo "Uploading logs" +scp -o "UserKnownHostsFile /dev/null" -o "StrictHostKeyChecking no" -i $LOGS_SSH_KEY $LOG_DIR/aggregate.tar.gz logs@$LOGS_SERVER:$REMOTE_LOG_PATH/aggregate.tar.gz + +echo "Extracting logs" +ssh -o "UserKnownHostsFile /dev/null" -o "StrictHostKeyChecking no" -i $LOGS_SSH_KEY logs@$LOGS_SERVER "tar -xvf $REMOTE_LOG_PATH/aggregate.tar.gz -C $REMOTE_LOG_PATH/ --strip 1" + +# Remove local logs +rm -rf $LOG_DIR +############################################## + +if [[ $build_exit_code -ne 0 ]]; then + echo "CI Error while deploying environment" + exit 1 +fi + +if [[ $clone_exit_code -ne 0 ]]; then + echo "CI Error while cloning the scripts repository" + exit 1 +fi + +if [[ $checkout_exit_code -ne 0 ]]; then + echo "CI Error while checking out the scripts repository" + exit 1 +fi + +if [[ $tests_exit_code -ne 0 ]]; then + echo "Tempest tests execution finished with a failure status" + exit 1 +fi + +if [ "$DEBUG" != "YES" ]; then + #destroy charms, services and used nodes. 
+ $DEPLOYER_PATH/deployer.py --clouds-and-credentials $DEPLOYER_PATH/$CI_CREDS teardown --search-string $UUID +fi + +exit 0 diff --git a/scripts/job/neutron-ovs-config.sh b/scripts/job/neutron-ovs-config.sh new file mode 100644 index 0000000..b1d02e4 --- /dev/null +++ b/scripts/job/neutron-ovs-config.sh @@ -0,0 +1,15 @@ +CI_CREDS="neutron-ovs-creds.yaml" +test_signing="false" +data_port="00:07:43:13:97:c8 00:07:43:13:96:b8 00:07:43:13:a6:08 00:07:43:14:d2:e8 00:07:43:13:f1:48 00:07:43:13:f1:88 00:07:43:13:b3:88 00:07:43:13:b5:18 00:07:43:13:ea:78 00:07:43:13:f1:68 00:07:43:13:9b:f8 00:07:43:14:12:c8 00:07:43:14:12:78 00:07:43:13:f1:58 00:07:43:14:12:88 00:07:43:14:12:98 00:07:43:13:a0:f8 00:07:43:13:9a:78 00:07:43:14:18:18 00:07:43:13:a1:48 00:07:43:14:1f:38 00:07:43:14:1b:48 00:07:43:14:18:38 00:07:43:13:f4:b8 00:07:43:13:98:48 00:07:43:13:f4:f8 00:07:43:14:18:98 00:07:43:13:f1:28 00:07:43:14:1a:18" +external_port="00:07:43:13:97:c0 00:07:43:13:96:b0 00:07:43:13:a6:00 00:07:43:14:d2:e0 00:07:43:13:f1:40 00:07:43:13:f1:80 00:07:43:13:b3:80 00:07:43:13:b5:10 00:07:43:13:ea:70 00:07:43:13:f1:60 00:07:43:13:9b:f0 00:07:43:14:12:c0 00:07:43:14:12:70 00:07:43:13:f1:50 00:07:43:14:12:80 00:07:43:14:12:90 00:07:43:13:a0:f0 00:07:43:13:9a:70 00:07:43:14:18:10 00:07:43:13:a1:40 00:07:43:14:1f:30 00:07:43:14:1b:40 00:07:43:14:18:30 00:07:43:13:f4:b0 00:07:43:13:98:40 00:07:43:13:f4:f0 00:07:43:14:18:90 00:07:43:13:f1:20 00:07:43:14:1a:10" +prep_project="True" +os_data_network="10.21.2.0/23" +hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes/41/417141/2|master" +devstack_cherry_picks="https://git.openstack.org/openstack/tempest|refs/changes/49/383049/13|master,https://git.openstack.org/openstack/tempest|refs/changes/28/384528/9|master" +win_user="Administrator" +win_password="Passw0rd" +ovs_installer="http://10.20.1.14:8080/openvswitch-hyperv-2.6.1-certified.msi" +heat_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" 
+test_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" +scenario_img="cirros-latest.vhdx" +vmswitch_management="false" diff --git a/scripts/job/ovs-config.sh b/scripts/job/ovs-config.sh new file mode 100644 index 0000000..f5f10da --- /dev/null +++ b/scripts/job/ovs-config.sh @@ -0,0 +1,17 @@ +CI_CREDS="ovs-creds.yaml" +test_signing="true" +data_port="E4:1D:2D:22:A0:30 E4:1D:2D:22:A6:30 E4:1D:2D:22:A1:E0 24:8A:07:77:3D:00" +external_port="18:A9:05:58:F7:76 00:23:7D:D2:CF:02 00:23:7D:D2:D8:D2 00:23:7D:D2:D8:72" +zuul_branch="master" +prep_project="False" +os_data_network="10.12.3.0/24" +hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes/41/417141/2|master" +devstack_cherry_picks="https://git.openstack.org/openstack/tempest|refs/changes/49/383049/13|master,https://git.openstack.org/openstack/tempest|refs/changes/28/384528/9|master" +win_user="Administrator" +win_password="Passw0rd" +ovs_installer="http://10.20.1.14:8080/ovs/$UUID/OpenvSwitch.msi" +ovs_certificate="http://10.20.1.14:8080/ovs/$UUID/package.cer" +heat_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" +test_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" +scenario_img="cirros-latest.vhdx" +vmswitch_management="false" diff --git a/scripts/logs/collect_logs.sh b/scripts/logs/collect_logs.sh new file mode 100755 index 0000000..a620da6 --- /dev/null +++ b/scripts/logs/collect_logs.sh @@ -0,0 +1,27 @@ +#!/bin/bash +set +e + +BASEDIR=$(dirname $0) +DEVSTACK_LOGS="/opt/stack/logs/screen" +DEVSTACK_LOG_DIR="/opt/stack/logs" + +TEMPEST_LOGS="/home/ubuntu/tempest" + +LOG_DST="/home/ubuntu/aggregate" +LOG_DST_DEVSTACK="$LOG_DST/devstack-logs" +CONFIG_DST_DEVSTACK="$LOG_DST/devstack-config" + +TAR="tar" +GZIP="gzip -f" + +source $BASEDIR/utils.sh + +emit_info "Collecting devstack logs" +archive_devstack_logs +emit_info "Collecting devstack configs" +archive_devstack_configs +emit_info "Collecting tempest files" +archive_tempest_files + +# Archive everything +pushd 
$LOG_DST; tar -zcf "$LOG_DST.tar.gz" .; popd diff --git a/scripts/logs/utils.sh b/scripts/logs/utils.sh new file mode 100644 index 0000000..1f782ae --- /dev/null +++ b/scripts/logs/utils.sh @@ -0,0 +1,191 @@ +#!/bin/bash + +BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +LOG_DST="/home/ubuntu/aggregate" + +TAR="tar" +GZIP="gzip -f" + +function emit_error() { + echo "ERROR: $1" + exit 1 +} + +function emit_warning() { + echo "WARNING: $1" + return 0 +} + +function emit_info() { + echo "INFO: $1" + return 0 +} + +function run_wsman_cmd() { + local host=$1 + local cmd=$2 + $BASEDIR/wsmancmd.py -s -H $host -a certificate -c /home/ubuntu/.ssl/winrm_client_cert.pem -k /home/ubuntu/.ssl/winrm_client_cert.key "$cmd" +} + +function get_win_files() { + local host=$1 + local remote_dir=$2 + local local_dir=$3 + if [ ! -d "$local_dir" ];then + mkdir "$local_dir" + fi + smbclient "//$host/C\$" -c "prompt OFF; cd $remote_dir" -U "$win_user%$win_password" + if [ $? -ne 0 ];then + echo "Folder $remote_dir does not exists" + return 0 + fi + smbclient "//$host/C\$" -c "prompt OFF; recurse ON; lcd $local_dir; cd $remote_dir; mget *" -U "$win_user%$win_password" +} + +function run_wsman_ps() { + local host=$1 + local cmd=$2 + run_wsman_cmd $host "powershell -NonInteractive -ExecutionPolicy RemoteSigned -Command $cmd" +} + +function get_win_hotfixes() { + local host=$1 + run_wsman_cmd $host "wmic qfe list" +} + +function get_win_system_info() { + local host=$1 + run_wsman_cmd $host "systeminfo" +} + +function get_win_time() { + local host=$1 + # Seconds since EPOCH + host_time=`run_wsman_ps $host "[Math]::Truncate([double]::Parse((Get-Date (get-date).ToUniversalTime() -UFormat %s)))" 2>&1` + # Skip the newline + echo ${host_time::-1} +} + +function get_win_hotfixes_log() { + local win_host=$1 + local log_file=$2 + emit_info "Getting hotfixes details for host: $win_host" + get_win_hotfixes $win_host > $log_file +} + +function get_win_system_info_log() { + local win_host=$1 
+ local log_file=$2 + emit_info "Getting system info for host: $win_host" + get_win_system_info $win_host > $log_file +} + +function get_win_host_log_files() { + local host_name=$1 + local local_dir=$2 + get_win_files $host_name "$host_logs_dir" $local_dir +} + +function get_win_host_config_files() { + local host_name=$1 + local local_dir=$2 + mkdir -p $local_dir + + get_win_files $host_name $host_config_dir $local_dir +} + +function check_host_time() { + local host1=$1 + local host2=$2 + host1_time=`get_win_time $host1` + host2_time=`get_win_time $host2` + local_time=`date +%s` + + local delta1=$((local_time - host1_time)) + local delta2=$((local_time - host2_time)) + if [ ${delta1#-} -gt 120 ]; + then + emit_info "Host $host1 time offset compared to this host is too high: $delta1" + return 1 + fi + if [ ${delta2#-} -gt 120 ]; + then + emit_info "Host $host2 time offset compared to this host is too high: $delta2" + return 1 + fi + return 0 +} + +function archive_devstack_logs() { + local LOG_DST_DEVSTACK=${1:-$LOG_DST/devstack-logs} + local DEVSTACK_LOGS="/opt/stack/logs/screen" + + if [ ! 
-d "$LOG_DST_DEVSTACK" ] + then + mkdir -p "$LOG_DST_DEVSTACK" || emit_error "L30: Failed to create $LOG_DST_DEVSTACK" + fi + + for i in `ls -A $DEVSTACK_LOGS` + do + if [ -h "$DEVSTACK_LOGS/$i" ] + then + REAL=$(readlink "$DEVSTACK_LOGS/$i") + $GZIP -c "$REAL" > "$LOG_DST_DEVSTACK/$i.gz" || emit_warning "L38: Failed to archive devstack logs: $i" + fi + done + $GZIP -c /var/log/mysql/error.log > "$LOG_DST_DEVSTACK/mysql_error.log.gz" + $GZIP -c /var/log/cloud-init.log > "$LOG_DST_DEVSTACK/cloud-init.log.gz" + $GZIP -c /var/log/cloud-init-output.log > "$LOG_DST_DEVSTACK/cloud-init-output.log.gz" + $GZIP -c /var/log/dmesg > "$LOG_DST_DEVSTACK/dmesg.log.gz" + $GZIP -c /var/log/kern.log > "$LOG_DST_DEVSTACK/kern.log.gz" + $GZIP -c /var/log/syslog > "$LOG_DST_DEVSTACK/syslog.log.gz" + + mkdir -p "$LOG_DST_DEVSTACK/rabbitmq" + cp /var/log/rabbitmq/* "$LOG_DST_DEVSTACK/rabbitmq" + sudo rabbitmqctl status > "$LOG_DST_DEVSTACK/rabbitmq/status.txt" 2>&1 + $GZIP $LOG_DST_DEVSTACK/rabbitmq/* + mkdir -p "$LOG_DST_DEVSTACK/openvswitch" + cp /var/log/openvswitch/* "$LOG_DST_DEVSTACK/openvswitch" + $GZIP $LOG_DST_DEVSTACK/openvswitch/* + for j in `ls -A /var/log/juju`; do + $GZIP -c /var/log/juju/$j > "$LOG_DST_DEVSTACK/$j.gz" + done +} + +function archive_devstack_configs() { + local CONFIG_DST_DEVSTACK=${1:-$LOG_DST/devstack-config} + + if [ ! 
-d "$CONFIG_DST_DEVSTACK" ] + then + mkdir -p "$CONFIG_DST_DEVSTACK" || emit_warning "L38: Failed to archive devstack configs" + fi + + for i in cinder glance keystone neutron nova openvswitch + do + cp -r -L "/etc/$i" "$CONFIG_DST_DEVSTACK/$i" || continue + done + for file in `find "$CONFIG_DST_DEVSTACK" -type f` + do + $GZIP $file + done + + $GZIP -c /home/ubuntu/devstack/local.conf > "$CONFIG_DST_DEVSTACK/local.conf.gz" + $GZIP -c /opt/stack/tempest/etc/tempest.conf > "$CONFIG_DST_DEVSTACK/tempest.conf.gz" + df -h > "$CONFIG_DST_DEVSTACK/df.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/df.txt" + iptables-save > "$CONFIG_DST_DEVSTACK/iptables.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/iptables.txt" + dpkg-query -l > "$CONFIG_DST_DEVSTACK/dpkg-l.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/dpkg-l.txt" + pip freeze > "$CONFIG_DST_DEVSTACK/pip-freeze.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/pip-freeze.txt" + ps axwu > "$CONFIG_DST_DEVSTACK/pidstat.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/pidstat.txt" + ifconfig -a -v > "$CONFIG_DST_DEVSTACK/ifconfig.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/ifconfig.txt" + sudo ovs-vsctl -v show > "$CONFIG_DST_DEVSTACK/ovs_bridges.txt" 2>&1 && $GZIP "$CONFIG_DST_DEVSTACK/ovs_bridges.txt" +} + +function archive_tempest_files() { + local TEMPEST_LOGS="/home/ubuntu/tempest" + + for i in `ls -A $TEMPEST_LOGS` + do + $GZIP "$TEMPEST_LOGS/$i" -c > "$LOG_DST/$i.gz" || emit_error "L133: Failed to archive tempest logs" + done +} + diff --git a/scripts/logs/wsmancmd.py b/scripts/logs/wsmancmd.py new file mode 100755 index 0000000..32ca789 --- /dev/null +++ b/scripts/logs/wsmancmd.py @@ -0,0 +1,171 @@ +#!/usr/bin/python + +# Copyright 2013 Cloudbase Solutions Srl +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import getopt +import sys + +from winrm import protocol + +AUTH_BASIC = "basic" +AUTH_KERBEROS = "kerberos" +AUTH_CERTIFICATE = "certificate" + +DEFAULT_PORT_HTTP = 5985 +DEFAULT_PORT_HTTPS = 5986 + +CODEPAGE_UTF8 = 65001 + +def print_usage(): + print ("%s [-U ] [-H ] [-P ] [-s] " + "[-a ] " + "[-u ] [-p ] " + "[-c -k ] " + " [cmd_args]" % sys.argv[0]) + + +def parse_args(): + args_ok = False + auth = AUTH_BASIC + username = None + password = None + url = None + host = None + port = None + use_ssl = False + cmd = None + cert_pem = None + cert_key_pem = None + is_powershell_cmd = False + + try: + show_usage = False + opts, args = getopt.getopt(sys.argv[1:], "hsU:H:P:u:p:c:k:a:", + "powershell") + for opt, arg in opts: + if opt == "-h": + show_usage = True + if opt == "-s": + use_ssl = True + if opt == "-H": + host = arg + if opt == "-P": + port = arg + if opt == "-U": + url = arg + elif opt == "-a": + auth = arg + elif opt == "-u": + username = arg + elif opt == "-p": + password = arg + elif opt == "-c": + cert_pem = arg + elif opt == "-k": + cert_key_pem = arg + elif opt == "--powershell": + is_powershell_cmd = True + + cmd = args + + if (show_usage or not + (cmd and + (url and not host and not port and not use_ssl) or + host and ((bool(port) ^ bool(use_ssl) or + not port and not use_ssl)) and + (auth == AUTH_BASIC and username and password or + auth == AUTH_CERTIFICATE and cert_pem and cert_key_pem or + auth == AUTH_KERBEROS))): + print_usage() + else: + args_ok = True + + except getopt.GetoptError: + print_usage() + + return (args_ok, url, host, 
use_ssl, port, auth, username, password, + cert_pem, cert_key_pem, cmd, is_powershell_cmd) + + +def run_wsman_cmd(url, auth, username, password, cert_pem, cert_key_pem, cmd): + protocol.Protocol.DEFAULT_TIMEOUT = 3600 + + if not auth: + auth = AUTH_BASIC + + auth_transport_map = {AUTH_BASIC: 'plaintext', + AUTH_KERBEROS: 'kerberos', + AUTH_CERTIFICATE: 'ssl'} + + p = protocol.Protocol(endpoint=url, + transport=auth_transport_map[auth], + username=username, + password=password, + cert_pem=cert_pem, + cert_key_pem=cert_key_pem) + + shell_id = p.open_shell(codepage=CODEPAGE_UTF8) + + command_id = p.run_command(shell_id, cmd[0], cmd[1:]) + std_out, std_err, status_code = p.get_command_output(shell_id, command_id) + + p.cleanup_command(shell_id, command_id) + p.close_shell(shell_id) + + return (std_out, std_err, status_code) + + +def get_url(url, host, use_ssl, port): + if url: + return url + else: + if not port: + if use_ssl: + port = DEFAULT_PORT_HTTPS + else: + port = DEFAULT_PORT_HTTP + + if use_ssl: + protocol = "https" + else: + protocol = "http" + + return ("%(protocol)s://%(host)s:%(port)s/wsman" % locals()) + + +def main(): + exit_code = 1 + + (args_ok, url, host, use_ssl, port, auth, username, password, + cert_pem, cert_key_pem, cmd, is_powershell_cmd) = parse_args() + if args_ok: + url = get_url(url, host, use_ssl, port) + + if is_powershell_cmd: + cmd = ["powershell.exe", "-ExecutionPolicy", "RemoteSigned", + "-NonInteractive", "-Command"] + cmd + + std_out, std_err, exit_code = run_wsman_cmd(url, auth, username, + password, cert_pem, + cert_key_pem, cmd) + sys.stdout.write(std_out) + sys.stderr.write(std_err) + + sys.exit(exit_code) + + +if __name__ == "__main__": + main() diff --git a/templates/bundle.template b/templates/bundle.template new file mode 100644 index 0000000..ad76dda --- /dev/null +++ b/templates/bundle.template @@ -0,0 +1,79 @@ +relations: + - ["devstack-${UUID}", "hyperv-${UUID}"] + - ["active-directory", "hyperv-${UUID}"] + +services: + 
active-directory: + charm: cs:~cloudbaseit/active-directory-5 + num_units: 1 + series: win2016 + constraints: "tags=$ADTAGS" + options: + administrator-password: "Passw0rd" + safe-mode-password: "Passw0rd" + domain-name: openvswitch.local + domain-user: "openvswitch" + domain-user-password: "Passw0rd" + devstack-${UUID}: + charm: /home/ubuntu/charms/ubuntu/devstack + num_units: 1 + constraints: "tags=$TAGS" + series: xenial + options: + disabled-services: horizon n-novnc n-net n-cpu ceilometer-acompute s-proxy s-object s-container s-account + enabled-services: rabbit mysql key n-api n-crt n-obj n-cond n-sch n-cauth + neutron q-svc q-agt q-dhcp q-l3 q-meta q-lbaas q-fwaas q-metering q-vpn + g-api g-reg cinder c-api c-vol c-sch heat h-api h-api-cfn h-api-cw h-eng tempest + extra-packages: build-essential libpython-all-dev python-all python-dev python3-all + python3-dev g++ g++-4.8 pkg-config libvirt-dev smbclient libxml2-dev libxslt1-dev zlib1g-dev + extra-python-packages: "git+https://github.com/petrutlucian94/pywinrm.git lxml==3.6.4" + heat-image-url: ${heat_image_url} + test-image-url: ${test_image_url} + ml2-mechanism: openvswitch + tenant-network-type: vxlan + enable-tunneling: True + enable-live-migration: True + ntp-server: pool.ntp.org + vlan-range: 2500:2550 + nameservers: 10.20.1.37 + enable-vlans: False + scenario-img: ${scenario_img} + cherry-picks: ${devstack_cherry_picks} + #scenario-img: ubuntu_final.vhdx.zip + pypi-mirror: http://10.20.1.8:8080/cloudbase/CI/+simple/ + data-port: ${data_port} + external-port: ${external_port} + zuul-branch: ${ZUUL_BRANCH} + zuul-change: "${ZUUL_CHANGE}" + zuul-project: ${ZUUL_PROJECT} + zuul-ref: ${ZUUL_REF} + zuul-url: ${ZUUL_URL} + prep-project: ${prep_project} + pip-version: "pip==8.1.1" + hyperv-${UUID}: + charm: /home/ubuntu/charms/windows/hyper-v-ci + num_units: 2 + series: win2016 + constraints: "tags=$TAGS" + options: + administrator-password: ${win_password} + vmswitch-management: ${vmswitch_management} + 
ovs-installer-url: ${ovs_installer} + ovs-certificate-url: ${ovs_certificate} + test-signing: ${test_signing} + network-type: ovs + os-data-network: ${os_data_network} + extra-python-packages: "setuptools SQLAlchemy==0.9.8 wmi oslo.i18n==1.7.0 pbr==1.2.0 oslo.messaging==4.5.1 lxml==3.6.4" + post-python-packages: "kombu==4.0.1 amqp==2.1.3 SQLAlchemy==1.0.17" + git-user-email: "mcapsali@gmail.com" + git-user-name: "capsali" + # cherry-picks: "neutron|https://review.openstack.org/openstack/neutron|refs/changes/77/227077/11|21818de8a9041d3e7e63922bb9fa1edc5475adee" + cherry-picks: "${hyperv_cherry_picks}" + pypi-mirror: http://10.20.1.8:8080/cloudbase/CI/+simple/ + data-port: ${data_port} + zuul-branch: ${ZUUL_BRANCH} + zuul-change: "${ZUUL_CHANGE}" + zuul-project: ${ZUUL_PROJECT} + zuul-ref: ${ZUUL_REF} + zuul-url: ${ZUUL_URL} + pip-version: "pip==8.1.1" \ No newline at end of file diff --git a/tests/neutron-ovs/excluded_tests.txt b/tests/neutron-ovs/excluded_tests.txt new file mode 100644 index 0000000..842dccd --- /dev/null +++ b/tests/neutron-ovs/excluded_tests.txt @@ -0,0 +1,19 @@ +# Rescue (to be removed when the feature is added) +tempest.api.compute.servers.test_server_rescue.ServerRescueTestJSON. +tempest.api.compute.servers.test_server_rescue_negative.ServerRescueNegativeTestJSON. 
+ +# Hyper-V does not support attaching vNics to a running instance +tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesTestJSON.test_create_list_show_delete_interfaces +tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_hotplug_nic +tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_port_security_macspoofing_port + +# See Tempest bug: https://bugs.launchpad.net/tempest/+bug/1363986 +tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps + +#Need investigation - were failing on cinder ci as well +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPatternV2.test_volume_boot_pattern +tempest.api.compute.admin.test_live_migration.LiveBlockMigration + +# Temporary added to exclude on 05/07/2016 +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_create_backup diff --git a/tests/neutron-ovs/included_tests.txt b/tests/neutron-ovs/included_tests.txt new file mode 100644 index 0000000..67b4b58 --- /dev/null +++ b/tests/neutron-ovs/included_tests.txt @@ -0,0 +1,40 @@ +tempest.api.network +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_created_server_vcpus +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_duplicate_network_nics +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_multiple_nics_order +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details + +tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_created_server_vcpus +tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_duplicate_network_nics +tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_multiple_nics_order +tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details + 
+tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_pause_unpause_server +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_soft +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server_in_stop_state +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_confirm +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_confirm_from_stopped +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_revert +# if using compute_hyperv, add test_shelve_unshelve_server +#tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_shelve_unshelve_server +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_suspend_resume_server + +tempest.scenario.test_minimum_basic.TestMinimumBasicScenario.test_minimum_basic_scenario +tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps +tempest.scenario.test_network_basic_ops.TestNetworkBasicOps +tempest.scenario.test_network_v6.TestGettingAddress + +tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_server_sequence_suspend_resume +tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basicops +tempest.scenario.test_shelve_instance.TestShelveInstance.test_shelve_instance +tempest.scenario.test_shelve_instance.TestShelveInstance.test_shelve_volume_backed_instance +tempest.scenario.test_snapshot_pattern.TestSnapshotPattern.test_snapshot_pattern 
+tempest.scenario.test_stamp_pattern.TestStampPattern.test_stamp_pattern +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_create_ebs_image_and_check_boot +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPatternV2.test_create_ebs_image_and_check_boot +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPatternV2.test_volume_boot_pattern diff --git a/tests/neutron-ovs/isolated_tests.txt b/tests/neutron-ovs/isolated_tests.txt new file mode 100644 index 0000000..70f9e7b --- /dev/null +++ b/tests/neutron-ovs/isolated_tests.txt @@ -0,0 +1,9 @@ +tempest.scenario.test_minimum_basic.TestMinimumBasicScenario.test_minimum_basic_scenario +tempest.scenario.test_network_basic_ops.TestNetworkBasicOps +tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps +tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_server_sequence_suspend_resume +tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basicops +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_create_ebs_image_and_check_boot +tempest.scenario.test_volume_boot_pattern.TestVolumeBootPatternV2.test_create_ebs_image_and_check_boot +tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON +tempest.api.compute.servers.test_create_server.ServersTestManualDisk diff --git a/tests/ovs/excluded_tests.txt b/tests/ovs/excluded_tests.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/ovs/included_tests.txt b/tests/ovs/included_tests.txt new file mode 100644 index 0000000..ffd1fe6 --- /dev/null +++ b/tests/ovs/included_tests.txt @@ -0,0 +1,11 @@ +tempest.api.network +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_created_server_vcpus +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_duplicate_network_nics 
+tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_multiple_nics_order +tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details +tempest.api.compute.admin.test_live_migration +tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps +tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps +tempest.scenario.test_network_basic_ops.TestNetworkBasicOps +tempest.scenario.test_network_v6 +tempest.scenario.test_minimum_basic.TestMinimumBasicScenario.test_minimum_basic_scenario diff --git a/tests/ovs/isolated_tests.txt b/tests/ovs/isolated_tests.txt new file mode 100644 index 0000000..e69de29 From 9c41952b7ad69ed5f3e8a761fbfe4944ed06237c Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 13:40:55 +0000 Subject: [PATCH 02/22] Removed old directories --- deployer/.gitignore | 2 - deployer/deployer.py | 352 ----------- deployer/helpers/__init__.py | 0 deployer/helpers/maasclient.py | 205 ------ deployer/helpers/utils.py | 186 ------ deployer/requirements.txt | 3 - devstack/bin/get-results-html.sh | 10 - devstack/bin/get-tests.sh | 47 -- devstack/bin/parallel-test-runner.sh | 141 ----- devstack/bin/run-all-tests.sh | 108 ---- devstack/bin/run.sh | 60 -- devstack/bin/subunit2html.py | 727 ---------------------- devstack/bin/utils.sh | 105 ---- devstack/tests/cinder/excluded_tests.txt | 0 devstack/tests/cinder/included_tests.txt | 1 - devstack/tests/neutron/excluded_tests.txt | 0 devstack/tests/neutron/included_tests.txt | 1 - devstack/tests/nova/excluded_tests.txt | 30 - devstack/tests/nova/included_tests.txt | 1 - devstack/tests/nova/isolated_tests.txt | 38 -- infra/deployer/BundleGenerator.py | 116 ---- infra/logs/collect_logs.sh | 0 infra/logs/utils.sh | 86 --- 23 files changed, 2219 deletions(-) delete mode 100644 deployer/.gitignore delete mode 100755 deployer/deployer.py delete mode 100644 deployer/helpers/__init__.py delete mode 100644 
deployer/helpers/maasclient.py delete mode 100644 deployer/helpers/utils.py delete mode 100644 deployer/requirements.txt delete mode 100755 devstack/bin/get-results-html.sh delete mode 100755 devstack/bin/get-tests.sh delete mode 100755 devstack/bin/parallel-test-runner.sh delete mode 100755 devstack/bin/run-all-tests.sh delete mode 100755 devstack/bin/run.sh delete mode 100755 devstack/bin/subunit2html.py delete mode 100755 devstack/bin/utils.sh delete mode 100644 devstack/tests/cinder/excluded_tests.txt delete mode 100644 devstack/tests/cinder/included_tests.txt delete mode 100644 devstack/tests/neutron/excluded_tests.txt delete mode 100644 devstack/tests/neutron/included_tests.txt delete mode 100644 devstack/tests/nova/excluded_tests.txt delete mode 100644 devstack/tests/nova/included_tests.txt delete mode 100644 devstack/tests/nova/isolated_tests.txt delete mode 100644 infra/deployer/BundleGenerator.py delete mode 100644 infra/logs/collect_logs.sh delete mode 100644 infra/logs/utils.sh diff --git a/deployer/.gitignore b/deployer/.gitignore deleted file mode 100644 index c9b568f..0000000 --- a/deployer/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.pyc -*.swp diff --git a/deployer/deployer.py b/deployer/deployer.py deleted file mode 100755 index 04efe61..0000000 --- a/deployer/deployer.py +++ /dev/null @@ -1,352 +0,0 @@ -#!/usr/bin/env python - -import os -import sys - -PYTHON_PATH = os.path.dirname(os.path.abspath(os.path.normpath(sys.argv[0]))) -sys.path.append(PYTHON_PATH) - -from gevent import monkey -monkey.patch_all() - -import gevent -import sys -import argparse -import yaml -import tempfile -import os -import logging - -LOG = logging.getLogger() -LOG.setLevel(logging.DEBUG) -ch = logging.StreamHandler(sys.stdout) -ch.setLevel(logging.DEBUG) -formatter = logging.Formatter( - '%(asctime)s - %(name)s - %(levelname)s - %(message)s') -ch.setFormatter(formatter) -LOG.addHandler(ch) - - -import helpers.utils as utils -import helpers.maasclient as maasclient 
-import jujuclient -import netaddr -import socket - -from gevent import subprocess -from gevent.queue import Queue, Empty - -from jujuclient import Environment - - -parser = argparse.ArgumentParser(prog="Deployer") -subparsers = parser.add_subparsers(dest="action") - -deploy_parser = subparsers.add_parser('deploy') -teardown_parser = subparsers.add_parser('teardown') - -teardown_parser.add_argument("--search-string", dest="search_string", - type=str, required=True, help="Deploy uuid") -teardown_parser.add_argument("--template", dest="template", - type=str, required=False, help="Juju deployer template") - -deploy_parser.add_argument("--search-string", dest="search_string", - type=str, required=False, help="Deploy uuid") -deploy_parser.add_argument("--template", dest="template", - type=str, required=True, help="Juju deployer template") - - -def exception_handler(green): - LOG.error("Greenlet %r failed with an exception" % green) - sys.exit(1) - - -class MaaSInstanceWatcher(maasclient.Nodes): - - def __init__(self, maas_url, maas_token, queue): - super(MaaSInstanceWatcher, self).__init__(maas_url, maas_token) - self.queue = queue - self.watchers = [] - - def _watch(self, node): - node_state = None - if isinstance(node, maasclient.Node) is False: - raise ValueError("Function got invalid type: %r" % type(node)) - while True: - status = node.substatus() - if node_state != status: - LOG.debug("Node %s changed status to: %s" % (node.data["hostname"], status)) - node_state = status - payload = {"status": status, "instance": node.data["resource_uri"]} - self.queue.put(payload) - if status == maasclient.FAILED_DEPLOYMENT: - return - gevent.sleep(5) - - def start_watcher(self, node): - LOG.debug("Starting watcher for node: %s" % node) - n = self.get(node) - e = gevent.spawn(self._watch, n) - e.link_exception(exception_handler) - self.watchers.append(e) - - -class Deployer(object): - - def __init__(self, options): - self.options = options - self.juju = 
Environment.connect('maas') - self.search_string = options.search_string - self.bundle = self.options.template - #self.bundle_generator = utils.BundleGenerator(self.options) - self.home = os.environ.get("HOME", "/tmp") - self.workdir = os.path.join(self.home, ".deployer") - self.channel = Queue() - self.eventlets = [] - env_config = self.juju.get_env_config() - self.maas_watcher = MaaSInstanceWatcher( - env_config["Config"]["maas-server"], - env_config["Config"]["maas-oauth"], - self.channel) - - def _ensure_dependencies(self): - pkgs = [] - if utils.which("juju-deployer") is None: - utils.add_apt_ppa("ppa:juju/stable") - utils.apt_update() - pkgs.append("juju-deployer") - if len(pkgs) > 0: - utils.install_apt_packages(pkgs) - - def _ensure_workdir(self): - if os.path.isdir(self.workdir) is False: - os.makedirs(self.workdir, 0o700) - - def _run_deployer(self, bundle): - if os.path.isfile(bundle) is False: - raise Exception("No such bundle file: %s" % bundle) - args = [ - "juju-deployer", "--local-mods", "-S", "-c", bundle - ] - subprocess.check_call(args) - - #def _render_yaml(self, project): - # proj = project.split("/")[-1] - # func = getattr(self.bundle_generator, "%s_bundle" % proj) - # if not func: - # raise ValueError( - # "Project %s is not supported by bundler" % project) - # bundle = func() - # bundle_file = os.path.join(self.workdir, self.search_string) - # with open(bundle_file, "w") as fd: - # yaml.dump(bundle, stream=fd, default_flow_style=False, - # allow_unicode=True, encoding=None) - # return bundle_file - - def _start_maas_watcher(self, machine): - """ - poll MaaS API to monitor machine status. 
If it switches to - Failed Deployment, then raise an exception - """ - e = gevent.spawn(self.maas_watcher.start_watcher, machine) - e.link_exception(exception_handler) - self.eventlets.append(e) - - def _consume_events(self): - LOG.debug("Starting Consumer") - while True: - try: - event = self.channel.get_nowait() - if event.get("status") == maasclient.FAILED_DEPLOYMENT: - raise Exception("Node %s entered failed deployment state" % - event.get("instance")) - except Empty: - gevent.sleep(1) - continue - - @utils.exec_retry(retry=5) - def _juju_status(self, *args, **kw): - return self.juju.status(*args, **kw) - - def _get_machines(self, status): - machines = [] - m = status.get("Machines") - if m is None: - return machines - for i in m.keys(): - instanceId = m[i].get("InstanceId") - if instanceId == "pending": - continue - machines.append(m[i].get("InstanceId")) - return machines - - def _get_machine_ids(self, status): - m = status.get("Machines") - if m is None: - return [] - return m.keys() - - def _get_service_names(self, status): - m = status.get("Services") - if m is None: - return [] - return m.keys() - - def _analize_units(self, units, debug=False): - all_active = True - for i in units.keys(): - unit = units[i] - if debug: - LOG.debug( - "Unit %s has status: %r" % (i, unit["Workload"]["Status"])) - if unit["UnitAgent"]["Status"] == "error": - raise Exception("Unit %s is in error state: %s" % - (i, unit["UnitAgent"]["Err"])) - if unit["Workload"]["Status"] == "error": - raise Exception("Unit %s workload is in error state: %s" % - (i, unit["Workload"]["Info"])) - if unit["Err"] is not None: - raise Exception("Unit %s is in error state: %s" % - (i, unit["Err"])) - if unit["Workload"]["Status"] != "active": - all_active = False - return all_active - - def _analize_machines(self, machines): - for i in machines.keys(): - machine = machines.get(i) - if machine["Err"]: - raise Exception("MaaS returned error when allocating %s: %s" % - (i, machine["Err"])) - agent = 
machine.get("Agent") - if agent: - status = agent.get("Status") - info = agent.get("Info") - err = agent.get("Err") - if status == "error" or err: - raise Exception( - "Machine agent is in error state: %r" % info) - - def _resolve_address(self, addr): - try: - netaddr.IPAddress(addr) - return addr - except netaddr.core.AddrFormatError: - return socket.gethostbyname(addr) - - def _write_unit_ips(self, units): - unit_ips = {} - for i in units: - name = i.split("/")[0][:-len("-%s" % self.search_string)].replace('-', "_") - addr = self.juju.get_private_address(i)["PrivateAddress"] - ip = self._resolve_address(addr) - if name in unit_ips: - unit_ips[name] += ",%s" % ip - else: - unit_ips[name] = ip - nodes = os.path.join(os.getcwd(), "nodes") - with open(nodes, "w") as fd: - for i in unit_ips.keys(): - fd.write("%s=%s\n" % (i.upper(), unit_ips[i])) - - def _analize(self, status, debug=False): - """ - Return True if charms have reached active workload state, False if not - raise error any charm reaches error state. - """ - services = status.get("Services") - if services is None: - return False - all_units = {} - for i in services.keys(): - svc = services.get(i) - units = svc.get("Units") - all_units.update(units) - # TODO: only do this if there are changes, not on every iteration. - try: - self._write_unit_ips(all_units) - except jujuclient.EnvError: - LOG.debug("Cound not write unit ips") - all_active = self._analize_units(all_units, debug) - if all_active: - return True - # Juju retains the error returned by the MaaS API in case MaaS - # errored out while the acquire API call was made. In this scenario, - # MaaS will not return a usable node. - machines = status.get("Machines") - if machines is None: - return False - self._analize_machines(machines) - - def _poll_services(self): - """ - This poller works under the assumption that the charms being deployed - have implemented status-set calls that tell us when workload status - changed to active. 
Poll services, units and instances until all units - have workload status set to active, or untill any of them error out. - """ - LOG.debug("Starting poller") - watched_machines = [] - iteration = 0 - while True: - status = self._juju_status(filters=("*%s*" % self.search_string)) - #LOG.debug("%r" % status) - debug = False - if iteration % 1 == 0: - debug = True - all_active = self._analize(status, debug=debug) - if all_active: - break - machines = self._get_machines(status) - diff = set(machines).difference(set(watched_machines)) - new_machines = list(diff) - for i in new_machines: - self._start_maas_watcher(i) - watched_machines.append(i) - iteration += 1 - gevent.sleep(3) - - def _wait_for_teardown(self, machines=[]): - while True: - has_machines = False - status = self._juju_status() - state_machines = status.get("Machines", {}) - for i in machines: - if state_machines.get(i): - has_machines = True - if has_machines is False: - break - gevent.sleep(3) - - def deploy(self): - self._ensure_workdir() - self._ensure_dependencies() - #bundle = self._render_yaml(self.options.zuul_project) - self._run_deployer(self.bundle) - - e = gevent.spawn(self._consume_events) - e.link_exception(exception_handler) - self.eventlets.append(e) - - self._poll_services() - gevent.killall(self.eventlets) - gevent.killall(self.maas_watcher.watchers) - - def teardown(self): - status = self._juju_status(filters=("*%s*" % self.search_string)) - machines = self._get_machine_ids(status) - service_names = self._get_service_names(status) - for i in service_names: - self.juju.destroy_service(i) - self.juju.destroy_machines(machines, force=True) - self._wait_for_teardown(machines) - - -if __name__ == '__main__': - opt = parser.parse_args() - deployer = Deployer(opt) - if opt.action == "deploy": - deployer.deploy() - if opt.action == "teardown": - deployer.teardown() - diff --git a/deployer/helpers/__init__.py b/deployer/helpers/__init__.py deleted file mode 100644 index e69de29..0000000 diff 
--git a/deployer/helpers/maasclient.py b/deployer/helpers/maasclient.py deleted file mode 100644 index 8d8bdbb..0000000 --- a/deployer/helpers/maasclient.py +++ /dev/null @@ -1,205 +0,0 @@ -import oauth.oauth as oauth -import httplib2 -import uuid -import urlparse -import json -import datetime -import logging - -LOG = logging.getLogger() - -DEFAULT = 0 -#: The node has been created and has a system ID assigned to i -NEW = 0 -#: Testing and other commissioning steps are taking place. -COMMISSIONING = 1 -#: The commissioning step failed. -FAILED_COMMISSIONING = 2 -#: The node can't be contacted. -MISSING = 3 -#: The node is in the general pool ready to be deployed. -READY = 4 -#: The node is ready for named deployment. -RESERVED = 5 -#: The node has booted into the operating system of its owner' -#: and is ready for use. -DEPLOYED = 6 -#: The node has been removed from service manually until an ad -#: overrides the retirement. -RETIRED = 7 -#: The node is broken: a step in the node lifecyle failed. -#: More details can be found in the node's event log. -BROKEN = 8 -#: The node is being installed. -DEPLOYING = 9 -#: The node has been allocated to a user and is ready for depl -ALLOCATED = 10 -#: The deployment of the node failed. -FAILED_DEPLOYMENT = 11 -#: The node is powering down after a release request. -RELEASING = 12 -#: The releasing of the node failed. -FAILED_RELEASING = 13 -#: The node is erasing its disks. -DISK_ERASING = 14 -#: The node failed to erase its disks. 
-FAILED_DISK_ERASING = 15 - - - -class MaaSBaseClass(object): - - VERBS = ( - "GET", - "PUT", - "POST", - "DELETE", - ) - - def __init__(self, maas_url, token): - self.maas_url = maas_url - self.url = urlparse.urlparse(self.maas_url) - self.token = token - self._parse_token(token) - - def _parse_token(self, token): - t = token.split(":") - if len(t) != 3: - raise ValueError("Invalid MaaS token") - self.consumer_key = t[0] - self.key = t[1] - self.secret = t[2] - - def _validate_verb(self, verb, body): - if verb not in self.VERBS: - raise ValueError("%s is not supported" % verb) - if verb == "DELETE": - # DELETE requests must have body None - return None - return body - - def _check_response(self, response): - status = response.get("status") - if int(status) > 299: - raise Exception("Request returned status %s" % status) - - @property - def _api_path(self): - uri = "api/1.0/" - return "%s/%s" % (self.url.path.rstrip("/"), uri) - - def _get_resource_uri(self, subresource=None, op=None, params=None): - resource = self.RESOURCE - #LOG.debug("%r --> %r" %(resource, self._api_path)) - if self.RESOURCE.startswith(self._api_path): - resource = self.RESOURCE[len(self._api_path):] - - uri = "api/1.0/%s/" % resource.strip("/") - if subresource: - uri += "%s/" % subresource.strip("/") - if op: - uri = "%s?op=%s" % (uri, op) - if params: - for i in params.keys(): - uri += "&%s=%s" % (i, params[i]) - return uri - - def _dispatch(self, uri, method, body=None): - body = self._validate_verb(method, body) - - resource_tok_string = "oauth_token_secret=%s&oauth_token=%s" % (self.secret, self.key) - resource_token = oauth.OAuthToken.from_string(resource_tok_string) - consumer_token = oauth.OAuthConsumer(self.consumer_key, "") - - oauth_request = oauth.OAuthRequest.from_consumer_and_token( - consumer_token, token=resource_token, http_url=self.maas_url, - parameters={'oauth_nonce': uuid.uuid4().get_hex()}) - - oauth_request.sign_request( - oauth.OAuthSignatureMethod_PLAINTEXT(), 
consumer_token, - resource_token) - - headers = oauth_request.to_header() - url = "%s/%s" % (self.maas_url, uri) - http = httplib2.Http() - LOG.debug("Sending request to: %s" % url) - response, content = http.request(url, method, body=body, headers=headers) - self._check_response(response) - body = json.loads(content) - return body - - -class ResourceMixin(object): - - def _refresh_data(self): - if self._requested is None: - self._data = self._get() - self._requested = datetime.datetime.utcnow() - - delta = datetime.datetime.utcnow() - self._requested - if delta > datetime.timedelta(seconds=30): - self._data = self._get() - self._requested = datetime.datetime.utcnow() - - if self._data is None: - self._data = self._get() - self._requested = datetime.datetime.utcnow() - return - - @property - def data(self): - self._refresh_data() - return self._data - - -class Node(MaaSBaseClass, ResourceMixin): - - def __init__(self, maas_url, maas_token, resource): - super(Node, self).__init__(maas_url, maas_token) - self.RESOURCE = resource - self._data = None - self._requested = None - - def status(self): - self._refresh_data() - status = self.data.get("status") - if status: - return int(status) - - def substatus(self): - self._refresh_data() - status = self.data.get("substatus") - if status: - return int(status) - - def _get(self): - nodes = self._get_resource_uri() - return self._dispatch(nodes, "GET") - - -class Events(MaaSBaseClass, ResourceMixin): - - RESOURCE = "events" - - def get(self, node=None): - params = None - if node: - params = {"id": node} - return self._get(params=params) - - def _get(self, params=None): - events = self._get_resource_uri(op="query", params=params) - print events - return self._dispatch(events, "GET") - - -class Nodes(MaaSBaseClass): - - RESOURCE = "nodes" - - def list(self): - nodes = self._get_resource_uri(op="list") - return self._dispatch(nodes, "GET") - - def get(self, resource): - return Node(self.maas_url, self.token, resource) diff --git 
a/deployer/helpers/utils.py b/deployer/helpers/utils.py deleted file mode 100644 index 23cf175..0000000 --- a/deployer/helpers/utils.py +++ /dev/null @@ -1,186 +0,0 @@ -import os -import platform -import gevent - -from gevent import subprocess - -SYS = platform.system() - - -def exec_retry(retry=5): - def wrap(f): - def wrap_f(*args, **kw): - count = 0 - err = "" - while count < retry: - try: - return f(*args, **kw) - break - except Exception as err: - gevent.sleep(3) - err = err - count += 1 - return f(*args, **kw) - return wrap_f - return wrap - - -def is_exe(path): - if os.path.isfile(path) is False: - return False - if SYS == "Windows": - pathext = os.environ.get("PATHEXT", ".COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC") - for i in pathext.split(os.pathsep): - if path.endswith(i): - return True - else: - if os.access(path, os.X_OK): - return True - return False - - -def which(program): - fpath, fname = os.path.split(program) - if fpath: - if is_exe(program): - return program - else: - for path in os.environ["PATH"].split(os.pathsep): - exe_file = os.path.join(path, program) - if is_exe(exe_file): - return exe_file - return None - - -def add_apt_ppa(ppa): - subprocess.check_call([ - "sudo", "-n", "apt-add-repository", "-y", ppa, - ]) - - -def install_apt_packages(pkgs): - apt = ["sudo", "-n", "apt-get", "-y", "--option=Dpkg::Options::=--force-confold", "install"] - apt.extend(pkgs) - subprocess.check_call(apt) - - -def apt_update(): - subprocess.check_call(["sudo", "-n", "apt-get", "update"]) - - -class BundleGenerator(object): - _AD_GIT_URL = 'https://github.com/cloudbase/active-directory.git' - _DEVSTACK_GIT_URL = 'https://github.com/cloudbase/devstack-charm.git' - _HYPER_V_GIT_URL = 'https://github.com/cloudbase/hyperv-charm' - - def __init__(self, options): - self.options = options - - def _get_non_null_values(self, dictionary): - return dict((key, value) for key, value in dictionary.iteritems() - if value is not None) - - def _get_service(self, 
git_url, charm, nr_units, options): - return {'branch': git_url, - 'charm': charm, - 'num_units': nr_units, - 'options': self._get_non_null_values(options)} - - def _get_ad_service(self, nr_units, domain_name, admin_password, - admin_username=None): - ad_options = {'domain-name': domain_name, - 'administrator': admin_username, - 'password': admin_password} - return self._get_service(self._AD_GIT_URL, - 'local:win2012r2/active-directory', - nr_units, ad_options) - - def _get_hyper_v_service(self, nr_units, download_mirror, extra_python_packages=None, - git_user_email=None, git_user_name=None, wheel_mirror=None, - ppy_mirror=None, vmswitch_name=None, vmswitch_management=None, - ad_user_name=None, enable_freerdp_console=None): - hyper_v_options = {'download-mirror': download_mirror, - 'extra-python-packages': extra_python_packages, - 'git-user-email': git_user_email, - 'git-user-name': git_user_name, - 'wheel-mirror': wheel_mirror, - 'ppy-mirror': ppy_mirror, - 'vmswitch-name': vmswitch_name, - 'vmswitch-management': vmswitch_management, - 'ad-user-name': ad_user_name, - 'enable-freerdp-console': enable_freerdp_console} - return self._get_service(self._HYPER_V_GIT_URL, - 'local:win2012hvr2/hyper-v-ci', - nr_units, hyper_v_options) - - def _get_devstack_service(self, nr_units, vlan_range, heat_image_url, test_image_url, - disabled_services=None, enable_plugins=None, - enabled_services=None, extra_packages=None, - extra_python_packages=None): - devstack_options = {'disabled-services': disabled_services, - 'enable-plugin': enable_plugins, - 'enabled-services': enabled_services, - 'extra-packages': extra_packages, - 'extra-python-packages': extra_python_packages, - 'heat-image-url': heat_image_url, - 'test-image-url': test_image_url, - 'vlan-range': vlan_range} - return self._get_service(self._DEVSTACK_GIT_URL, 'local:trusty/devstack', - nr_units, devstack_options) - - def _get_overrides_options(self, data_ports, external_ports, zuul_branch, zuul_change, - zuul_project, 
zuul_ref, zuul_url): - return {'data-port': data_ports, - 'external-port': external_ports, - 'zuul-branch': zuul_branch, - 'zuul-change': zuul_change, - 'zuul-project': zuul_project, - 'zuul-ref': zuul_ref, - 'zuul-url': zuul_url} - - def nova_bundle(self): - overrides_options = self._get_overrides_options(self.options.data_ports, - self.options.external_ports, self.options.zuul_branch, - self.options.zuul_change, self.options.zuul_project, - self.options.zuul_ref, self.options.zuul_url) - - hyper_v_service = self._get_hyper_v_service( - nr_units=self.options.nr_hyper_v_units, - download_mirror='http://64.119.130.115/bin', - extra_python_packages=self.options.hyper_v_extra_python_packages, - git_user_email='hyper-v_ci@microsoft.com', - git_user_name='Hyper-V CI', - wheel_mirror='http://64.119.130.115/wheels') - - devstack_service = self._get_devstack_service( - nr_units=self.options.nr_devstack_units, - disabled_services=self.options.devstack_disabled_services, - enable_plugins=self.options.devstack_enabled_plugins, - enabled_services=self.options.devstack_enabled_services, - extra_packages=self.options.devstack_extra_packages, - extra_python_packages=self.options.devstack_extra_python_packages, - heat_image_url='http://10.255.251.230/Fedora.vhdx', - test_image_url='http://10.255.251.230/cirros.vhdx', - vlan_range=self.options.vlan_range) - - hyper_v_service_name = 'hyper-v-ci-%s' % self.options.zuul_uuid - devstack_service_name = 'devstack-%s' % self.options.zuul_uuid - bundle_content = { - 'nova': {'overrides': overrides_options, - 'relations': [[devstack_service_name, hyper_v_service_name]], - 'services': {devstack_service_name: devstack_service, - hyper_v_service_name: hyper_v_service} } - } - - if self.options.nr_ad_units > 0: - ad_service_name = "active-directory" - ad_charm = self._get_ad_service( - nr_units=self.options.nr_ad_units, - domain_name=self.options.ad_domain_name, - admin_password=self.options.ad_admin_password) - ad_charm_dict = 
{ad_service_name: ad_charm} - bundle_content['nova']['relations'].append([hyper_v_service_name, - ad_service_name]) - bundle_content['nova']['services'].update(ad_charm_dict) - - return bundle_content diff --git a/deployer/requirements.txt b/deployer/requirements.txt deleted file mode 100644 index 4c0e710..0000000 --- a/deployer/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -gevent==1.0 -gzr+lp:~gabriel-samfira/python-jujuclient/python-jujuclient -pyyaml diff --git a/devstack/bin/get-results-html.sh b/devstack/bin/get-results-html.sh deleted file mode 100755 index 784f961..0000000 --- a/devstack/bin/get-results-html.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -log_file=$1 -results_html_file=$2 - -f=$(tempfile) -cat $log_file | subunit-2to1 > $f -python subunit2html.py $f $results_html_file -rm $f - diff --git a/devstack/bin/get-tests.sh b/devstack/bin/get-tests.sh deleted file mode 100755 index 66c09b5..0000000 --- a/devstack/bin/get-tests.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash -set -e - -array_to_regex() -{ - local ar=(${@}) - local regex="" - - for s in "${ar[@]}" - do - if [ "$regex" ]; then - regex+="\\|" - fi - regex+="^"$(echo $s | sed -e 's/[]\/$*.^|[]/\\&/g') - done - echo $regex -} - -tests_dir=$1 - -BASEDIR=$(dirname $0) - -include_tests_file=$2 -exclude_tests_file=$3 -isolated_tests_file=$4 - -include_tests=(`awk 'NF && $1!~/^#/' $include_tests_file`) - -if [ -f "$exclude_tests_file" ]; then - exclude_tests=(`awk 'NF && $1!~/^#/' $exclude_tests_file`) -fi - -if [ -f "$isolated_tests_file" ]; then - isolated_tests=(`awk 'NF && $1!~/^#/' $isolated_tests_file`) -fi - -exclude_tests=( ${exclude_tests[@]} ${isolated_tests[@]} ) - -include_regex=$(array_to_regex ${include_tests[@]}) -exclude_regex=$(array_to_regex ${exclude_tests[@]}) - -if [ ! 
"$exclude_regex" ]; then - exclude_regex='^$' -fi - -cd $tests_dir -testr list-tests | grep $include_regex | grep -v $exclude_regex diff --git a/devstack/bin/parallel-test-runner.sh b/devstack/bin/parallel-test-runner.sh deleted file mode 100755 index 945b2c8..0000000 --- a/devstack/bin/parallel-test-runner.sh +++ /dev/null @@ -1,141 +0,0 @@ -#!/bin/bash - -# Make sure we kill the entire process tree when exiting -trap 'kill 0' SIGINT SIGTERM - -function run_test_retry(){ - local tests_file=$1 - local tmp_log_file=$2 - local i=0 - local exit_code=0 - - pushd . > /dev/null - cd $tests_dir - - while : ; do - > $tmp_log_file - testr run --subunit --load-list=$tests_file > $tmp_log_file 2>&1 - subunit-stats $tmp_log_file > /dev/null - exit_code=$? - ((i++)) - ( [ $exit_code -eq 0 ] || [ $i -ge $max_attempts ] ) && break - echo "Test $tests_file failed. Retrying count: $i" - done - - popd > /dev/null - - echo $exit_code -} - -function get_tests_range() { - local i=$1 - if [ $i -lt ${#tests[@]} ]; then - local test=${tests[$i]} - local test_class=${test%.*} - local j=$i - if [ $run_isolated -eq 0 ]; then - for test in ${tests[@]:$((i+1))}; do - local test_class_match=${test%.*} - if [ "$test_class" == "$test_class_match" ]; then - ((j++)) - else - break - fi - done - fi - - echo $i $j - fi -} - -function get_next_test_idx_range() { - ( - flock -x 200 - local test_idx=$(<$cur_test_idx_file) - local test_idx_range=( $(get_tests_range $test_idx) ) - - if [ ${#test_idx_range[@]} -gt 0 ]; then - test_idx=${test_idx_range[1]} - ((test_idx++)) - echo $test_idx > $cur_test_idx_file - echo ${test_idx_range[@]} - fi - ) 200>$lock_file_1 -} - -function parallel_test_runner() { - local runner_id=$1 - while : ; do - local test_idx_range=( $(get_next_test_idx_range) ) - - if [ ${#test_idx_range[@]} -eq 0 ]; then - break - fi - - local range_start=${test_idx_range[0]} - local range_end=${test_idx_range[1]} - local tmp_tests_file=$(tempfile) - local l=$((range_end-range_start+1)) - - 
for test in ${tests[@]:$range_start:$l}; do - echo $test >> $tmp_tests_file - done - - local tmp_log_file="$tmp_log_file_base"_"$range_start" - - echo "Test runner $runner_id is starting tests from $((range_start+1)) to $((range_end+1)) out of ${#tests[@]}:" - cat $tmp_tests_file - echo - - local test_exit_code=$(run_test_retry $tmp_tests_file $tmp_log_file) - rm $tmp_tests_file - - echo "Test runner $runner_id finished tests from $((range_start+1)) to $((range_end+1)) out of ${#tests[@]} with exit code: $test_exit_code" - done -} - - -tests_file=$1 -tests_dir=$2 -log_file=$3 -max_parallel_tests=${4:-10} -max_attempts=${5:-5} -run_isolated=${6:-0} - -tests=(`awk '{print}' $tests_file`) - -cur_test_idx_file=$(tempfile) -echo 0 > $cur_test_idx_file - -lock_file_1=$(tempfile) -tmp_log_file_base=$(tempfile) - -pids=() -for i in $(seq 1 $max_parallel_tests); do - parallel_test_runner $i & - pids+=("$!") -done - -for pid in ${pids[@]}; do - wait $pid -done - -rm $cur_test_idx_file - -> $log_file -for i in $(seq 0 $((${#tests[@]}-1))); do - tmp_log_file="$tmp_log_file_base"_"$i" - if [ -f "$tmp_log_file" ]; then - cat $tmp_log_file >> $log_file - rm $tmp_log_file - fi -done - -rm $tmp_log_file_base -rm $lock_file_1 - -echo "Test execution completed in $SECONDS seconds." - -subunit-stats $log_file > /dev/null -exit $? 
- diff --git a/devstack/bin/run-all-tests.sh b/devstack/bin/run-all-tests.sh deleted file mode 100755 index f5edd72..0000000 --- a/devstack/bin/run-all-tests.sh +++ /dev/null @@ -1,108 +0,0 @@ -#!/bin/bash - -function help() { - echo "Required parameters:" - echo " --include-file: the tempest test groups to be executed" - echo " --exclude-file: tempest tests that have to be excluded" - echo " --tests-dir: tempest execution folder" - echo "Optional parameters:" - echo " --isolated-file: tempest tests that require to be executed isolated" - echo " --parallel-tests: number of tempest tests to run in parallel (DEFAULT: 4)" - echo " --max-attempts: number of retries if a test fails (DEFAULT: 2)" - echo " --log-file: name of the tempest run log file (including full path)" - echo " --results-html-file: name of the html results file (including full path)" -} - -while [ $# -gt 0 ] -do - case $1 in - --include-file) - INCLUDE_FILE=$2 - shift;; - --exclude-file) - EXCLUDE_FILE=$2 - shift;; - --isolated-file) - ISOLATED_FILE=$2 - shift;; - --tests-dir) - TESTS_DIR=$2 - shift;; - --parallel-tests) - PARALLEL_TESTS=$2 - shift;; - --max-attempts) - MAX_ATTEMPTS=$2 - shift;; - --log-file) - LOG_FILE=$2 - shift;; - --results-html-file) - RESULTS_HTML_FILE=$2 - shift;; - *) - echo "no such option" - help - esac - shift -done - -if [ -z "$INCLUDE_FILE" ]; then echo "tempest include file must be provided"; exit 1; fi -if [ -z "$EXCLUDE_FILE" ]; then echo "tempest exclude file must be provided"; exit 1; fi -if [ -z "$TESTS_DIR" ]; then echo "tempest execution folder must be provided"; exit 1; fi -if [ -z "$PARALLEL_TESTS" ]; then PARALLEL_TESTS=4; fi -if [ -z "$MAX_ATTEMPTS" ]; then MAX_ATTEMPTS=2; fi -if [ -z "$LOG_FILE" ]; then LOG_FILE="/home/ubuntu/tempest/subunit-output.log"; fi -if [ -z "$RESULTS_HTML_FILE" ]; then RESULTS_HTML_FILE="/home/ubuntu/tempest/results.html"; fi - -BASEDIR=$(dirname $0) - -pushd $BASEDIR - -. $BASEDIR/utils.sh - -TESTS_FILE=$(tempfile) - -. 
$TESTS_DIR/.tox/venv/bin/activate - -$BASEDIR/get-tests.sh $TESTS_DIR $INCLUDE_FILE $EXCLUDE_FILE $ISOLATED_FILE > $TESTS_FILE - -echo "Running tests from: $TESTS_FILE" - -if [ ! -d "$TESTS_DIR/.testrepository" ]; then - push_dir - cd $TESTS_DIR - echo "Initializing testr" - testr init - pop_dir -fi - -$BASEDIR/parallel-test-runner.sh $TESTS_FILE $TESTS_DIR $LOG_FILE \ - $PARALLEL_TESTS $MAX_ATTEMPTS || true - -if [ -f "$ISOLATED_FILE" ]; then - echo "Running isolated tests from: $ISOLATED_FILE" - log_tmp=$(tempfile) - $BASEDIR/parallel-test-runner.sh $ISOLATED_FILE $TESTS_DIR $log_tmp \ - $PARALLEL_TESTS $MAX_ATTEMPTS 1 || true - - cat $log_tmp >> $LOG_FILE - rm $log_tmp -fi - -rm $TESTS_FILE - -deactivate - -echo "Generating HTML report..." -$BASEDIR/get-results-html.sh $LOG_FILE $RESULTS_HTML_FILE - -subunit-stats $LOG_FILE > /dev/null -exit_code=$? - -echo "Total execution time: $SECONDS seconds." - -popd - -exit $exit_code - diff --git a/devstack/bin/run.sh b/devstack/bin/run.sh deleted file mode 100755 index 4ad8ea4..0000000 --- a/devstack/bin/run.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -set -e - -BASEDIR=$(dirname $0) - -. $BASEDIR/utils.sh - - - - - - -tempest_dir="/opt/stack/tempest" -test_config_dir"$tempest_dir/config -test_logs_dir="$tempest_dir/logs" -subunit_log_file="subunit.log" -html_results_file="results.html" -max_parallel_tests=4 -max_attempts=3 - -project=$(basename $ZUUL_PROJECT) -filters_location="/home/ubuntu/$project-ci/devstack/tests" -include_file="$filters_location/included_tests.txt" -exclude_file="$filters_location/excluded_tests.txt" -isolated_file="$filters_location/isolated_tests.txt" - -$log_dir="/home/ubuntu/tempest" -if [ ! 
-d $log_dir ]; then mkdir -p $log_dir; fi - -$BASEDIR/run-all-tests.sh --tests-dir $tempest_dir \ - --parallel-tests $max_parallel_tests \ - --max-attempts $max_attempts \ - --log-file "$log_dir/$subunit_log_file" \ - --results-html-file "$log_dir/$html_results_file" \ - --include-file - -- exclude-file - --isolated-file - > $test_logs_dir/out.txt 2> $test_logs_dir/err.txt \ - || has_failed_tests=1 - -subunit-stats --no-passthrough "$log_dir/$subunit_log_file" || true - -<< 'TBD' - copy_devstack_config_files "$test_config_dir/devstack" - - for host_name in ${host_names[@]}; - do - exec_with_retry 15 2 get_win_host_config_files $host_name "$test_config_dir/$host_name" - exec_with_retry 5 0 get_win_system_info_log $host_name "$test_logs_dir/$host_name/systeminfo.log" - exec_with_retry 5 0 get_win_hotfixes_log $host_name "$test_logs_dir/$host_name/hotfixes.log" - exec_with_retry 15 2 get_win_host_log_files $host_name "$test_logs_dir/$host_name" - done - - echo "Removing symlinks from logs" - find "$test_logs_dir/" -type l -delete - echo "Compressing log files" - find "$test_logs_dir/" -name "*.log" -exec gzip {} \; -TBD - -exit $has_failed_tests diff --git a/devstack/bin/subunit2html.py b/devstack/bin/subunit2html.py deleted file mode 100755 index 96c289f..0000000 --- a/devstack/bin/subunit2html.py +++ /dev/null @@ -1,727 +0,0 @@ -#!/usr/bin/python -""" -Utility to convert a subunit stream to an html results file. -Code is adapted from the pyunit Html test runner at -http://tungwaiyip.info/software/HTMLTestRunner.html - -Takes two arguments. First argument is path to subunit log file, second -argument is path of desired output file. Second argument is optional, -defaults to 'results.html'. - -Original HTMLTestRunner License: ------------------------------------------------------------------------- -Copyright (c) 2004-2007, Wai Yip Tung -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -* Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -* Neither the name Wai Yip Tung nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS -IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER -OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" - -import collections -import datetime -import io -import sys -import traceback -from xml.sax import saxutils - -import subunit -import testtools - -__version__ = '0.1' - - -class TemplateData(object): - """ - Define a HTML template for report customerization and generation. 
- - Overall structure of an HTML report - - HTML - +------------------------+ - | | - | | - | | - | STYLESHEET | - | +----------------+ | - | | | | - | +----------------+ | - | | - | | - | | - | | - | | - | HEADING | - | +----------------+ | - | | | | - | +----------------+ | - | | - | REPORT | - | +----------------+ | - | | | | - | +----------------+ | - | | - | ENDING | - | +----------------+ | - | | | | - | +----------------+ | - | | - | | - | | - +------------------------+ - """ - - STATUS = { - 0: 'pass', - 1: 'fail', - 2: 'error', - 3: 'skip', - } - - DEFAULT_TITLE = 'Unit Test Report' - DEFAULT_DESCRIPTION = '' - - # ------------------------------------------------------------------------ - # HTML Template - - HTML_TMPL = r""" - - - - %(title)s - - - %(stylesheet)s - - - - -%(heading)s -%(report)s -%(ending)s - - - -""" - # variables: (title, generator, stylesheet, heading, report, ending) - - # ------------------------------------------------------------------------ - # Stylesheet - # - # alternatively use a for external style sheet, e.g. - # - - STYLESHEET_TMPL = """ - -""" - - # ------------------------------------------------------------------------ - # Heading - # - - HEADING_TMPL = """
-

%(title)s

-%(parameters)s -

%(description)s

-
- -""" # variables: (title, parameters, description) - - HEADING_ATTRIBUTE_TMPL = """ -

%(name)s: %(value)s

-""" # variables: (name, value) - - # ------------------------------------------------------------------------ - # Report - # - - REPORT_TMPL = """ -

Show -Summary -Failed -All -

- ---------- - - - - - - - - - - -%(test_list)s - - - - - - - - - - -
Test Group/Test caseCountPassFailErrorSkipView
Total%(count)s%(Pass)s%(fail)s%(error)s%(skip)s  
-""" # variables: (test_list, count, Pass, fail, error) - - REPORT_CLASS_TMPL = r""" - - %(desc)s - %(count)s - %(Pass)s - %(fail)s - %(error)s - %(skip)s - Detail - - -""" # variables: (style, desc, count, Pass, fail, error, cid) - - REPORT_TEST_WITH_OUTPUT_TMPL = r""" - -
%(desc)s
- - - - - %(status)s - - - - - - -""" # variables: (tid, Class, style, desc, status) - - REPORT_TEST_NO_OUTPUT_TMPL = r""" - -
%(desc)s
- %(status)s - -""" # variables: (tid, Class, style, desc, status) - - REPORT_TEST_OUTPUT_TMPL = r""" -%(id)s: %(output)s -""" # variables: (id, output) - - # ------------------------------------------------------------------------ - # ENDING - # - - ENDING_TMPL = """
 
""" - -# -------------------- The end of the Template class ------------------- - - -class ClassInfoWrapper(object): - def __init__(self, name, mod): - self.name = name - self.mod = mod - - def __repr__(self): - return "%s" % (self.name) - - -class HtmlOutput(testtools.TestResult): - """Output test results in html.""" - - def __init__(self, html_file='result.html'): - super(HtmlOutput, self).__init__() - self.success_count = 0 - self.failure_count = 0 - self.error_count = 0 - self.skip_count = 0 - self.result = [] - self.html_file = html_file - - def addSuccess(self, test): - self.success_count += 1 - output = test.shortDescription() - if output is None: - output = test.id() - self.result.append((0, test, output, '')) - - def addSkip(self, test, err): - output = test.shortDescription() - if output is None: - output = test.id() - self.skip_count += 1 - self.result.append((3, test, output, '')) - - def addError(self, test, err): - output = test.shortDescription() - if output is None: - output = test.id() - # Skipped tests are handled by SkipTest Exceptions. 
- #if err[0] == SkipTest: - # self.skip_count += 1 - # self.result.append((3, test, output, '')) - else: - self.error_count += 1 - _exc_str = self.formatErr(err) - self.result.append((2, test, output, _exc_str)) - - def addFailure(self, test, err): - print(test) - self.failure_count += 1 - _exc_str = self.formatErr(err) - output = test.shortDescription() - if output is None: - output = test.id() - self.result.append((1, test, output, _exc_str)) - - def formatErr(self, err): - exctype, value, tb = err - return ''.join(traceback.format_exception(exctype, value, tb)) - - def stopTestRun(self): - super(HtmlOutput, self).stopTestRun() - self.stopTime = datetime.datetime.now() - report_attrs = self._getReportAttributes() - generator = 'subunit2html %s' % __version__ - heading = self._generate_heading(report_attrs) - report = self._generate_report() - ending = self._generate_ending() - output = TemplateData.HTML_TMPL % dict( - title=saxutils.escape(TemplateData.DEFAULT_TITLE), - generator=generator, - stylesheet=TemplateData.STYLESHEET_TMPL, - heading=heading, - report=report, - ending=ending, - ) - if self.html_file: - with open(self.html_file, 'wb') as html_file: - html_file.write(output.encode('utf8')) - - def _getReportAttributes(self): - """Return report attributes as a list of (name, value).""" - status = [] - if self.success_count: - status.append('Pass %s' % self.success_count) - if self.failure_count: - status.append('Failure %s' % self.failure_count) - if self.error_count: - status.append('Error %s' % self.error_count) - if self.skip_count: - status.append('Skip %s' % self.skip_count) - if status: - status = ' '.join(status) - else: - status = 'none' - return [ - ('Status', status), - ] - - def _generate_heading(self, report_attrs): - a_lines = [] - for name, value in report_attrs: - line = TemplateData.HEADING_ATTRIBUTE_TMPL % dict( - name=saxutils.escape(name), - value=saxutils.escape(value), - ) - a_lines.append(line) - heading = TemplateData.HEADING_TMPL % 
dict( - title=saxutils.escape(TemplateData.DEFAULT_TITLE), - parameters=''.join(a_lines), - description=saxutils.escape(TemplateData.DEFAULT_DESCRIPTION), - ) - return heading - - def _generate_report(self): - rows = [] - sortedResult = self._sortResult(self.result) - for cid, (cls, cls_results) in enumerate(sortedResult): - # subtotal for a class - np = nf = ne = ns = 0 - for n, t, o, e in cls_results: - if n == 0: - np += 1 - elif n == 1: - nf += 1 - elif n == 2: - ne += 1 - else: - ns += 1 - - # format class description - if cls.mod == "__main__": - name = cls.name - else: - name = "%s" % (cls.name) - doc = cls.__doc__ and cls.__doc__.split("\n")[0] or "" - desc = doc and '%s: %s' % (name, doc) or name - - row = TemplateData.REPORT_CLASS_TMPL % dict( - style=(ne > 0 and 'errorClass' or nf > 0 - and 'failClass' or 'passClass'), - desc = desc, - count = np + nf + ne + ns, - Pass = np, - fail = nf, - error = ne, - skip = ns, - cid = 'c%s' % (cid + 1), - ) - rows.append(row) - - for tid, (n, t, o, e) in enumerate(cls_results): - self._generate_report_test(rows, cid, tid, n, t, o, e) - - report = TemplateData.REPORT_TMPL % dict( - test_list=''.join(rows), - count=str(self.success_count + self.failure_count + - self.error_count + self.skip_count), - Pass=str(self.success_count), - fail=str(self.failure_count), - error=str(self.error_count), - skip=str(self.skip_count), - ) - return report - - def _sortResult(self, result_list): - # unittest does not seems to run in any particular order. - # Here at least we want to group them together by class. 
- rmap = {} - classes = [] - for n, t, o, e in result_list: - if hasattr(t, '_tests'): - for inner_test in t._tests: - self._add_cls(rmap, classes, inner_test, - (n, inner_test, o, e)) - else: - self._add_cls(rmap, classes, t, (n, t, o, e)) - classort = lambda s: str(s) - sortedclasses = sorted(classes, key=classort) - r = [(cls, rmap[str(cls)]) for cls in sortedclasses] - return r - - def _add_cls(self, rmap, classes, test, data_tuple): - if hasattr(test, 'test'): - test = test.test - if test.__class__ == subunit.RemotedTestCase: - #print(test._RemotedTestCase__description.rsplit('.', 1)[0]) - cl = test._RemotedTestCase__description.rsplit('.', 1)[0] - mod = cl.rsplit('.', 1)[0] - cls = ClassInfoWrapper(cl, mod) - else: - cls = ClassInfoWrapper(str(test.__class__), str(test.__module__)) - if not str(cls) in rmap: - rmap[str(cls)] = [] - classes.append(cls) - rmap[str(cls)].append(data_tuple) - - def _generate_report_test(self, rows, cid, tid, n, t, o, e): - # e.g. 'pt1.1', 'ft1.1', etc - # ptx.x for passed/skipped tests and ftx.x for failed/errored tests. 
- has_output = bool(o or e) - tid = ((n == 0 or n == 3) and - 'p' or 'f') + 't%s.%s' % (cid + 1, tid + 1) - name = t.id().split('.')[-1] - # if shortDescription is not the function name, use it - if t.shortDescription().find(name) == -1: - doc = t.shortDescription() - else: - doc = None - desc = doc and ('%s: %s' % (name, doc)) or name - tmpl = (has_output and TemplateData.REPORT_TEST_WITH_OUTPUT_TMPL - or TemplateData.REPORT_TEST_NO_OUTPUT_TMPL) - - script = TemplateData.REPORT_TEST_OUTPUT_TMPL % dict( - id=tid, - output=saxutils.escape(o + e), - ) - - row = tmpl % dict( - tid=tid, - Class=((n == 0 or n == 3) and 'hiddenRow' or 'none'), - style=(n == 2 and 'errorCase' or - (n == 1 and 'failCase' or 'none')), - desc=desc, - script=script, - status=TemplateData.STATUS[n], - ) - rows.append(row) - if not has_output: - return - - def _generate_ending(self): - return TemplateData.ENDING_TMPL - - def startTestRun(self): - super(HtmlOutput, self).startTestRun() - - -class FileAccumulator(testtools.StreamResult): - - def __init__(self): - super(FileAccumulator, self).__init__() - self.route_codes = collections.defaultdict(io.BytesIO) - - def status(self, **kwargs): - if kwargs.get('file_name') != 'stdout': - return - file_bytes = kwargs.get('file_bytes') - if not file_bytes: - return - route_code = kwargs.get('route_code') - stream = self.route_codes[route_code] - stream.write(file_bytes) - - -def main(): - if len(sys.argv) < 2: - print("Need at least one argument: path to subunit log.") - exit(1) - subunit_file = sys.argv[1] - if len(sys.argv) > 2: - html_file = sys.argv[2] - else: - html_file = 'results.html' - - html_result = HtmlOutput(html_file) - stream = open(subunit_file, 'rb') - - # Feed the subunit stream through both a V1 and V2 parser. - # Depends on having the v2 capable libraries installed. - # First V2. - # Non-v2 content and captured non-test output will be presented as file - # segments called stdout. 
- suite = subunit.ByteStreamToStreamResult(stream, non_subunit_name='stdout') - # The HTML output code is in legacy mode. - result = testtools.StreamToExtendedDecorator(html_result) - # Divert non-test output - accumulator = FileAccumulator() - result = testtools.StreamResultRouter(result) - result.add_rule(accumulator, 'test_id', test_id=None) - result.startTestRun() - suite.run(result) - # Now reprocess any found stdout content as V1 subunit - for bytes_io in accumulator.route_codes.values(): - bytes_io.seek(0) - suite = subunit.ProtocolTestCase(bytes_io) - suite.run(html_result) - result.stopTestRun() - - -if __name__ == '__main__': - main() diff --git a/devstack/bin/utils.sh b/devstack/bin/utils.sh deleted file mode 100755 index 0cedacd..0000000 --- a/devstack/bin/utils.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/bin/bash -set -e - -BASEDIR=$(dirname $0) - -function push_dir() { - pushd . > /dev/null -} - -function pop_dir() { - popd > /dev/null -} - -function exec_with_retry () { - local max_retries=$1 - local interval=${2} - local cmd=${@:3} - - local counter=0 - while [ $counter -lt $max_retries ]; do - local exit_code=0 - eval $cmd || exit_code=$? 
- if [ $exit_code -eq 0 ]; then - return 0 - fi - let counter=counter+1 - - if [ -n "$interval" ]; then - sleep $interval - fi - done - return $exit_code -} - - -function copy_devstack_config_files() { - local dest_dir=$1 - - mkdir -p $dest_dir - - cp -r /etc/ceilometer $dest_dir - cp -r /etc/cinder $dest_dir - cp -r /etc/glance $dest_dir - cp -r /etc/heat $dest_dir - cp -r /etc/keystone $dest_dir - cp -r /etc/nova $dest_dir - cp -r /etc/neutron $dest_dir - cp -r /etc/swift $dest_dir - - mkdir $dest_dir/tempest - check_copy_dir $tempest_dir/etc $dest_dir/tempest -} - -function copy_devstack_log_files() { - local dest_dir=$1 - - mkdir -p $dest_dir - - cp -r /opt/stack/logs $dest_dir - cp -r /etc/cinder $dest_dir - cp -r /etc/glance $dest_dir - cp -r /etc/heat $dest_dir - cp -r /etc/keystone $dest_dir - cp -r /etc/nova $dest_dir - cp -r /etc/neutron $dest_dir - cp -r /etc/swift $dest_dir - - mkdir $dest_dir/tempest - check_copy_dir $tempest_dir/etc $dest_dir/tempest -} - -function copy_devstack_config_files() { - local dest_dir=$1 - - mkdir -p $dest_dir - - cp -r /etc/ceilometer $dest_dir - cp -r /etc/cinder $dest_dir - cp -r /etc/glance $dest_dir - cp -r /etc/heat $dest_dir - cp -r /etc/keystone $dest_dir - cp -r /etc/nova $dest_dir - cp -r /etc/neutron $dest_dir - cp -r /etc/swift $dest_dir - - mkdir $dest_dir/tempest - check_copy_dir $tempest_dir/etc $dest_dir/tempest -} - -function mount_windows_share() { - local host=$1 - local user=$2 - local pass=$3 - local domain=$4 - - mkdir -p /mnt/$host - sudo mount -t cifs //$host/C$ /mnt/ -o username=$user,password=$pass,domain=$domain -} - -function umount_windows_share(){ - local host=$1 - - sudo umount /mnt/$host -} - diff --git a/devstack/tests/cinder/excluded_tests.txt b/devstack/tests/cinder/excluded_tests.txt deleted file mode 100644 index e69de29..0000000 diff --git a/devstack/tests/cinder/included_tests.txt b/devstack/tests/cinder/included_tests.txt deleted file mode 100644 index 4833bbb..0000000 --- 
a/devstack/tests/cinder/included_tests.txt +++ /dev/null @@ -1 +0,0 @@ -volume diff --git a/devstack/tests/neutron/excluded_tests.txt b/devstack/tests/neutron/excluded_tests.txt deleted file mode 100644 index e69de29..0000000 diff --git a/devstack/tests/neutron/included_tests.txt b/devstack/tests/neutron/included_tests.txt deleted file mode 100644 index 60e7f84..0000000 --- a/devstack/tests/neutron/included_tests.txt +++ /dev/null @@ -1 +0,0 @@ -tempest.api.network diff --git a/devstack/tests/nova/excluded_tests.txt b/devstack/tests/nova/excluded_tests.txt deleted file mode 100644 index bd268d1..0000000 --- a/devstack/tests/nova/excluded_tests.txt +++ /dev/null @@ -1,30 +0,0 @@ -# Hyper-V does not support attaching vNics to a running instance before Threshold -# On Threshold it is supported, requiring Generation 2 -tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesTestJSON.test_create_list_show_delete_interfaces -tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesTestXML.test_create_list_show_delete_interfaces -tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_hotplug_nic - -# Unsupported consoles (Hyper-V uses RDP, not VNC or SPICE) -tempest.api.compute.v3.servers.test_server_actions.ServerActionsV3Test.test_get_spice_console -tempest.api.compute.v3.servers.test_server_actions.ServerActionsV3Test.test_get_vnc_console - -# See Neutron bug https://bugs.launchpad.net/neutron/+bug/1277285 -# Note that corresponding JSON tests pass -tempest.api.network.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestXML.test_add_remove_network_from_dhcp_agent -tempest.api.network.admin.test_l3_agent_scheduler.L3AgentSchedulerTestXML. -tempest.api.network.admin.test_agent_management.AgentManagementTestXML. 
- -# See Tempest bug: https://bugs.launchpad.net/tempest/+bug/1363986 -tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_cross_tenant_traffic -tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_multiple_security_groups - -# Fails on DevStack. Not related to Hyper-V -tempest.scenario.test_load_balancer_basic.TestLoadBalancerBasic.test_load_balancer_basic - -# Fails on DevStack. requires investigation. -tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_rebuild - -# Fails on DevStack. requires investigation. -# Note that corresponding XML tests pass -tempest.api.compute.admin.test_simple_tenant_usage.TenantUsagesTestJSON.test_get_usage_tenant -tempest.api.compute.admin.test_simple_tenant_usage.TenantUsagesTestJSON.test_get_usage_tenant_with_non_admin_user diff --git a/devstack/tests/nova/included_tests.txt b/devstack/tests/nova/included_tests.txt deleted file mode 100644 index df80fb2..0000000 --- a/devstack/tests/nova/included_tests.txt +++ /dev/null @@ -1 +0,0 @@ -tempest. 
diff --git a/devstack/tests/nova/isolated_tests.txt b/devstack/tests/nova/isolated_tests.txt deleted file mode 100644 index 3d77ec2..0000000 --- a/devstack/tests/nova/isolated_tests.txt +++ /dev/null @@ -1,38 +0,0 @@ -tempest.api.compute.admin.test_migrations.MigrationsAdminTest.test_list_migrations_in_flavor_resize_situation -tempest.api.compute.admin.test_servers_negative.ServersAdminNegativeTestJSON.test_resize_server_using_overlimit_ram -tempest.api.compute.admin.test_servers_negative.ServersAdminNegativeTestJSON.test_resize_server_using_overlimit_vcpus -tempest.api.compute.admin.test_servers_negative.ServersAdminNegativeTestXML.test_resize_server_using_overlimit_ram -tempest.api.compute.admin.test_servers_negative.ServersAdminNegativeTestXML.test_resize_server_using_overlimit_vcpus -tempest.api.compute.servers.test_delete_server.DeleteServersTestJSON.test_delete_server_while_in_verify_resize_state -tempest.api.compute.servers.test_delete_server.DeleteServersTestXML.test_delete_server_while_in_verify_resize_state -tempest.api.compute.servers.test_disk_config.ServerDiskConfigTestJSON.test_resize_server_from_auto_to_manual -tempest.api.compute.servers.test_disk_config.ServerDiskConfigTestJSON.test_resize_server_from_manual_to_auto -tempest.api.compute.servers.test_disk_config.ServerDiskConfigTestXML.test_resize_server_from_auto_to_manual -tempest.api.compute.servers.test_disk_config.ServerDiskConfigTestXML.test_resize_server_from_manual_to_auto -tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_confirm -tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_confirm_from_stopped -tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_revert -tempest.api.compute.servers.test_server_actions.ServerActionsTestXML.test_resize_server_confirm -tempest.api.compute.servers.test_server_actions.ServerActionsTestXML.test_resize_server_confirm_from_stopped 
-tempest.api.compute.servers.test_server_actions.ServerActionsTestXML.test_resize_server_revert -tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_resize_nonexistent_server -tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_resize_server_with_non_existent_flavor -tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_resize_server_with_null_flavor -tempest.api.compute.servers.test_servers_negative.ServersNegativeTestXML.test_resize_nonexistent_server -tempest.api.compute.servers.test_servers_negative.ServersNegativeTestXML.test_resize_server_with_non_existent_flavor -tempest.api.compute.servers.test_servers_negative.ServersNegativeTestXML.test_resize_server_with_null_flavor -tempest.api.compute.test_authorization.AuthorizationTestJSON.test_resize_server_for_alt_account_fails -tempest.api.compute.test_authorization.AuthorizationTestXML.test_resize_server_for_alt_account_fails -tempest.api.compute.v3.admin.test_migrations.MigrationsAdminV3Test.test_list_migrations_in_flavor_resize_situation -tempest.api.compute.v3.admin.test_servers_negative.ServersAdminNegativeV3Test.test_resize_server_using_overlimit_ram -tempest.api.compute.v3.admin.test_servers_negative.ServersAdminNegativeV3Test.test_resize_server_using_overlimit_vcpus -tempest.api.compute.v3.servers.test_delete_server.DeleteServersV3Test.test_delete_server_while_in_verify_resize_state -tempest.api.compute.v3.servers.test_server_actions.ServerActionsV3Test.test_resize_server_confirm -tempest.api.compute.v3.servers.test_server_actions.ServerActionsV3Test.test_resize_server_confirm_from_stopped -tempest.api.compute.v3.servers.test_server_actions.ServerActionsV3Test.test_resize_server_revert -tempest.api.compute.v3.servers.test_servers_negative.ServersNegativeV3Test.test_resize_nonexistent_server -tempest.api.compute.v3.servers.test_servers_negative.ServersNegativeV3Test.test_resize_server_with_non_existent_flavor 
-tempest.api.compute.v3.servers.test_servers_negative.ServersNegativeV3Test.test_resize_server_with_null_flavor -tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_resize -tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_resize_server_confirm - diff --git a/infra/deployer/BundleGenerator.py b/infra/deployer/BundleGenerator.py deleted file mode 100644 index 109c2b6..0000000 --- a/infra/deployer/BundleGenerator.py +++ /dev/null @@ -1,116 +0,0 @@ - -class BundleGenerator(object): - _AD_GIT_URL = 'https://github.com/cloudbase/active-directory.git' - _DEVSTACK_GIT_URL = 'https://github.com/cloudbase/devstack-charm.git' - _HYPER_V_GIT_URL = 'https://github.com/cloudbase/hyperv-charm' - - def __init__(self, options): - self.options = options - - def _get_non_null_values(self, dictionary): - return dict((key, value) for key, value in dictionary.iteritems() - if value is not None) - - def _get_service(self, git_url, charm, nr_units, options): - return {'branch': git_url, - 'charm': charm, - 'num_units': nr_units, - 'options': self._get_non_null_values(options)} - - def _get_ad_service(self, nr_units, domain_name, admin_password, - admin_username=None): - ad_options = {'domain-name': domain_name, - 'administrator': admin_username, - 'password': admin_password} - return self._get_service(self._AD_GIT_URL, - 'local:win2012r2/active-directory', - nr_units, ad_options) - - def _get_hyper_v_service(self, nr_units, download_mirror, extra_python_packages=None, - git_user_email=None, git_user_name=None, wheel_mirror=None, - ppy_mirror=None, vmswitch_name=None, vmswitch_management=None, - ad_user_name=None, enable_freerdp_console=None): - hyper_v_options = {'download-mirror': download_mirror, - 'extra-python-packages': extra_python_packages, - 'git-user-email': git_user_email, - 'git-user-name': git_user_name, - 'wheel-mirror': wheel_mirror, - 'ppy-mirror': ppy_mirror, - 'vmswitch-name': vmswitch_name, - 
'vmswitch-management': vmswitch_management, - 'ad-user-name': ad_user_name, - 'enable-freerdp-console': enable_freerdp_console} - return self._get_service(self._HYPER_V_GIT_URL, - 'local:win2012hvr2/hyper-v-ci', - nr_units, hyper_v_options) - - def _get_devstack_service(self, nr_units, vlan_range, heat_image_url, test_image_url, - disabled_services=None, enable_plugins=None, - enabled_services=None, extra_packages=None, - extra_python_packages=None): - devstack_options = {'disabled-services': disabled_services, - 'enable-plugin': enable_plugins, - 'enabled-services': enabled_services, - 'extra-packages': extra_packages, - 'extra-python-packages': extra_python_packages, - 'heat-image-url': heat_image_url, - 'test-image-url': test_image_url, - 'vlan-range': vlan_range} - return self._get_service(self._DEVSTACK_GIT_URL, 'local:trusty/devstack', - nr_units, devstack_options) - - def _get_overrides_options(self, data_ports, external_ports, zuul_branch, zuul_change, - zuul_project, zuul_ref, zuul_url): - return {'data-port': data_ports, - 'external-port': external_ports, - 'zuul-branch': zuul_branch, - 'zuul-change': zuul_change, - 'zuul-project': zuul_project, - 'zuul-ref': zuul_ref, - 'zuul-url': zuul_url} - - def nova_bundle(self): - overrides_options = self._get_overrides_options(self.options.data_ports, - self.options.external_ports, self.options.zuul_branch, - self.options.zuul_change, self.options.zuul_project, - self.options.zuul_ref, self.options.zuul_url) - - hyper_v_service = self._get_hyper_v_service( - nr_units=self.options.nr_hyper_v_units, - download_mirror='http://64.119.130.115/bin', - extra_python_packages=self.options.hyper_v_extra_python_packages, - git_user_email='hyper-v_ci@microsoft.com', - git_user_name='Hyper-V CI', - wheel_mirror='http://64.119.130.115/wheels') - - devstack_service = self._get_devstack_service( - nr_units=self.options.nr_devstack_units, - disabled_services=self.options.devstack_disabled_services, - 
enable_plugins=self.options.devstack_enabled_plugins, - enabled_services=self.options.devstack_enabled_services, - extra_packages=self.options.devstack_extra_packages, - extra_python_packages=self.options.devstack_extra_python_packages, - heat_image_url='http://10.255.251.230/Fedora.vhdx', - test_image_url='http://10.255.251.230/cirros.vhdx', - vlan_range=self.options.vlan_range) - - hyper_v_service_name = 'hyper-v-ci-%s' % self.options.zuul_uuid - devstack_service_name = 'devstack-%s' % self.options.zuul_uuid - bundle_content = { - 'nova': {'overrides': overrides_options, - 'relations': [[devstack_service_name, hyper_v_service_name]], - 'services': {devstack_service_name: devstack_service, - hyper_v_service_name: hyper_v_service} } - } - - if self.options.nr_ad_units > 0: - ad_charm = self._get_ad_service( - nr_units=self.options.nr_ad_units, - domain_name=self.options.ad_domain_name, - admin_password=self.options.ad_admin_password) - ad_charm_dict = {'active-directory': ad_charm} - bundle_content['nova']['relations'].append([hyper_v_service_name, - 'active-directory']) - bundle_content['nova']['services'].update(ad_charm_dict) - - return bundle_content diff --git a/infra/logs/collect_logs.sh b/infra/logs/collect_logs.sh deleted file mode 100644 index e69de29..0000000 diff --git a/infra/logs/utils.sh b/infra/logs/utils.sh deleted file mode 100644 index 613756f..0000000 --- a/infra/logs/utils.sh +++ /dev/null @@ -1,86 +0,0 @@ -function run_wsman_cmd() { - local host=$1 - local cmd=$2 - $BASEDIR/wsmancmd.py -u $win_user -p $win_password -U https://$1:5986/wsman $cmd -} - -function get_win_files() { - local host=$1 - local remote_dir=$2 - local local_dir=$3 - smbclient "//$host/C\$" -c "lcd $local_dir; cd $remote_dir; prompt; mget *" -U "$win_user%$win_password" -} - -function run_wsman_ps() { - local host=$1 - local cmd=$2 - run_wsman_cmd $host "powershell -NonInteractive -ExecutionPolicy RemoteSigned -Command $cmd" -} - -function get_win_hotfixes() { - local 
host=$1 - run_wsman_cmd $host "wmic qfe list" -} - -function get_win_system_info() { - local host=$1 - run_wsman_cmd $host "systeminfo" -} - -function get_win_time() { - local host=$1 - # Seconds since EPOCH - host_time=`run_wsman_ps $host "[Math]::Truncate([double]::Parse((Get-Date (get-date).ToUniversalTime() -UFormat %s)))" 2>&1` - # Skip the newline - echo ${host_time::-1} -} - -function get_win_hotfixes_log() { - local win_host=$1 - local log_file=$2 - echo "Getting hotfixes details for host: $win_host" - get_win_hotfixes $win_host > $log_file -} - -function get_win_system_info_log() { - local win_host=$1 - local log_file=$2 - echo "Getting system info for host: $win_host" - get_win_system_info $win_host > $log_file -} - -function get_win_host_log_files() { - local host_name=$1 - local local_dir=$2 - get_win_files $host_name "$host_logs_dir" $local_dir -} - -function get_win_host_config_files() { - local host_name=$1 - local local_dir=$2 - mkdir -p $local_dir - - get_win_files $host_name $host_config_dir $local_dir -} - -function check_host_time() { - local host1=$1 - local host2=$2 - host1_time=`get_win_time $host1` - host2_time=`get_win_time $host2` - local_time=`date +%s` - - local delta1=$((local_time - host1_time)) - local delta2=$((local_time - host2_time)) - if [ ${delta1#-} -gt 120 ]; - then - echo "Host $host1 time offset compared to this host is too high: $delta" - return 1 - fi - if [ ${delta2#-} -gt 120 ]; - then - echo "Host $host2 time offset compared to this host is too high: $delta" - return 1 - fi - return 0 -} From 88c15e93343e5fd38d0200bc11acaa00ffc2e4a4 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 14:31:23 +0000 Subject: [PATCH 03/22] Added nova tests. 
Now the config variables are sources inside common-job-script --- scripts/job/.neutron-ovs-config.sh.swp | Bin 12288 -> 0 bytes scripts/{job => jobs}/common-job-script.sh | 13 ++++--- scripts/{job => jobs}/neutron-ovs-config.sh | 0 scripts/{job => jobs}/ovs-config.sh | 0 tests/nova/excluded_tests.txt | 22 +++++++++++ tests/nova/included_tests.txt | 2 + tests/nova/isolated_tests.txt | 40 ++++++++++++++++++++ 7 files changed, 71 insertions(+), 6 deletions(-) delete mode 100644 scripts/job/.neutron-ovs-config.sh.swp rename scripts/{job => jobs}/common-job-script.sh (99%) rename scripts/{job => jobs}/neutron-ovs-config.sh (100%) rename scripts/{job => jobs}/ovs-config.sh (100%) create mode 100644 tests/nova/excluded_tests.txt create mode 100644 tests/nova/included_tests.txt create mode 100644 tests/nova/isolated_tests.txt diff --git a/scripts/job/.neutron-ovs-config.sh.swp b/scripts/job/.neutron-ovs-config.sh.swp deleted file mode 100644 index 0f64e250ece69aa6bf0f17197c5cf92269993b6e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI2&yUUCtVJETj@ozocYds&(EKHN6~7%48MQ;&GFn%30(IG zdHwS5AKtuwhy3+|knM6S&DMVL|JNCp=R)e4uGx;yF1Zw}4x?$#Pkq5)Elg0hP1DL* zDFR(8(HR|_waY;Atx+JiJ3T8~xe{w%Z>EJ%K>|qN1_bVsB#uIx_|SWB{OKppZcqbL zkN^@u0!RP}AOR$R1dsp{xUB?CcSL@M=HEMP;N!!5G@1_|aY6z}00|%gB!C2v01`j~ zNB{{S0VIF~ZXp3y5%T*TLO%Zhnuq`Y|M&O*KXCI0j8}IF`RyY@eu1%qaRlSZhlI>w z+=uZerbZ6TWV!YhSRt)A7)aBckErn>Zqy@hWfvDm75;cblRXb?o9ABTSAM=8z1EDYY2 zy5a1msc>|%fWw%E)iqbwF4}fn?)uHmHdh^2yIJg?`q8Z9%7~TVmEUOLkqaS_6Kh~F;K>Y`$X6_OhudA{(h2_irC!yryP(kb2*oodf` z2?qO4ZNXps61FN?U39GiF$`%qr)ff?gywTvhR2R_N|&y}PH8w)Qd-5750!+jBAQN= z%vF{NxYSu|c|s#s;f&5*1uCDqO1`A4p_0-lc6YH?rgcsxSx!Iutb7=gP!_st0kKhs zkd=*h!A3M7W<`@>Wl)wbv3(sU2B@avXOU-a);?xTBcr2T0K<5+I)k3!CuW`WZdYW^ zlghyru3}v=RC-qomEIK-C36+)ije}Ya24wcSFx^e73+$j0zAPL6J=WGWD?`_v#*m6 zV|rJ(igm?M*+0$EF03m?G2n`k0 Date: Wed, 29 Mar 2017 14:37:00 +0000 Subject: [PATCH 04/22] changed git repo to clone --- 
scripts/jobs/common-job-script.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/jobs/common-job-script.sh b/scripts/jobs/common-job-script.sh index 464f126..4d65e8f 100755 --- a/scripts/jobs/common-job-script.sh +++ b/scripts/jobs/common-job-script.sh @@ -45,7 +45,7 @@ build_exit_code=$? source $WORKSPACE/nodes exec_with_retry 5 2 ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ - "git clone https://github.com/cloudbase/common-ci.git /home/ubuntu/common-ci" + "git clone https://github.com/capsali/common-ci.git /home/ubuntu/common-ci" clone_exit_code=$? exec_with_retry 5 2 ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ From 41f8757bd95cf693cfe402953d741783b361ccb5 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 15:56:23 +0000 Subject: [PATCH 05/22] minor adjustments --- scripts/jobs/common-job-script.sh | 10 ++++------ scripts/jobs/ovs-config.sh | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/scripts/jobs/common-job-script.sh b/scripts/jobs/common-job-script.sh index 4d65e8f..d7d1065 100755 --- a/scripts/jobs/common-job-script.sh +++ b/scripts/jobs/common-job-script.sh @@ -113,18 +113,16 @@ wget http://10.20.1.3:8080/job/$JOB_NAME/$BUILD_ID/consoleText -O $LOG_DIR/conso find $LOG_DIR -name "*.log" -exec gzip {} \; -tar -zcf $LOG_DIR/aggregate.tar.gz $LOG_DIR +pushd $LOG_DIR +tar -zcf aggregate.tar.gz . +popd +#tar -zcf $LOG_DIR/aggregate.tar.gz $LOG_DIR if [ $project == "ovs" ]; then if [ ! $UUID ]; then exit 1 fi REMOTE_LOG_PATH="/srv/logs/ovs/tempest-run/$UUID" -elif [ $network_type == "ovs" ]; then - if [ ! $project ] || [ ! $ZUUL_CHANGE ] || [ ! $ZUUL_PATCHSET ]; then - exit 1 - fi - REMOTE_LOG_PATH="/srv/logs/${project}-ovs/$ZUUL_CHANGE/$ZUUL_PATCHSET" else if [ ! $project ] || [ ! $ZUUL_CHANGE ] || [ ! 
$ZUUL_PATCHSET ]; then exit 1 diff --git a/scripts/jobs/ovs-config.sh b/scripts/jobs/ovs-config.sh index f5f10da..67179d3 100644 --- a/scripts/jobs/ovs-config.sh +++ b/scripts/jobs/ovs-config.sh @@ -2,7 +2,7 @@ CI_CREDS="ovs-creds.yaml" test_signing="true" data_port="E4:1D:2D:22:A0:30 E4:1D:2D:22:A6:30 E4:1D:2D:22:A1:E0 24:8A:07:77:3D:00" external_port="18:A9:05:58:F7:76 00:23:7D:D2:CF:02 00:23:7D:D2:D8:D2 00:23:7D:D2:D8:72" -zuul_branch="master" +ZUUL_BRANCH="master" prep_project="False" os_data_network="10.12.3.0/24" hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes/41/417141/2|master" From df97448a5cc7a85ef3eb4a7c5920454c51fac13d Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 16:32:05 +0000 Subject: [PATCH 06/22] added ipv6 flag in the bundle --- scripts/jobs/neutron-ovs-config.sh | 1 + scripts/jobs/ovs-config.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/scripts/jobs/neutron-ovs-config.sh b/scripts/jobs/neutron-ovs-config.sh index b1d02e4..091530a 100644 --- a/scripts/jobs/neutron-ovs-config.sh +++ b/scripts/jobs/neutron-ovs-config.sh @@ -6,6 +6,7 @@ prep_project="True" os_data_network="10.21.2.0/23" hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes/41/417141/2|master" devstack_cherry_picks="https://git.openstack.org/openstack/tempest|refs/changes/49/383049/13|master,https://git.openstack.org/openstack/tempest|refs/changes/28/384528/9|master" +disable_ipv6="false" win_user="Administrator" win_password="Passw0rd" ovs_installer="http://10.20.1.14:8080/openvswitch-hyperv-2.6.1-certified.msi" diff --git a/scripts/jobs/ovs-config.sh b/scripts/jobs/ovs-config.sh index 67179d3..0abe992 100644 --- a/scripts/jobs/ovs-config.sh +++ b/scripts/jobs/ovs-config.sh @@ -7,6 +7,7 @@ prep_project="False" os_data_network="10.12.3.0/24" hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes/41/417141/2|master" 
devstack_cherry_picks="https://git.openstack.org/openstack/tempest|refs/changes/49/383049/13|master,https://git.openstack.org/openstack/tempest|refs/changes/28/384528/9|master" +disable_ipv6="false" win_user="Administrator" win_password="Passw0rd" ovs_installer="http://10.20.1.14:8080/ovs/$UUID/OpenvSwitch.msi" From b42ee536d586c31a4f4ece0ecdde72d1f007f428 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 16:36:00 +0000 Subject: [PATCH 07/22] minor changes --- templates/bundle.template | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/templates/bundle.template b/templates/bundle.template index ad76dda..bd9a547 100644 --- a/templates/bundle.template +++ b/templates/bundle.template @@ -29,6 +29,7 @@ services: extra-python-packages: "git+https://github.com/petrutlucian94/pywinrm.git lxml==3.6.4" heat-image-url: ${heat_image_url} test-image-url: ${test_image_url} + disable-ipv6: ${disable_ipv6} ml2-mechanism: openvswitch tenant-network-type: vxlan enable-tunneling: True @@ -76,4 +77,4 @@ services: zuul-project: ${ZUUL_PROJECT} zuul-ref: ${ZUUL_REF} zuul-url: ${ZUUL_URL} - pip-version: "pip==8.1.1" \ No newline at end of file + pip-version: "pip==8.1.1" From af5cdf9853d66b35c7f6be81aa57158a2821d69e Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 17:35:30 +0000 Subject: [PATCH 08/22] fixed typo in file naming --- scripts/jobs/common-job-script.sh | 4 ++-- scripts/logs/{collect_logs.sh => collect-logs.sh} | 0 tests/neutron-ovs/{excluded_tests.txt => excluded-tests.txt} | 0 tests/neutron-ovs/{included_tests.txt => included-tests.txt} | 0 tests/neutron-ovs/{isolated_tests.txt => isolated-tests.txt} | 0 tests/nova/{excluded_tests.txt => excluded-tests.txt} | 0 tests/nova/{included_tests.txt => included-tests.txt} | 0 tests/nova/{isolated_tests.txt => isolated-tests.txt} | 0 tests/ovs/{excluded_tests.txt => excluded-tests.txt} | 0 tests/ovs/{included_tests.txt => included-tests.txt} | 0 tests/ovs/{isolated_tests.txt => 
isolated-tests.txt} | 0 11 files changed, 2 insertions(+), 2 deletions(-) rename scripts/logs/{collect_logs.sh => collect-logs.sh} (100%) rename tests/neutron-ovs/{excluded_tests.txt => excluded-tests.txt} (100%) rename tests/neutron-ovs/{included_tests.txt => included-tests.txt} (100%) rename tests/neutron-ovs/{isolated_tests.txt => isolated-tests.txt} (100%) rename tests/nova/{excluded_tests.txt => excluded-tests.txt} (100%) rename tests/nova/{included_tests.txt => included-tests.txt} (100%) rename tests/nova/{isolated_tests.txt => isolated-tests.txt} (100%) rename tests/ovs/{excluded_tests.txt => excluded-tests.txt} (100%) rename tests/ovs/{included_tests.txt => included-tests.txt} (100%) rename tests/ovs/{isolated_tests.txt => isolated-tests.txt} (100%) diff --git a/scripts/jobs/common-job-script.sh b/scripts/jobs/common-job-script.sh index d7d1065..3f7c93f 100755 --- a/scripts/jobs/common-job-script.sh +++ b/scripts/jobs/common-job-script.sh @@ -59,8 +59,8 @@ if [[ $build_exit_code -eq 0 ]]; then exec_with_retry 5 2 ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ "mkdir -p /home/ubuntu/tempest" ssh -tt -o 'PasswordAuthentication=no' -o 'StrictHostKeyChecking=no' -o 'UserKnownHostsFile=/dev/null' -i $JUJU_SSH_KEY ubuntu@$DEVSTACK \ - "/home/ubuntu/common-ci/scripts/bin/run-all-tests.sh --include-file /home/ubuntu/common-ci/tests/$project/included_tests.txt \ - --exclude-file /home/ubuntu/common-ci/tests/$project/excluded_tests.txt --isolated-file /home/ubuntu/common-ci/tests/$project/isolated_tests.txt \ + "/home/ubuntu/common-ci/scripts/bin/run-all-tests.sh --include-file /home/ubuntu/common-ci/tests/$project/included-tests.txt \ + --exclude-file /home/ubuntu/common-ci/tests/$project/excluded-tests.txt --isolated-file /home/ubuntu/common-ci/tests/$project/isolated-tests.txt \ --tests-dir /opt/stack/tempest --parallel-tests 10 --max-attempts 2" tests_exit_code=$? 
fi diff --git a/scripts/logs/collect_logs.sh b/scripts/logs/collect-logs.sh similarity index 100% rename from scripts/logs/collect_logs.sh rename to scripts/logs/collect-logs.sh diff --git a/tests/neutron-ovs/excluded_tests.txt b/tests/neutron-ovs/excluded-tests.txt similarity index 100% rename from tests/neutron-ovs/excluded_tests.txt rename to tests/neutron-ovs/excluded-tests.txt diff --git a/tests/neutron-ovs/included_tests.txt b/tests/neutron-ovs/included-tests.txt similarity index 100% rename from tests/neutron-ovs/included_tests.txt rename to tests/neutron-ovs/included-tests.txt diff --git a/tests/neutron-ovs/isolated_tests.txt b/tests/neutron-ovs/isolated-tests.txt similarity index 100% rename from tests/neutron-ovs/isolated_tests.txt rename to tests/neutron-ovs/isolated-tests.txt diff --git a/tests/nova/excluded_tests.txt b/tests/nova/excluded-tests.txt similarity index 100% rename from tests/nova/excluded_tests.txt rename to tests/nova/excluded-tests.txt diff --git a/tests/nova/included_tests.txt b/tests/nova/included-tests.txt similarity index 100% rename from tests/nova/included_tests.txt rename to tests/nova/included-tests.txt diff --git a/tests/nova/isolated_tests.txt b/tests/nova/isolated-tests.txt similarity index 100% rename from tests/nova/isolated_tests.txt rename to tests/nova/isolated-tests.txt diff --git a/tests/ovs/excluded_tests.txt b/tests/ovs/excluded-tests.txt similarity index 100% rename from tests/ovs/excluded_tests.txt rename to tests/ovs/excluded-tests.txt diff --git a/tests/ovs/included_tests.txt b/tests/ovs/included-tests.txt similarity index 100% rename from tests/ovs/included_tests.txt rename to tests/ovs/included-tests.txt diff --git a/tests/ovs/isolated_tests.txt b/tests/ovs/isolated-tests.txt similarity index 100% rename from tests/ovs/isolated_tests.txt rename to tests/ovs/isolated-tests.txt From b5cb8462b028b29349ff0cc4646425ee40b11ce7 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 29 Mar 2017 19:55:01 +0000 Subject: [PATCH 
09/22] changed template to reflect new config options --- templates/bundle.template | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/bundle.template b/templates/bundle.template index bd9a547..cd2f581 100644 --- a/templates/bundle.template +++ b/templates/bundle.template @@ -29,7 +29,7 @@ services: extra-python-packages: "git+https://github.com/petrutlucian94/pywinrm.git lxml==3.6.4" heat-image-url: ${heat_image_url} test-image-url: ${test_image_url} - disable-ipv6: ${disable_ipv6} +# ip-version: ${ip_version} ml2-mechanism: openvswitch tenant-network-type: vxlan enable-tunneling: True From 54f49f1ce5a2de4bd1dec11f9497631cd2a7b59e Mon Sep 17 00:00:00 2001 From: capsali Date: Thu, 30 Mar 2017 15:20:41 +0300 Subject: [PATCH 10/22] Added get-isolated-tests.sh script to be able to correctly run the isolated tests --- scripts/bin/get-isolated-tests.sh | 40 +++++++++++++++++++++++++++++++ scripts/bin/run-all-tests.sh | 8 +++++-- 2 files changed, 46 insertions(+), 2 deletions(-) create mode 100755 scripts/bin/get-isolated-tests.sh diff --git a/scripts/bin/get-isolated-tests.sh b/scripts/bin/get-isolated-tests.sh new file mode 100755 index 0000000..a65a6c7 --- /dev/null +++ b/scripts/bin/get-isolated-tests.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -e + +array_to_regex() +{ + local ar=(${@}) + local regex="" + + for s in "${ar[@]}" + do + if [ "$regex" ]; then + regex+="\\|" + fi + regex+="^"$(echo $s | sed -e 's/[]\/$*.^|[]/\\&/g') + done + echo $regex +} + +tests_dir=$1 + +isolated_tests_file=$2 +exclude_tests_file=$3 + +if [ -f "$exclude_tests_file" ]; then + exclude_tests=(`awk 'NF && $1!~/^#/' $exclude_tests_file`) +fi + +if [ -f "$isolated_tests_file" ]; then + isolated_tests=(`awk 'NF && $1!~/^#/' $isolated_tests_file`) +fi + +exclude_regex=$(array_to_regex ${exclude_tests[@]}) +include_regex=$(array_to_regex ${isolated_tests[@]}) + +if [ ! 
"$exclude_regex" ]; then + exclude_regex='^$' +fi + +cd $tests_dir +testr list-tests | grep $include_regex | grep -v $exclude_regex diff --git a/scripts/bin/run-all-tests.sh b/scripts/bin/run-all-tests.sh index f5edd72..ea4cb65 100755 --- a/scripts/bin/run-all-tests.sh +++ b/scripts/bin/run-all-tests.sh @@ -56,6 +56,7 @@ if [ -z "$LOG_FILE" ]; then LOG_FILE="/home/ubuntu/tempest/subunit-output.log"; if [ -z "$RESULTS_HTML_FILE" ]; then RESULTS_HTML_FILE="/home/ubuntu/tempest/results.html"; fi BASEDIR=$(dirname $0) +SUBUNIT_STATS="/home/ubuntu/tempest/subunit_stats.log" pushd $BASEDIR @@ -82,11 +83,14 @@ $BASEDIR/parallel-test-runner.sh $TESTS_FILE $TESTS_DIR $LOG_FILE \ if [ -f "$ISOLATED_FILE" ]; then echo "Running isolated tests from: $ISOLATED_FILE" + isolated_tests_file=$(tempfile) + $BASEDIR/get-isolated-tests.sh $TESTS_DIR $ISOLATED_FILE $EXCLUDE_FILE > $isolated_tests_file log_tmp=$(tempfile) - $BASEDIR/parallel-test-runner.sh $ISOLATED_FILE $TESTS_DIR $log_tmp \ + $BASEDIR/parallel-test-runner.sh $isolated_tests_file $TESTS_DIR $log_tmp \ $PARALLEL_TESTS $MAX_ATTEMPTS 1 || true cat $log_tmp >> $LOG_FILE + rm $isolated_tests_file rm $log_tmp fi @@ -97,7 +101,7 @@ deactivate echo "Generating HTML report..." $BASEDIR/get-results-html.sh $LOG_FILE $RESULTS_HTML_FILE -subunit-stats $LOG_FILE > /dev/null +subunit-stats $LOG_FILE > $SUBUNIT_STATS #/dev/null exit_code=$? echo "Total execution time: $SECONDS seconds." 
From 2e8c1a6f2687260e85a382710bbcb9bd85040dde Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Thu, 30 Mar 2017 14:52:32 +0000 Subject: [PATCH 11/22] Added tempest-output.log file and group all tempest logs into tempest directory --- scripts/bin/run-all-tests.sh | 3 +++ scripts/logs/utils.sh | 7 +++++-- templates/bundle.template | 5 +---- tests/neutron-ovs/included-tests.txt | 1 - 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/scripts/bin/run-all-tests.sh b/scripts/bin/run-all-tests.sh index ea4cb65..9575266 100755 --- a/scripts/bin/run-all-tests.sh +++ b/scripts/bin/run-all-tests.sh @@ -57,6 +57,7 @@ if [ -z "$RESULTS_HTML_FILE" ]; then RESULTS_HTML_FILE="/home/ubuntu/tempest/res BASEDIR=$(dirname $0) SUBUNIT_STATS="/home/ubuntu/tempest/subunit_stats.log" +TEMPEST_OUTPUT="/home/ubuntu/tempest/tempest-output.log" pushd $BASEDIR @@ -101,6 +102,8 @@ deactivate echo "Generating HTML report..." $BASEDIR/get-results-html.sh $LOG_FILE $RESULTS_HTML_FILE +cat $LOG_FILE | subunit-trace -n -f > $TEMPEST_OUTPUT 2>&1 || true + subunit-stats $LOG_FILE > $SUBUNIT_STATS #/dev/null exit_code=$? diff --git a/scripts/logs/utils.sh b/scripts/logs/utils.sh index 1f782ae..1fc2119 100644 --- a/scripts/logs/utils.sh +++ b/scripts/logs/utils.sh @@ -182,10 +182,13 @@ function archive_devstack_configs() { function archive_tempest_files() { local TEMPEST_LOGS="/home/ubuntu/tempest" - + local LOG_DST_TEMPEST=${1:-$LOG_DST/tempest} + if [ ! 
-d "$LOG_DST_TEMPEST" ]; then + mkdir -p "$LOG_DST_TEMPEST" + fi for i in `ls -A $TEMPEST_LOGS` do - $GZIP "$TEMPEST_LOGS/$i" -c > "$LOG_DST/$i.gz" || emit_error "L133: Failed to archive tempest logs" + $GZIP "$TEMPEST_LOGS/$i" -c > "$LOG_DST_TEMPEST/$i.gz" || emit_error "L133: Failed to archive tempest logs" done } diff --git a/templates/bundle.template b/templates/bundle.template index cd2f581..70721a5 100644 --- a/templates/bundle.template +++ b/templates/bundle.template @@ -29,18 +29,16 @@ services: extra-python-packages: "git+https://github.com/petrutlucian94/pywinrm.git lxml==3.6.4" heat-image-url: ${heat_image_url} test-image-url: ${test_image_url} -# ip-version: ${ip_version} ml2-mechanism: openvswitch tenant-network-type: vxlan enable-tunneling: True enable-live-migration: True ntp-server: pool.ntp.org vlan-range: 2500:2550 - nameservers: 10.20.1.37 + nameservers: 10.20.1.37 8.8.8.8 enable-vlans: False scenario-img: ${scenario_img} cherry-picks: ${devstack_cherry_picks} - #scenario-img: ubuntu_final.vhdx.zip pypi-mirror: http://10.20.1.8:8080/cloudbase/CI/+simple/ data-port: ${data_port} external-port: ${external_port} @@ -68,7 +66,6 @@ services: post-python-packages: "kombu==4.0.1 amqp==2.1.3 SQLAlchemy==1.0.17" git-user-email: "mcapsali@gmail.com" git-user-name: "capsali" - # cherry-picks: "neutron|https://review.openstack.org/openstack/neutron|refs/changes/77/227077/11|21818de8a9041d3e7e63922bb9fa1edc5475adee" cherry-picks: "${hyperv_cherry_picks}" pypi-mirror: http://10.20.1.8:8080/cloudbase/CI/+simple/ data-port: ${data_port} diff --git a/tests/neutron-ovs/included-tests.txt b/tests/neutron-ovs/included-tests.txt index 67b4b58..19b9918 100644 --- a/tests/neutron-ovs/included-tests.txt +++ b/tests/neutron-ovs/included-tests.txt @@ -26,7 +26,6 @@ tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_suspe tempest.scenario.test_minimum_basic.TestMinimumBasicScenario.test_minimum_basic_scenario 
tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps tempest.scenario.test_network_basic_ops.TestNetworkBasicOps -tempest.scenario.test_network_v6.TestGettingAddress tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_server_sequence_suspend_resume tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basicops From 1997a7465afa6df1208a75823c355fee6d3f778e Mon Sep 17 00:00:00 2001 From: capsali Date: Fri, 31 Mar 2017 16:20:15 +0300 Subject: [PATCH 12/22] Added event-log in hyperv logs --- scripts/jobs/common-job-script.sh | 8 +- scripts/jobs/neutron-ovs-config.sh | 4 +- scripts/jobs/ovs-config.sh | 4 +- scripts/logs/export-eventlog.ps1 | 53 ++++ scripts/logs/utils.sh | 8 + templates/bundle.template | 6 +- templates/eventlog_css.txt | 44 +++ templates/eventlog_js.txt | 477 +++++++++++++++++++++++++++++ 8 files changed, 598 insertions(+), 6 deletions(-) create mode 100644 scripts/logs/export-eventlog.ps1 create mode 100644 templates/eventlog_css.txt create mode 100644 templates/eventlog_js.txt diff --git a/scripts/jobs/common-job-script.sh b/scripts/jobs/common-job-script.sh index 3f7c93f..b297ca4 100755 --- a/scripts/jobs/common-job-script.sh +++ b/scripts/jobs/common-job-script.sh @@ -26,7 +26,7 @@ set +e source ${WORKSPACE}/common-ci/scripts/jobs/${project}-config.sh -DEPLOYER_PATH="/home/ubuntu/deployer" +DEPLOYER_PATH="/home/ubuntu/ci-deployer" JUJU_SSH_KEY="/home/ubuntu/.local/share/juju/ssh/juju_id_rsa" LOGS_SERVER="10.20.1.14" LOGS_SSH_KEY="/home/ubuntu/.ssh/norman.pem" @@ -89,6 +89,12 @@ for hv in $(echo $HYPERV | tr "," "\n"); do mkdir -p $HV_LOGS mkdir -p $HV_CONFS + run_wsman_ps $hv "New-Item -Force -Type directory -Path C:\Openstack\Eventlog" + set_win_files $hv "\Openstack\Eventlog" "${WORKSPACE}/common-ci/scripts/logs/" "export-eventlog.ps1" + set_win_files $hv "\Openstack\Eventlog" "${WORKSPACE}/common-ci/templates/" "eventlog_css.txt" + set_win_files $hv "\Openstack\Eventlog" 
"${WORKSPACE}/common-ci/templates/" "eventlog_js.txt" + run_wsman_ps $hv "C:\Openstack\Eventlog\export-eventlog.ps1" + get_win_files $hv "\openstack\log" $HV_LOGS get_win_files $hv "\openstack\etc" $HV_CONFS get_win_files $hv "\juju\log" $HV_LOGS diff --git a/scripts/jobs/neutron-ovs-config.sh b/scripts/jobs/neutron-ovs-config.sh index 091530a..e7b898c 100644 --- a/scripts/jobs/neutron-ovs-config.sh +++ b/scripts/jobs/neutron-ovs-config.sh @@ -1,3 +1,5 @@ +#!/bin/bash + CI_CREDS="neutron-ovs-creds.yaml" test_signing="false" data_port="00:07:43:13:97:c8 00:07:43:13:96:b8 00:07:43:13:a6:08 00:07:43:14:d2:e8 00:07:43:13:f1:48 00:07:43:13:f1:88 00:07:43:13:b3:88 00:07:43:13:b5:18 00:07:43:13:ea:78 00:07:43:13:f1:68 00:07:43:13:9b:f8 00:07:43:14:12:c8 00:07:43:14:12:78 00:07:43:13:f1:58 00:07:43:14:12:88 00:07:43:14:12:98 00:07:43:13:a0:f8 00:07:43:13:9a:78 00:07:43:14:18:18 00:07:43:13:a1:48 00:07:43:14:1f:38 00:07:43:14:1b:48 00:07:43:14:18:38 00:07:43:13:f4:b8 00:07:43:13:98:48 00:07:43:13:f4:f8 00:07:43:14:18:98 00:07:43:13:f1:28 00:07:43:14:1a:18" @@ -8,7 +10,7 @@ hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes devstack_cherry_picks="https://git.openstack.org/openstack/tempest|refs/changes/49/383049/13|master,https://git.openstack.org/openstack/tempest|refs/changes/28/384528/9|master" disable_ipv6="false" win_user="Administrator" -win_password="Passw0rd" +win_password=$(openssl rand -base64 32) ovs_installer="http://10.20.1.14:8080/openvswitch-hyperv-2.6.1-certified.msi" heat_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" test_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" diff --git a/scripts/jobs/ovs-config.sh b/scripts/jobs/ovs-config.sh index 0abe992..f75c071 100644 --- a/scripts/jobs/ovs-config.sh +++ b/scripts/jobs/ovs-config.sh @@ -1,3 +1,5 @@ +#!/bin/bash + CI_CREDS="ovs-creds.yaml" test_signing="true" data_port="E4:1D:2D:22:A0:30 E4:1D:2D:22:A6:30 E4:1D:2D:22:A1:E0 24:8A:07:77:3D:00" @@ -9,7 +11,7 @@ 
hyperv_cherry_picks="https://review.openstack.org/openstack/neutron|refs/changes devstack_cherry_picks="https://git.openstack.org/openstack/tempest|refs/changes/49/383049/13|master,https://git.openstack.org/openstack/tempest|refs/changes/28/384528/9|master" disable_ipv6="false" win_user="Administrator" -win_password="Passw0rd" +win_password=$(openssl rand -base64 32) ovs_installer="http://10.20.1.14:8080/ovs/$UUID/OpenvSwitch.msi" ovs_certificate="http://10.20.1.14:8080/ovs/$UUID/package.cer" heat_image_url="http://10.20.1.14:8080/cirros-latest.vhdx" diff --git a/scripts/logs/export-eventlog.ps1 b/scripts/logs/export-eventlog.ps1 new file mode 100644 index 0000000..696f06d --- /dev/null +++ b/scripts/logs/export-eventlog.ps1 @@ -0,0 +1,53 @@ +# Loading config and utils + +function dumpeventlog($path){ + + foreach ($i in (get-winevent -ListLog * | ? {$_.RecordCount -gt 0 })) { + $logName = "eventlog_" + $i.LogName + ".evtx" + $logName = $logName.replace(" ","-").replace("/", "-").replace("\", "-") + Write-Host "exporting "$i.LogName" as "$logName + $bkup = Join-Path $path $logName + wevtutil epl $i.LogName $bkup + } +} + +function exporthtmleventlog($path){ + $css = Get-Content $eventlogcsspath -Raw + $js = Get-Content $eventlogjspath -Raw + $HTMLHeader = @" + + + +"@ + + foreach ($i in (get-winevent -ListLog * | ? {$_.RecordCount -gt 0 })) { + $Report = (get-winevent -LogName $i.LogName) + $logName = "eventlog_" + $i.LogName + ".html" + $logName = $logName.replace(" ","-").replace("/", "-").replace("\", "-") + Write-Host "exporting "$i.LogName" as "$logName + $Report = $Report | ConvertTo-Html -Title "${i}" -Head $HTMLHeader -As Table + $Report = $Report | ForEach-Object {$_ -replace "", ''} + $Report = $Report | ForEach-Object {$_ -replace "", '
'} + $bkup = Join-Path $path $logName + $Report = $Report | Set-Content $bkup + } +} + +function cleareventlog(){ + foreach ($i in (get-winevent -ListLog * | ? {$_.RecordCount -gt 0 })) { + wevtutil cl $i.LogName + } +} + +$eventlogPath = "C:\OpenStack\Logs\Eventlog" +$eventlogcsspath = "C:\Openstack\Eventlog\eventlog_css.txt" +$eventlogjspath = "C:\Openstack\Eventlog\eventlog_js.txt" + +if (Test-Path $eventlogPath){ + Remove-Item $eventlogPath -recurse -force +} + +New-Item -ItemType Directory -Force -Path $eventlogPath + +dumpeventlog $eventlogPath +exporthtmleventlog $eventlogPath diff --git a/scripts/logs/utils.sh b/scripts/logs/utils.sh index 1fc2119..c199d22 100644 --- a/scripts/logs/utils.sh +++ b/scripts/logs/utils.sh @@ -42,6 +42,14 @@ function get_win_files() { smbclient "//$host/C\$" -c "prompt OFF; recurse ON; lcd $local_dir; cd $remote_dir; mget *" -U "$win_user%$win_password" } +function set_win_files() { + local host=$1 + local remote_dir=$2 + local local_dir=$3 + local local_file=$4 + smbclient "//$host/C\$" --directory $remote_dir -c "prompt OFF; recurse ON; lcd $local_dir; put $local_file" -U "$win_user%$win_password" +} + function run_wsman_ps() { local host=$1 local cmd=$2 diff --git a/templates/bundle.template b/templates/bundle.template index 70721a5..43ad7ad 100644 --- a/templates/bundle.template +++ b/templates/bundle.template @@ -9,11 +9,11 @@ services: series: win2016 constraints: "tags=$ADTAGS" options: - administrator-password: "Passw0rd" - safe-mode-password: "Passw0rd" + administrator-password: ${win_password} + safe-mode-password: ${win_password} domain-name: openvswitch.local domain-user: "openvswitch" - domain-user-password: "Passw0rd" + domain-user-password: ${win_password} devstack-${UUID}: charm: /home/ubuntu/charms/ubuntu/devstack num_units: 1 diff --git a/templates/eventlog_css.txt b/templates/eventlog_css.txt new file mode 100644 index 0000000..e05d6ac --- /dev/null +++ b/templates/eventlog_css.txt @@ -0,0 +1,44 @@ + +body { 
+color: black; +} +#table{ +-moz-user-select:none; +cursor:default; +font-family:Arial,sans-serif; +font-size:11px; +width:100%; +} +#table th#hoverTH { +background:#555555 none repeat scroll 0%; +} +#table.sortable th { +background:#666666 none repeat scroll 0%; +border-bottom:1px solid #444444; +border-left:1px solid #555555; +border-top:1px solid #444444; +color:#FFFFFF; +cursor:pointer; +padding:4px 0pt 4px 9px; +text-align:left; +} + +table tr:nth-child(2n) td { +background:#EDF3FE none repeat scroll 0%; +border-bottom:1px solid #E8F0FF; +border-right:1px solid #FFFFFF; +border-top:1px solid #E8F0FF; +} +table tr:nth-child(2n+1) td { +background:#FFFFFF none repeat scroll 0%; +border-bottom:1px solid #FFFFFF; +border-right:1px solid #FFFFFF; +border-top:1px solid #FFFFFF; +} + +#table tr td { +border-bottom:1px solid #FFFFFF; +border-left:1px solid #D9D9D9; +border-top:1px solid #FFFFFF; +padding:3px 8px; +} diff --git a/templates/eventlog_js.txt b/templates/eventlog_js.txt new file mode 100644 index 0000000..8f07716 --- /dev/null +++ b/templates/eventlog_js.txt @@ -0,0 +1,477 @@ + +var stIsIE = /*@cc_on!@*/false; + +sorttable = { + init: function() { + // quit if this function has already been called + if (arguments.callee.done) return; + // flag this function so we don't do the same thing twice + arguments.callee.done = true; + // kill the timer + if (_timer) clearInterval(_timer); + + if (!document.createElement || !document.getElementsByTagName) return; + + sorttable.DATE_RE = /^(\d\d?)[\/\.-](\d\d?)[\/\.-]((\d\d)?\d\d)$/; + + forEach(document.getElementsByTagName('table'), function(table) { + if (table.className.search(/\bsortable\b/) != -1) { + sorttable.makeSortable(table); + } + }); + + }, + + makeSortable: function(table) { + if (table.getElementsByTagName('thead').length == 0) { + // table doesn't have a tHead. Since it should have, create one and + // put the first table row in it. 
+ the = document.createElement('thead'); + the.appendChild(table.rows[0]); + table.insertBefore(the,table.firstChild); + } + // Safari doesn't support table.tHead, sigh + if (table.tHead == null) table.tHead = table.getElementsByTagName('thead')[0]; + + if (table.tHead.rows.length != 1) return; // can't cope with two header rows + + // Sorttable v1 put rows with a class of "sortbottom" at the bottom (as + // "total" rows, for example). This is B&R, since what you're supposed + // to do is put them in a tfoot. So, if there are sortbottom rows, + // for backwards compatibility, move them to tfoot (creating it if needed). + sortbottomrows = []; + for (var i=0; i5' : ' ▴'; + this.appendChild(sortrevind); + return; + } + if (this.className.search(/\bsorttable_sorted_reverse\b/) != -1) { + // if we're already sorted by this column in reverse, just + // re-reverse the table, which is quicker + sorttable.reverse(this.sorttable_tbody); + this.className = this.className.replace('sorttable_sorted_reverse', + 'sorttable_sorted'); + this.removeChild(document.getElementById('sorttable_sortrevind')); + sortfwdind = document.createElement('span'); + sortfwdind.id = "sorttable_sortfwdind"; + sortfwdind.innerHTML = stIsIE ? 
' 6' : ' ▾'; + this.appendChild(sortfwdind); + return; + } + + // remove sorttable_sorted classes + theadrow = this.parentNode; + forEach(theadrow.childNodes, function(cell) { + if (cell.nodeType == 1) { // an element + cell.className = cell.className.replace('sorttable_sorted_reverse',''); + cell.className = cell.className.replace('sorttable_sorted',''); + } + }); + sortfwdind = document.getElementById('sorttable_sortfwdind'); + if (sortfwdind) { sortfwdind.parentNode.removeChild(sortfwdind); } + sortrevind = document.getElementById('sorttable_sortrevind'); + if (sortrevind) { sortrevind.parentNode.removeChild(sortrevind); } + + this.className += ' sorttable_sorted'; + sortfwdind = document.createElement('span'); + sortfwdind.id = "sorttable_sortfwdind"; + sortfwdind.innerHTML = stIsIE ? ' 6' : ' ▾'; + this.appendChild(sortfwdind); + + // build an array to sort. This is a Schwartzian transform thing, + // i.e., we "decorate" each row with the actual sort key, + // sort based on the sort keys, and then put the rows back in order + // which is a lot faster because you only do getInnerText once per row + row_array = []; + col = this.sorttable_columnindex; + rows = this.sorttable_tbody.rows; + for (var j=0; j$]?[\d,.]+%?$/)) { + return sorttable.sort_numeric; + } + // check for a date: dd/mm/yyyy or dd/mm/yy + // can have / or . or - as separator + // can be mm/dd as well + possdate = text.match(sorttable.DATE_RE) + if (possdate) { + // looks like a date + first = parseInt(possdate[1]); + second = parseInt(possdate[2]); + if (first > 12) { + // definitely dd/mm + return sorttable.sort_ddmm; + } else if (second > 12) { + return sorttable.sort_mmdd; + } else { + // looks like a date, but we can't tell which, so assume + // that it's dd/mm (English imperialism!) and keep looking + sortfn = sorttable.sort_ddmm; + } + } + } + } + return sortfn; + }, + + getInnerText: function(node) { + // gets the text we want to use for sorting for a cell. 
+ // strips leading and trailing whitespace. + // this is *not* a generic getInnerText function; it's special to sorttable. + // for example, you can override the cell text with a customkey attribute. + // it also gets .value for fields. + + if (!node) return ""; + + hasInputs = (typeof node.getElementsByTagName == 'function') && + node.getElementsByTagName('input').length; + + if (node.getAttribute("sorttable_customkey") != null) { + return node.getAttribute("sorttable_customkey"); + } + else if (typeof node.textContent != 'undefined' && !hasInputs) { + return node.textContent.replace(/^\s+|\s+$/g, ''); + } + else if (typeof node.innerText != 'undefined' && !hasInputs) { + return node.innerText.replace(/^\s+|\s+$/g, ''); + } + else if (typeof node.text != 'undefined' && !hasInputs) { + return node.text.replace(/^\s+|\s+$/g, ''); + } + else { + switch (node.nodeType) { + case 3: + if (node.nodeName.toLowerCase() == 'input') { + return node.value.replace(/^\s+|\s+$/g, ''); + } + case 4: + return node.nodeValue.replace(/^\s+|\s+$/g, ''); + break; + case 1: + case 11: + var innerText = ''; + for (var i = 0; i < node.childNodes.length; i++) { + innerText += sorttable.getInnerText(node.childNodes[i]); + } + return innerText.replace(/^\s+|\s+$/g, ''); + break; + default: + return ''; + } + } + }, + + reverse: function(tbody) { + // reverse the rows in a tbody + newrows = []; + for (var i=0; i=0; i--) { + tbody.appendChild(newrows[i]); + } + delete newrows; + }, + + /* sort functions + each sort function takes two parameters, a and b + you are comparing a[0] and b[0] */ + sort_numeric: function(a,b) { + aa = parseFloat(a[0].replace(/[^0-9.-]/g,'')); + if (isNaN(aa)) aa = 0; + bb = parseFloat(b[0].replace(/[^0-9.-]/g,'')); + if (isNaN(bb)) bb = 0; + return aa-bb; + }, + sort_alpha: function(a,b) { + if (a[0]==b[0]) return 0; + if (a[0] 0 ) { + var q = list[i]; list[i] = list[i+1]; list[i+1] = q; + swap = true; + } + } // for + t--; + + if (!swap) break; + + for(var i = t; 
i > b; --i) { + if ( comp_func(list[i], list[i-1]) < 0 ) { + var q = list[i]; list[i] = list[i-1]; list[i-1] = q; + swap = true; + } + } // for + b++; + + } // while(swap) + } +} + +/* ****************************************************************** + Supporting functions: bundled here to avoid depending on a library + ****************************************************************** */ + +// Dean Edwards/Matthias Miller/John Resig + +/* for Mozilla/Opera9 */ +if (document.addEventListener) { + document.addEventListener("DOMContentLoaded", sorttable.init, false); +} + +/* for Internet Explorer */ +/*@cc_on @*/ +/*@if (@_win32) + document.write("